Dataset columns:
- input: string, 53 to 297k characters (the component source file)
- output: string, 604 distinct values (the paired test file)
- repo_name: string, 376 distinct values
- test_path: string, 583 distinct values
- code_path: string, 7 to 116 characters
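Each row pairs a Home Assistant component source file (input, located at code_path) with a test file (output, located at test_path) from the repository named in repo_name. As a minimal sketch of how such rows could be read, assuming they are stored one-per-line as JSON (the file name below is hypothetical):

import json

# Hypothetical file name: the dump does not say how the rows are stored.
# Each line is assumed to hold one row with the five columns listed above.
with open("code_test_pairs.jsonl", encoding="utf-8") as handle:
    for line in handle:
        row = json.loads(line)
        print(row["repo_name"], row["code_path"], "->", row["test_path"])
        print(f"  input: {len(row['input'])} chars, output: {len(row['output'])} chars")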
"""Component for the Portuguese weather service - IPMA.""" from homeassistant.core import Config, HomeAssistant from .config_flow import IpmaFlowHandler # noqa: F401 from .const import DOMAIN # noqa: F401 DEFAULT_NAME = "ipma" async def async_setup(hass: HomeAssistant, config: Config) -> bool: """Set up configured IPMA.""" return True async def async_setup_entry(hass, config_entry): """Set up IPMA station as config entry.""" hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, "weather") ) return True async def async_unload_entry(hass, config_entry): """Unload a config entry.""" await hass.config_entries.async_forward_entry_unload(config_entry, "weather") return True
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
repo_name: GenericStudent/home-assistant
test_path: tests/components/hassio/test_init.py
code_path: homeassistant/components/ipma/__init__.py
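One reading note on the test file above: it indexes aioclient_mock.mock_calls positionally, e.g. mock_calls[1][2]["port"] and mock_calls[-1][3]["X-Hassio-Key"]. Judging from those assertions (an inference from the tests themselves, not something the dump documents), each recorded call behaves like a (method, url, data, headers) tuple. A standalone sketch of that layout:

# Stand-in data shaped the way the assertions above treat mock_calls;
# the 4-tuple layout (method, url, data, headers) is inferred from the tests.
mock_calls = [
    ("get", "http://127.0.0.1/supervisor/ping", None, {}),
    (
        "post",
        "http://127.0.0.1/homeassistant/options",
        {"ssl": False, "port": 8123, "watchdog": True},
        {"X-Hassio-Key": "abcdefgh"},
    ),
]

assert mock_calls[1][2]["port"] == 8123  # index 2: the request payload
assert mock_calls[-1][3]["X-Hassio-Key"] == "abcdefgh"  # index 3: the headers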
"""Support for LIFX Cloud scenes.""" import asyncio import logging from typing import Any import aiohttp from aiohttp.hdrs import AUTHORIZATION import async_timeout import voluptuous as vol from homeassistant.components.scene import Scene from homeassistant.const import ( CONF_PLATFORM, CONF_TIMEOUT, CONF_TOKEN, HTTP_OK, HTTP_UNAUTHORIZED, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_TIMEOUT = 10 PLATFORM_SCHEMA = vol.Schema( { vol.Required(CONF_PLATFORM): "lifx_cloud", vol.Required(CONF_TOKEN): cv.string, vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the scenes stored in the LIFX Cloud.""" token = config.get(CONF_TOKEN) timeout = config.get(CONF_TIMEOUT) headers = {AUTHORIZATION: f"Bearer {token}"} url = "https://api.lifx.com/v1/scenes" try: httpsession = async_get_clientsession(hass) with async_timeout.timeout(timeout): scenes_resp = await httpsession.get(url, headers=headers) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.exception("Error on %s", url) return False status = scenes_resp.status if status == HTTP_OK: data = await scenes_resp.json() devices = [LifxCloudScene(hass, headers, timeout, scene) for scene in data] async_add_entities(devices) return True if status == HTTP_UNAUTHORIZED: _LOGGER.error("Unauthorized (bad token?) on %s", url) return False _LOGGER.error("HTTP error %d on %s", scenes_resp.status, url) return False class LifxCloudScene(Scene): """Representation of a LIFX Cloud scene.""" def __init__(self, hass, headers, timeout, scene_data): """Initialize the scene.""" self.hass = hass self._headers = headers self._timeout = timeout self._name = scene_data["name"] self._uuid = scene_data["uuid"] @property def name(self): """Return the name of the scene.""" return self._name async def async_activate(self, **kwargs: Any) -> None: """Activate the scene.""" url = f"https://api.lifx.com/v1/scenes/scene_id:{self._uuid}/activate" try: httpsession = async_get_clientsession(self.hass) with async_timeout.timeout(self._timeout): await httpsession.put(url, headers=self._headers) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.exception("Error on %s", url)
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
repo_name: GenericStudent/home-assistant
test_path: tests/components/hassio/test_init.py
code_path: homeassistant/components/lifx_cloud/scene.py
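The lifx_cloud platform above validates its configuration with a voluptuous PLATFORM_SCHEMA. A standalone sketch of the same validation pattern, restated without Home Assistant's config_validation helpers so it runs with voluptuous alone (the string keys mirror CONF_PLATFORM, CONF_TOKEN, and CONF_TIMEOUT; the token value is a placeholder):

import voluptuous as vol

# Mirrors the shape of the lifx_cloud PLATFORM_SCHEMA defined above.
schema = vol.Schema(
    {
        vol.Required("platform"): "lifx_cloud",
        vol.Required("token"): str,
        vol.Optional("timeout", default=10): vol.All(int, vol.Range(min=0)),
    }
)

# When "timeout" is omitted, the declared default of 10 is filled in.
validated = schema({"platform": "lifx_cloud", "token": "YOUR_LIFX_TOKEN"})
assert validated["timeout"] == 10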
"""Support for Toon switches.""" from typing import Any from toonapi import ( ACTIVE_STATE_AWAY, ACTIVE_STATE_HOLIDAY, PROGRAM_STATE_OFF, PROGRAM_STATE_ON, ) from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.typing import HomeAssistantType from .const import ( ATTR_DEFAULT_ENABLED, ATTR_ICON, ATTR_INVERTED, ATTR_MEASUREMENT, ATTR_NAME, ATTR_SECTION, DOMAIN, SWITCH_ENTITIES, ) from .coordinator import ToonDataUpdateCoordinator from .helpers import toon_exception_handler from .models import ToonDisplayDeviceEntity, ToonEntity async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up a Toon switches based on a config entry.""" coordinator = hass.data[DOMAIN][entry.entry_id] async_add_entities( [ToonProgramSwitch(coordinator), ToonHolidayModeSwitch(coordinator)] ) class ToonSwitch(ToonEntity, SwitchEntity): """Defines an Toon switch.""" def __init__(self, coordinator: ToonDataUpdateCoordinator, *, key: str) -> None: """Initialize the Toon switch.""" self.key = key super().__init__( coordinator, enabled_default=SWITCH_ENTITIES[key][ATTR_DEFAULT_ENABLED], icon=SWITCH_ENTITIES[key][ATTR_ICON], name=SWITCH_ENTITIES[key][ATTR_NAME], ) @property def unique_id(self) -> str: """Return the unique ID for this binary sensor.""" agreement_id = self.coordinator.data.agreement.agreement_id return f"{agreement_id}_{self.key}" @property def is_on(self) -> bool: """Return the status of the binary sensor.""" section = getattr( self.coordinator.data, SWITCH_ENTITIES[self.key][ATTR_SECTION] ) value = getattr(section, SWITCH_ENTITIES[self.key][ATTR_MEASUREMENT]) if SWITCH_ENTITIES[self.key][ATTR_INVERTED]: return not value return value class ToonProgramSwitch(ToonSwitch, ToonDisplayDeviceEntity): """Defines a Toon program switch.""" def __init__(self, coordinator: ToonDataUpdateCoordinator) -> None: """Initialize the Toon program switch.""" super().__init__(coordinator, key="thermostat_program") @toon_exception_handler async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the Toon program switch.""" await self.coordinator.toon.set_active_state( ACTIVE_STATE_AWAY, PROGRAM_STATE_OFF ) @toon_exception_handler async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the Toon program switch.""" await self.coordinator.toon.set_active_state( ACTIVE_STATE_AWAY, PROGRAM_STATE_ON ) class ToonHolidayModeSwitch(ToonSwitch, ToonDisplayDeviceEntity): """Defines a Toon Holiday mode switch.""" def __init__(self, coordinator: ToonDataUpdateCoordinator) -> None: """Initialize the Toon holiday switch.""" super().__init__(coordinator, key="thermostat_holiday_mode") @toon_exception_handler async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the Toon holiday mode switch.""" await self.coordinator.toon.set_active_state( ACTIVE_STATE_AWAY, PROGRAM_STATE_ON ) @toon_exception_handler async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the Toon holiday mode switch.""" await self.coordinator.toon.set_active_state( ACTIVE_STATE_HOLIDAY, PROGRAM_STATE_OFF )
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
repo_name: GenericStudent/home-assistant
test_path: tests/components/hassio/test_init.py
code_path: homeassistant/components/toon/switch.py
"""Support for raspihats board binary sensors.""" import logging import voluptuous as vol from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity from homeassistant.const import ( CONF_ADDRESS, CONF_DEVICE_CLASS, CONF_NAME, DEVICE_DEFAULT_NAME, ) import homeassistant.helpers.config_validation as cv from . import ( CONF_BOARD, CONF_CHANNELS, CONF_I2C_HATS, CONF_INDEX, CONF_INVERT_LOGIC, I2C_HAT_NAMES, I2C_HATS_MANAGER, I2CHatsException, ) _LOGGER = logging.getLogger(__name__) DEFAULT_INVERT_LOGIC = False DEFAULT_DEVICE_CLASS = None _CHANNELS_SCHEMA = vol.Schema( [ { vol.Required(CONF_INDEX): cv.positive_int, vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean, vol.Optional(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): cv.string, } ] ) _I2C_HATS_SCHEMA = vol.Schema( [ { vol.Required(CONF_BOARD): vol.In(I2C_HAT_NAMES), vol.Required(CONF_ADDRESS): vol.Coerce(int), vol.Required(CONF_CHANNELS): _CHANNELS_SCHEMA, } ] ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_I2C_HATS): _I2C_HATS_SCHEMA} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the raspihats binary_sensor devices.""" I2CHatBinarySensor.I2C_HATS_MANAGER = hass.data[I2C_HATS_MANAGER] binary_sensors = [] i2c_hat_configs = config.get(CONF_I2C_HATS) for i2c_hat_config in i2c_hat_configs: address = i2c_hat_config[CONF_ADDRESS] board = i2c_hat_config[CONF_BOARD] try: I2CHatBinarySensor.I2C_HATS_MANAGER.register_board(board, address) for channel_config in i2c_hat_config[CONF_CHANNELS]: binary_sensors.append( I2CHatBinarySensor( address, channel_config[CONF_INDEX], channel_config[CONF_NAME], channel_config[CONF_INVERT_LOGIC], channel_config[CONF_DEVICE_CLASS], ) ) except I2CHatsException as ex: _LOGGER.error( "Failed to register %s I2CHat@%s %s", board, hex(address), str(ex) ) add_entities(binary_sensors) class I2CHatBinarySensor(BinarySensorEntity): """Representation of a binary sensor that uses a I2C-HAT digital input.""" I2C_HATS_MANAGER = None def __init__(self, address, channel, name, invert_logic, device_class): """Initialize the raspihats sensor.""" self._address = address self._channel = channel self._name = name or DEVICE_DEFAULT_NAME self._invert_logic = invert_logic self._device_class = device_class self._state = self.I2C_HATS_MANAGER.read_di(self._address, self._channel) def online_callback(): """Call fired when board is online.""" self.schedule_update_ha_state() self.I2C_HATS_MANAGER.register_online_callback( self._address, self._channel, online_callback ) def edge_callback(state): """Read digital input state.""" self._state = state self.schedule_update_ha_state() self.I2C_HATS_MANAGER.register_di_callback( self._address, self._channel, edge_callback ) @property def device_class(self): """Return the class of this sensor.""" return self._device_class @property def name(self): """Return the name of this sensor.""" return self._name @property def should_poll(self): """No polling needed for this sensor.""" return False @property def is_on(self): """Return the state of this sensor.""" return self._state != self._invert_logic
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
repo_name: GenericStudent/home-assistant
test_path: tests/components/hassio/test_init.py
code_path: homeassistant/components/raspihats/binary_sensor.py
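A small point worth noting in the raspihats sensor above: is_on computes self._state != self._invert_logic, using inequality as a logical XOR so a single expression serves both normal and inverted channels. A standalone illustration:

# state != invert_logic acts as XOR: inverted channels report the opposite
# of the raw digital input, while non-inverted channels report it as-is.
for state in (True, False):
    for invert_logic in (False, True):
        is_on = state != invert_logic
        print(f"raw={state!s:<5} invert={invert_logic!s:<5} -> is_on={is_on}")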
"""Support for Android IP Webcam sensors.""" from homeassistant.helpers.icon import icon_for_battery_level from . import ( CONF_HOST, CONF_NAME, CONF_SENSORS, DATA_IP_WEBCAM, ICON_MAP, KEY_MAP, AndroidIPCamEntity, ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the IP Webcam Sensor.""" if discovery_info is None: return host = discovery_info[CONF_HOST] name = discovery_info[CONF_NAME] sensors = discovery_info[CONF_SENSORS] ipcam = hass.data[DATA_IP_WEBCAM][host] all_sensors = [] for sensor in sensors: all_sensors.append(IPWebcamSensor(name, host, ipcam, sensor)) async_add_entities(all_sensors, True) class IPWebcamSensor(AndroidIPCamEntity): """Representation of a IP Webcam sensor.""" def __init__(self, name, host, ipcam, sensor): """Initialize the sensor.""" super().__init__(host, ipcam) self._sensor = sensor self._mapped_name = KEY_MAP.get(self._sensor, self._sensor) self._name = f"{name} {self._mapped_name}" self._state = None self._unit = None @property def name(self): """Return the name of the sensor, if any.""" return self._name @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit @property def state(self): """Return the state of the sensor.""" return self._state async def async_update(self): """Retrieve latest state.""" if self._sensor in ("audio_connections", "video_connections"): if not self._ipcam.status_data: return self._state = self._ipcam.status_data.get(self._sensor) self._unit = "Connections" else: self._state, self._unit = self._ipcam.export_sensor(self._sensor) @property def icon(self): """Return the icon for the sensor.""" if self._sensor == "battery_level" and self._state is not None: return icon_for_battery_level(int(self._state)) return ICON_MAP.get(self._sensor, "mdi:eye")
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/android_ip_webcam/sensor.py
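A minimal test-style sketch of the battery-icon logic above. This is a hypothetical illustration, not part of the repository; it assumes AndroidIPCamEntity's initializer only stores the host and ipcam objects, and it pokes `_state` directly as a shortcut instead of running async_update.

from unittest.mock import MagicMock

from homeassistant.components.android_ip_webcam.sensor import IPWebcamSensor


def test_battery_icon_sketch():
    """A battery_level sensor should report a battery icon once it has a state."""
    sensor = IPWebcamSensor("Cam", "192.168.1.2", MagicMock(), "battery_level")
    sensor._state = 50  # simulate a fetched battery level
    assert sensor.icon.startswith("mdi:battery")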
"""Support for particulate matter sensors connected to a serial port.""" import logging from pmsensor import serial_pm as pm import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) CONF_BRAND = "brand" CONF_SERIAL_DEVICE = "serial_device" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_BRAND): cv.string, vol.Required(CONF_SERIAL_DEVICE): cv.string, vol.Optional(CONF_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the available PM sensors.""" try: coll = pm.PMDataCollector( config.get(CONF_SERIAL_DEVICE), pm.SUPPORTED_SENSORS[config.get(CONF_BRAND)] ) except KeyError: _LOGGER.error( "Brand %s not supported\n supported brands: %s", config.get(CONF_BRAND), pm.SUPPORTED_SENSORS.keys(), ) return except OSError as err: _LOGGER.error( "Could not open serial connection to %s (%s)", config.get(CONF_SERIAL_DEVICE), err, ) return dev = [] for pmname in coll.supported_values(): if config.get(CONF_NAME) is not None: name = "{} PM{}".format(config.get(CONF_NAME), pmname) else: name = f"PM{pmname}" dev.append(ParticulateMatterSensor(coll, name, pmname)) add_entities(dev) class ParticulateMatterSensor(Entity): """Representation of an Particulate matter sensor.""" def __init__(self, pmDataCollector, name, pmname): """Initialize a new PM sensor.""" self._name = name self._pmname = pmname self._state = None self._collector = pmDataCollector @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER def update(self): """Read from sensor and update the state.""" _LOGGER.debug("Reading data from PM sensor") try: self._state = self._collector.read_data()[self._pmname] except KeyError: _LOGGER.error("Could not read PM%s value", self._pmname)
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/serial_pm/sensor.py
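As a hedged sketch (a hypothetical test, not from the repository), the unsupported-brand branch above can be exercised without any serial hardware; it assumes the pmsensor package is installed and that "not_a_real_brand" is absent from pm.SUPPORTED_SENSORS.

from unittest.mock import MagicMock

from homeassistant.components.serial_pm import sensor as serial_pm


def test_unsupported_brand_sketch():
    """An unknown brand should be logged and add no entities."""
    add_entities = MagicMock()
    config = {"brand": "not_a_real_brand", "serial_device": "/dev/ttyUSB0"}
    # hass is unused on this code path, so None is enough for the sketch.
    serial_pm.setup_platform(None, config, add_entities)
    add_entities.assert_not_called()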
"""Support for Vera devices.""" import asyncio from collections import defaultdict import logging from typing import Any, Dict, Generic, List, Optional, Type, TypeVar import pyvera as veraApi from requests.exceptions import RequestException import voluptuous as vol from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ARMED, ATTR_BATTERY_LEVEL, ATTR_LAST_TRIP_TIME, ATTR_TRIPPED, CONF_EXCLUDE, CONF_LIGHTS, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import convert, slugify from homeassistant.util.dt import utc_from_timestamp from .common import ( ControllerData, SubscriptionRegistry, get_configured_platforms, get_controller_data, set_controller_data, ) from .config_flow import fix_device_id_list, new_options from .const import ( ATTR_CURRENT_ENERGY_KWH, ATTR_CURRENT_POWER_W, CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN, VERA_ID_FORMAT, ) _LOGGER = logging.getLogger(__name__) VERA_ID_LIST_SCHEMA = vol.Schema([int]) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_CONTROLLER): cv.url, vol.Optional(CONF_EXCLUDE, default=[]): VERA_ID_LIST_SCHEMA, vol.Optional(CONF_LIGHTS, default=[]): VERA_ID_LIST_SCHEMA, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: HomeAssistant, base_config: dict) -> bool: """Set up for Vera controllers.""" hass.data[DOMAIN] = {} config = base_config.get(DOMAIN) if not config: return True hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config, ) ) return True async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Do setup of vera.""" # Use options entered during initial config flow or provided from configuration.yml if config_entry.data.get(CONF_LIGHTS) or config_entry.data.get(CONF_EXCLUDE): hass.config_entries.async_update_entry( entry=config_entry, data=config_entry.data, options=new_options( config_entry.data.get(CONF_LIGHTS, []), config_entry.data.get(CONF_EXCLUDE, []), ), ) saved_light_ids = config_entry.options.get(CONF_LIGHTS, []) saved_exclude_ids = config_entry.options.get(CONF_EXCLUDE, []) base_url = config_entry.data[CONF_CONTROLLER] light_ids = fix_device_id_list(saved_light_ids) exclude_ids = fix_device_id_list(saved_exclude_ids) # If the ids were corrected. Update the config entry. if light_ids != saved_light_ids or exclude_ids != saved_exclude_ids: hass.config_entries.async_update_entry( entry=config_entry, options=new_options(light_ids, exclude_ids) ) # Initialize the Vera controller. subscription_registry = SubscriptionRegistry(hass) controller = veraApi.VeraController(base_url, subscription_registry) await hass.async_add_executor_job(controller.start) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.stop) try: all_devices = await hass.async_add_executor_job(controller.get_devices) all_scenes = await hass.async_add_executor_job(controller.get_scenes) except RequestException as exception: # There was a network related error connecting to the Vera controller. _LOGGER.exception("Error communicating with Vera API") raise ConfigEntryNotReady from exception # Exclude devices unwanted by user. 
devices = [device for device in all_devices if device.device_id not in exclude_ids] vera_devices = defaultdict(list) for device in devices: device_type = map_vera_device(device, light_ids) if device_type is not None: vera_devices[device_type].append(device) vera_scenes = [] for scene in all_scenes: vera_scenes.append(scene) controller_data = ControllerData( controller=controller, devices=vera_devices, scenes=vera_scenes, config_entry=config_entry, ) set_controller_data(hass, config_entry, controller_data) # Forward the config data to the necessary platforms. for platform in get_configured_platforms(controller_data): hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, platform) ) return True async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload Withings config entry.""" controller_data: ControllerData = get_controller_data(hass, config_entry) tasks = [ hass.config_entries.async_forward_entry_unload(config_entry, platform) for platform in get_configured_platforms(controller_data) ] tasks.append(hass.async_add_executor_job(controller_data.controller.stop)) await asyncio.gather(*tasks) return True def map_vera_device(vera_device: veraApi.VeraDevice, remap: List[int]) -> str: """Map vera classes to Home Assistant types.""" type_map = { veraApi.VeraDimmer: "light", veraApi.VeraBinarySensor: "binary_sensor", veraApi.VeraSensor: "sensor", veraApi.VeraArmableDevice: "switch", veraApi.VeraLock: "lock", veraApi.VeraThermostat: "climate", veraApi.VeraCurtain: "cover", veraApi.VeraSceneController: "sensor", veraApi.VeraSwitch: "switch", } def map_special_case(instance_class: Type, entity_type: str) -> str: if instance_class is veraApi.VeraSwitch and vera_device.device_id in remap: return "light" return entity_type return next( iter( map_special_case(instance_class, entity_type) for instance_class, entity_type in type_map.items() if isinstance(vera_device, instance_class) ), None, ) DeviceType = TypeVar("DeviceType", bound=veraApi.VeraDevice) class VeraDevice(Generic[DeviceType], Entity): """Representation of a Vera device entity.""" def __init__(self, vera_device: DeviceType, controller_data: ControllerData): """Initialize the device.""" self.vera_device = vera_device self.controller = controller_data.controller self._name = self.vera_device.name # Append device id to prevent name clashes in HA. 
self.vera_id = VERA_ID_FORMAT.format( slugify(vera_device.name), vera_device.vera_device_id ) if controller_data.config_entry.data.get(CONF_LEGACY_UNIQUE_ID): self._unique_id = str(self.vera_device.vera_device_id) else: self._unique_id = f"vera_{controller_data.config_entry.unique_id}_{self.vera_device.vera_device_id}" async def async_added_to_hass(self) -> None: """Subscribe to updates.""" self.controller.register(self.vera_device, self._update_callback) def _update_callback(self, _device: DeviceType) -> None: """Update the state.""" self.schedule_update_ha_state(True) @property def name(self) -> str: """Return the name of the device.""" return self._name @property def should_poll(self) -> bool: """Get polling requirement from vera device.""" return self.vera_device.should_poll @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the state attributes of the device.""" attr = {} if self.vera_device.has_battery: attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level if self.vera_device.is_armable: armed = self.vera_device.is_armed attr[ATTR_ARMED] = "True" if armed else "False" if self.vera_device.is_trippable: last_tripped = self.vera_device.last_trip if last_tripped is not None: utc_time = utc_from_timestamp(int(last_tripped)) attr[ATTR_LAST_TRIP_TIME] = utc_time.isoformat() else: attr[ATTR_LAST_TRIP_TIME] = None tripped = self.vera_device.is_tripped attr[ATTR_TRIPPED] = "True" if tripped else "False" power = self.vera_device.power if power: attr[ATTR_CURRENT_POWER_W] = convert(power, float, 0.0) energy = self.vera_device.energy if energy: attr[ATTR_CURRENT_ENERGY_KWH] = convert(energy, float, 0.0) attr["Vera Device Id"] = self.vera_device.vera_device_id return attr @property def unique_id(self) -> str: """Return a unique ID. The Vera assigns a unique and immutable ID number to each device. """ return self._unique_id
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/vera/__init__.py
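The map_vera_device helper above is pure enough to check in isolation. A hedged sketch (hypothetical test, not from the repository) that relies on mock's documented spec= behavior to satisfy the isinstance checks:

from unittest.mock import MagicMock

import pyvera as veraApi

from homeassistant.components.vera import map_vera_device


def test_map_vera_device_sketch():
    """Dimmers map to light; objects outside the type map yield None."""
    dimmer = MagicMock(spec=veraApi.VeraDimmer)
    assert map_vera_device(dimmer, []) == "light"
    # A plain mock matches none of the pyvera classes.
    assert map_vera_device(MagicMock(), []) is None

The empty remap list keeps the VeraSwitch special case out of play, so the mock never needs a device_id attribute.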
"""Config flow to configure the OVO Energy integration.""" import aiohttp from ovoenergy.ovoenergy import OVOEnergy import voluptuous as vol from homeassistant import config_entries from homeassistant.config_entries import ConfigFlow from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_ACCOUNT_ID, DOMAIN # pylint: disable=unused-import USER_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} ) class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a OVO Energy config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL async def async_step_user(self, user_input=None): """Handle a flow initiated by the user.""" errors = {} if user_input is not None: client = OVOEnergy() try: authenticated = await client.authenticate( user_input[CONF_USERNAME], user_input[CONF_PASSWORD] ) except aiohttp.ClientError: errors["base"] = "cannot_connect" else: if authenticated: await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() return self.async_create_entry( title=client.account_id, data={ CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_ACCOUNT_ID: client.account_id, }, ) errors["base"] = "invalid_auth" return self.async_show_form( step_id="user", data_schema=USER_SCHEMA, errors=errors )
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/ovo_energy/config_flow.py
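A hedged sketch of how the invalid-credentials branch above could be driven in a test. It assumes Home Assistant's standard `hass` test fixture and uses `unittest.mock.AsyncMock` (Python 3.8+) in place of the repository's `tests.async_mock` shim; the credentials are placeholders.

from unittest.mock import AsyncMock, patch

from homeassistant import config_entries
from homeassistant.components.ovo_energy.const import DOMAIN


async def test_invalid_auth_sketch(hass):
    """Failed authentication should re-show the form with invalid_auth."""
    with patch(
        "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate",
        new=AsyncMock(return_value=False),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={"username": "example@example.com", "password": "wrong"},
        )
    assert result["errors"] == {"base": "invalid_auth"}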
"""UpCloud constants.""" from datetime import timedelta DOMAIN = "upcloud" DEFAULT_SCAN_INTERVAL = timedelta(seconds=60) CONFIG_ENTRY_UPDATE_SIGNAL_TEMPLATE = f"{DOMAIN}_config_entry_update:" "{}"
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/upcloud/const.py
"""Support for OpenTherm Gateway sensors.""" import logging from homeassistant.components.sensor import ENTITY_ID_FORMAT from homeassistant.const import CONF_ID from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity, async_generate_entity_id from . import DOMAIN from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, SENSOR_INFO _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the OpenTherm Gateway sensors.""" sensors = [] for var, info in SENSOR_INFO.items(): device_class = info[0] unit = info[1] friendly_name_format = info[2] sensors.append( OpenThermSensor( hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]], var, device_class, unit, friendly_name_format, ) ) async_add_entities(sensors) class OpenThermSensor(Entity): """Representation of an OpenTherm Gateway sensor.""" def __init__(self, gw_dev, var, device_class, unit, friendly_name_format): """Initialize the OpenTherm Gateway sensor.""" self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, f"{var}_{gw_dev.gw_id}", hass=gw_dev.hass ) self._gateway = gw_dev self._var = var self._value = None self._device_class = device_class self._unit = unit self._friendly_name = friendly_name_format.format(gw_dev.name) self._unsub_updates = None async def async_added_to_hass(self): """Subscribe to updates from the component.""" _LOGGER.debug("Added OpenTherm Gateway sensor %s", self._friendly_name) self._unsub_updates = async_dispatcher_connect( self.hass, self._gateway.update_signal, self.receive_report ) async def async_will_remove_from_hass(self): """Unsubscribe from updates from the component.""" _LOGGER.debug("Removing OpenTherm Gateway sensor %s", self._friendly_name) self._unsub_updates() @property def available(self): """Return availability of the sensor.""" return self._value is not None @property def entity_registry_enabled_default(self): """Disable sensors by default.""" return False @callback def receive_report(self, status): """Handle status updates from the component.""" value = status.get(self._var) if isinstance(value, float): value = f"{value:2.1f}" self._value = value self.async_write_ha_state() @property def name(self): """Return the friendly name of the sensor.""" return self._friendly_name @property def device_info(self): """Return device info.""" return { "identifiers": {(DOMAIN, self._gateway.gw_id)}, "name": self._gateway.name, "manufacturer": "Schelte Bron", "model": "OpenTherm Gateway", "sw_version": self._gateway.gw_version, } @property def unique_id(self): """Return a unique ID.""" return f"{self._gateway.gw_id}-{self._var}" @property def device_class(self): """Return the device class.""" return self._device_class @property def state(self): """Return the state of the device.""" return self._value @property def unit_of_measurement(self): """Return the unit of measurement.""" return self._unit @property def should_poll(self): """Return False because entity pushes its state.""" return False
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/opentherm_gw/sensor.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module contains simple input/output related functionality that is not
part of a larger framework or standard.
"""

import pickle

__all__ = ['fnpickle', 'fnunpickle']


def fnunpickle(fileorname, number=0):
    """ Unpickle pickled objects from a specified file and return the contents.

    Parameters
    ----------
    fileorname : str or file-like
        The file name or file from which to unpickle objects. If a file object,
        it should have been opened in binary mode.
    number : int
        If 0, a single object will be returned (the first in the file). If >0,
        this specifies the number of objects to be unpickled, and a list will
        be returned with exactly that many objects. If <0, all objects in the
        file will be unpickled and returned as a list.

    Raises
    ------
    EOFError
        If ``number`` is >0 and there are fewer than ``number`` objects in the
        pickled file.

    Returns
    -------
    contents : obj or list
        If ``number`` is 0, this is an individual object - the first one
        unpickled from the file. Otherwise, it is a list of objects unpickled
        from the file.
    """
    if isinstance(fileorname, str):
        f = open(fileorname, 'rb')
        close = True
    else:
        f = fileorname
        close = False

    try:
        if number > 0:  # get that number
            res = []
            for i in range(number):
                res.append(pickle.load(f))
        elif number < 0:  # get all objects
            res = []
            eof = False
            while not eof:
                try:
                    res.append(pickle.load(f))
                except EOFError:
                    eof = True
        else:  # number==0
            res = pickle.load(f)
    finally:
        if close:
            f.close()

    return res


def fnpickle(object, fileorname, protocol=None, append=False):
    """Pickle an object to a specified file.

    Parameters
    ----------
    object
        The python object to pickle.
    fileorname : str or file-like
        The filename or file into which the `object` should be pickled. If a
        file object, it should have been opened in binary mode.
    protocol : int or None
        Pickle protocol to use - see the :mod:`pickle` module for details on
        these options. If None, the most recent protocol will be used.
    append : bool
        If True, the object is appended to the end of the file, otherwise the
        file will be overwritten (if a file object is given instead of a file
        name, this has no effect).
    """
    if protocol is None:
        protocol = pickle.HIGHEST_PROTOCOL

    if isinstance(fileorname, str):
        f = open(fileorname, 'ab' if append else 'wb')
        close = True
    else:
        f = fileorname
        close = False

    try:
        pickle.dump(object, f, protocol=protocol)
    finally:
        if close:
            f.close()
# Tests related to writing dask arrays to FITS files in an efficient way

import pytest
import numpy as np

from astropy.io import fits
from astropy.io.fits import ImageHDU, PrimaryHDU

da = pytest.importorskip("dask.array")


@pytest.fixture
def dask_array_in_mem():
    return da.random.uniform(-1000, 1000, (1322, 755)).rechunk((59, 55))


def test_construct_image_hdu(dask_array_in_mem):
    hdu = ImageHDU(data=dask_array_in_mem)
    assert isinstance(hdu.data, da.Array)


def test_construct_hdulist(dask_array_in_mem):
    hdu = ImageHDU(data=dask_array_in_mem)
    hdulist = fits.HDUList([hdu])
    assert isinstance(hdulist[0].data, da.Array)


def test_save_primary_hdu(dask_array_in_mem, tmp_path):
    # Saving a Primary HDU directly
    filename = tmp_path / 'test.fits'
    hdu = PrimaryHDU(data=dask_array_in_mem)
    hdu.writeto(filename)
    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data, dask_array_in_mem.compute())


def test_save_image_hdu(dask_array_in_mem, tmp_path):
    # Saving an image HDU directly
    filename = tmp_path / 'test.fits'
    hdu = ImageHDU(data=dask_array_in_mem)
    hdu.writeto(filename)
    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[1].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[1].data, dask_array_in_mem.compute())


def test_save_hdulist(dask_array_in_mem, tmp_path):
    # Saving an HDUList
    filename = tmp_path / 'test.fits'
    hdu1 = PrimaryHDU(data=dask_array_in_mem)
    hdu2 = ImageHDU(data=np.random.random((128, 128)))
    hdu3 = ImageHDU(data=dask_array_in_mem * 2)
    hdulist = fits.HDUList([hdu1, hdu2, hdu3])
    assert isinstance(hdulist[0].data, da.Array)
    hdulist.writeto(filename)
    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data, dask_array_in_mem.compute())
        assert isinstance(hdulist_new[1].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[1].data, hdu2.data)
        assert isinstance(hdulist_new[2].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[2].data,
                                   dask_array_in_mem.compute() * 2)


def test_long_header(dask_array_in_mem, tmp_path):
    # Make sure things work correctly if there is a long header in the HDU.
    filename = tmp_path / 'test.fits'
    # NOTE: we deliberately set up a long header here rather than add the
    # keys one by one to hdu.header as adding the header in one go used to
    # cause issues, so this acts as a regression test.
    header = fits.Header()
    for index in range(2048):
        header[f'KEY{index:x}'] = 0.
    hdu = PrimaryHDU(data=dask_array_in_mem, header=header)
    hdu.writeto(filename)
    with fits.open(filename) as hdulist_new:
        assert len(hdulist_new[0].header) == 2053
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data, dask_array_in_mem.compute())


VALID_DTYPES = ('>i2', '<i2', '>i4', '<i4', '>i8', '<i8', '>f4', '<f4', '>f8', '<f8')


@pytest.mark.parametrize('dtype', VALID_DTYPES)
def test_dtypes(dask_array_in_mem, tmp_path, dtype):
    filename = tmp_path / 'test.fits'
    array = dask_array_in_mem.astype(dtype)
    hdu = PrimaryHDU(data=array)
    hdu.writeto(filename)
    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data, array.compute())


def test_scaled(dask_array_in_mem, tmp_path):
    filename = tmp_path / 'test.fits'
    hdu = PrimaryHDU(data=dask_array_in_mem)
    hdu.scale('int32', bzero=-1000, bscale=1e-6)
    hdu.writeto(filename)
    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data,
                                   dask_array_in_mem.compute(), atol=1e-5)


def test_scaled_minmax(dask_array_in_mem, tmp_path):
    filename = tmp_path / 'test.fits'
    hdu = PrimaryHDU(data=dask_array_in_mem)
    hdu.scale('int32', option='minmax')
    hdu.writeto(filename)
    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data,
                                   dask_array_in_mem.compute(), atol=1e-5)


def test_append(dask_array_in_mem, tmp_path):
    # Test append mode
    filename = tmp_path / 'test.fits'
    fits.append(filename, dask_array_in_mem)
    fits.append(filename, np.arange(10))
    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data, dask_array_in_mem.compute())
        assert isinstance(hdulist_new[1].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[1].data, np.arange(10))


# @pytest.mark.parametrize('mode', ['rb+', 'ab', 'ab+', 'wb', 'wb+'])
@pytest.mark.parametrize('mode', ['wb', 'wb+'])
def test_file_handle(mode, dask_array_in_mem, tmp_path):
    filename = tmp_path / 'test.fits'
    hdu1 = PrimaryHDU(data=dask_array_in_mem)
    hdu2 = ImageHDU(data=np.arange(10))
    hdulist = fits.HDUList([hdu1, hdu2])
    with filename.open(mode=mode) as fp:
        hdulist.writeto(fp)
    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data, dask_array_in_mem.compute())
        assert isinstance(hdulist_new[1].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[1].data, np.arange(10))
dhomeier/astropy
astropy/io/fits/tests/test_image_dask.py
astropy/io/misc/pickle_helpers.py
import pandas as pd
from pandas.core.internals import ObjectBlock

from .base import BaseExtensionTests


class BaseCastingTests(BaseExtensionTests):
    """Casting to and from ExtensionDtypes"""

    def test_astype_object_series(self, all_data):
        ser = pd.Series({"A": all_data})
        result = ser.astype(object)
        assert isinstance(result._data.blocks[0], ObjectBlock)

    def test_tolist(self, data):
        result = pd.Series(data).tolist()
        expected = list(data)
        assert result == expected

    def test_astype_str(self, data):
        result = pd.Series(data[:5]).astype(str)
        expected = pd.Series(data[:5].astype(str))
        self.assert_series_equal(result, expected)
# -*- coding: utf-8 -*-

from __future__ import print_function

import pytest

from datetime import datetime
import re

from pandas.compat import (zip, range, lrange, StringIO)
from pandas import (DataFrame, Series, Index, date_range, compat,
                    Timestamp)
import pandas as pd

from numpy import nan
import numpy as np

from pandas.util.testing import (assert_series_equal,
                                 assert_frame_equal)
import pandas.util.testing as tm

from pandas.tests.frame.common import TestData


class TestDataFrameReplace(TestData):

    def test_replace_inplace(self):
        self.tsframe['A'][:5] = nan
        self.tsframe['A'][-5:] = nan

        tsframe = self.tsframe.copy()
        tsframe.replace(nan, 0, inplace=True)
        assert_frame_equal(tsframe, self.tsframe.fillna(0))

        # mixed type
        mf = self.mixed_frame
        mf.iloc[5:20, mf.columns.get_loc('foo')] = nan
        mf.iloc[-10:, mf.columns.get_loc('A')] = nan

        result = self.mixed_frame.replace(np.nan, 0)
        expected = self.mixed_frame.fillna(value=0)
        assert_frame_equal(result, expected)

        tsframe = self.tsframe.copy()
        tsframe.replace([nan], [0], inplace=True)
        assert_frame_equal(tsframe, self.tsframe.fillna(0))

    def test_regex_replace_scalar(self):
        obj = {'a': list('ab..'), 'b': list('efgh')}
        dfobj = DataFrame(obj)
        mix = {'a': lrange(4), 'b': list('ab..')}
        dfmix = DataFrame(mix)

        # simplest cases
        # regex -> value
        # obj frame
        res = dfobj.replace(r'\s*\.\s*', nan, regex=True)
        assert_frame_equal(dfobj, res.fillna('.'))

        # mixed
        res = dfmix.replace(r'\s*\.\s*', nan, regex=True)
        assert_frame_equal(dfmix, res.fillna('.'))

        # regex -> regex
        # obj frame
        res = dfobj.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True)
        objc = obj.copy()
        objc['a'] = ['a', 'b', '...', '...']
        expec = DataFrame(objc)
        assert_frame_equal(res, expec)

        # with mixed
        res = dfmix.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True)
        mixc = mix.copy()
        mixc['b'] = ['a', 'b', '...', '...']
        expec = DataFrame(mixc)
        assert_frame_equal(res, expec)

        # everything with compiled regexs as well
        res = dfobj.replace(re.compile(r'\s*\.\s*'), nan, regex=True)
        assert_frame_equal(dfobj, res.fillna('.'))

        # mixed
        res = dfmix.replace(re.compile(r'\s*\.\s*'), nan, regex=True)
        assert_frame_equal(dfmix, res.fillna('.'))

        # regex -> regex
        # obj frame
        res = dfobj.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1')
        objc = obj.copy()
        objc['a'] = ['a', 'b', '...', '...']
        expec = DataFrame(objc)
        assert_frame_equal(res, expec)

        # with mixed
        res = dfmix.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1')
        mixc = mix.copy()
        mixc['b'] = ['a', 'b', '...', '...']
        expec = DataFrame(mixc)
        assert_frame_equal(res, expec)

        res = dfmix.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1')
        mixc = mix.copy()
        mixc['b'] = ['a', 'b', '...', '...']
        expec = DataFrame(mixc)
        assert_frame_equal(res, expec)

        res = dfmix.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1')
        mixc = mix.copy()
        mixc['b'] = ['a', 'b', '...', '...']
        expec = DataFrame(mixc)
        assert_frame_equal(res, expec)

    def test_regex_replace_scalar_inplace(self):
        obj = {'a': list('ab..'), 'b': list('efgh')}
        dfobj = DataFrame(obj)
        mix = {'a': lrange(4), 'b': list('ab..')}
        dfmix = DataFrame(mix)

        # simplest cases
        # regex -> value
        # obj frame
        res = dfobj.copy()
        res.replace(r'\s*\.\s*', nan, regex=True, inplace=True)
        assert_frame_equal(dfobj, res.fillna('.'))

        # mixed
        res = dfmix.copy()
        res.replace(r'\s*\.\s*', nan, regex=True, inplace=True)
        assert_frame_equal(dfmix, res.fillna('.'))

        # regex -> regex
        # obj frame
        res = dfobj.copy()
        res.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True, inplace=True)
        objc = obj.copy()
        objc['a'] = ['a', 'b', '...', '...']
        expec = DataFrame(objc)
        assert_frame_equal(res, expec)

        # with mixed
        res = dfmix.copy()
        res.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True, inplace=True)
        mixc = mix.copy()
        mixc['b'] = ['a', 'b', '...', '...']
        expec = DataFrame(mixc)
        assert_frame_equal(res, expec)

        # everything with compiled regexs as well
        res = dfobj.copy()
        res.replace(re.compile(r'\s*\.\s*'), nan, regex=True, inplace=True)
        assert_frame_equal(dfobj, res.fillna('.'))

        # mixed
        res = dfmix.copy()
        res.replace(re.compile(r'\s*\.\s*'), nan, regex=True, inplace=True)
        assert_frame_equal(dfmix, res.fillna('.'))

        # regex -> regex
        # obj frame
        res = dfobj.copy()
        res.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1', regex=True,
                    inplace=True)
        objc = obj.copy()
        objc['a'] = ['a', 'b', '...', '...']
        expec = DataFrame(objc)
        assert_frame_equal(res, expec)

        # with mixed
        res = dfmix.copy()
        res.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1', regex=True,
                    inplace=True)
        mixc = mix.copy()
        mixc['b'] = ['a', 'b', '...', '...']
        expec = DataFrame(mixc)
        assert_frame_equal(res, expec)

        res = dfobj.copy()
        res.replace(regex=r'\s*\.\s*', value=nan, inplace=True)
        assert_frame_equal(dfobj, res.fillna('.'))

        # mixed
        res = dfmix.copy()
        res.replace(regex=r'\s*\.\s*', value=nan, inplace=True)
        assert_frame_equal(dfmix, res.fillna('.'))

        # regex -> regex
        # obj frame
        res = dfobj.copy()
        res.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1', inplace=True)
        objc = obj.copy()
        objc['a'] = ['a', 'b', '...', '...']
        expec = DataFrame(objc)
        assert_frame_equal(res, expec)

        # with mixed
        res = dfmix.copy()
        res.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1', inplace=True)
        mixc = mix.copy()
        mixc['b'] = ['a', 'b', '...', '...']
        expec = DataFrame(mixc)
        assert_frame_equal(res, expec)

        # everything with compiled regexs as well
        res = dfobj.copy()
        res.replace(regex=re.compile(r'\s*\.\s*'), value=nan, inplace=True)
        assert_frame_equal(dfobj, res.fillna('.'))

        # mixed
        res = dfmix.copy()
        res.replace(regex=re.compile(r'\s*\.\s*'), value=nan, inplace=True)
        assert_frame_equal(dfmix, res.fillna('.'))

        # regex -> regex
        # obj frame
        res = dfobj.copy()
        res.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1',
                    inplace=True)
        objc = obj.copy()
        objc['a'] = ['a', 'b', '...', '...']
        expec = DataFrame(objc)
        assert_frame_equal(res, expec)

        # with mixed
        res = dfmix.copy()
        res.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1',
                    inplace=True)
        mixc = mix.copy()
        mixc['b'] = ['a', 'b', '...', '...']
        expec = DataFrame(mixc)
        assert_frame_equal(res, expec)

    def test_regex_replace_list_obj(self):
        obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')}
        dfobj = DataFrame(obj)

        # lists of regexes and values
        # list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
        to_replace_res = [r'\s*\.\s*', r'e|f|g']
        values = [nan, 'crap']
        res = dfobj.replace(to_replace_res, values, regex=True)
        expec = DataFrame({'a': ['a', 'b', nan, nan],
                           'b': ['crap'] * 3 + ['h'],
                           'c': ['h', 'crap', 'l', 'o']})
        assert_frame_equal(res, expec)

        # list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
        to_replace_res = [r'\s*(\.)\s*', r'(e|f|g)']
        values = [r'\1\1', r'\1_crap']
        res = dfobj.replace(to_replace_res, values, regex=True)
        expec = DataFrame({'a': ['a', 'b', '..', '..'],
                           'b': ['e_crap', 'f_crap', 'g_crap', 'h'],
                           'c': ['h', 'e_crap', 'l', 'o']})
        assert_frame_equal(res, expec)

        # list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
        # or vN)]
        to_replace_res = [r'\s*(\.)\s*', r'e']
        values = [r'\1\1', r'crap']
        res = dfobj.replace(to_replace_res, values, regex=True)
        expec = DataFrame({'a': ['a', 'b', '..', '..'],
                           'b': ['crap', 'f', 'g', 'h'],
                           'c': ['h', 'crap', 'l', 'o']})
        assert_frame_equal(res, expec)

        to_replace_res = [r'\s*(\.)\s*', r'e']
        values = [r'\1\1', r'crap']
        res = dfobj.replace(value=values, regex=to_replace_res)
        expec = DataFrame({'a': ['a', 'b', '..', '..'],
                           'b': ['crap', 'f', 'g', 'h'],
                           'c': ['h', 'crap', 'l', 'o']})
        assert_frame_equal(res, expec)

    def test_regex_replace_list_obj_inplace(self):
        # same as above with inplace=True
        # lists of regexes and values
        obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')}
        dfobj = DataFrame(obj)

        # lists of regexes and values
        # list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
        to_replace_res = [r'\s*\.\s*', r'e|f|g']
        values = [nan, 'crap']
        res = dfobj.copy()
        res.replace(to_replace_res, values, inplace=True, regex=True)
        expec = DataFrame({'a': ['a', 'b', nan, nan],
                           'b': ['crap'] * 3 + ['h'],
                           'c': ['h', 'crap', 'l', 'o']})
        assert_frame_equal(res, expec)

        # list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
        to_replace_res = [r'\s*(\.)\s*', r'(e|f|g)']
        values = [r'\1\1', r'\1_crap']
        res = dfobj.copy()
        res.replace(to_replace_res, values, inplace=True, regex=True)
        expec = DataFrame({'a': ['a', 'b', '..', '..'],
                           'b': ['e_crap', 'f_crap', 'g_crap', 'h'],
                           'c': ['h', 'e_crap', 'l', 'o']})
        assert_frame_equal(res, expec)

        # list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
        # or vN)]
        to_replace_res = [r'\s*(\.)\s*', r'e']
        values = [r'\1\1', r'crap']
        res = dfobj.copy()
        res.replace(to_replace_res, values, inplace=True, regex=True)
        expec = DataFrame({'a': ['a', 'b', '..', '..'],
                           'b': ['crap', 'f', 'g', 'h'],
                           'c': ['h', 'crap', 'l', 'o']})
        assert_frame_equal(res, expec)

        to_replace_res = [r'\s*(\.)\s*', r'e']
        values = [r'\1\1', r'crap']
        res = dfobj.copy()
        res.replace(value=values, regex=to_replace_res, inplace=True)
        expec = DataFrame({'a': ['a', 'b', '..', '..'],
                           'b': ['crap', 'f', 'g', 'h'],
                           'c': ['h', 'crap', 'l', 'o']})
        assert_frame_equal(res, expec)

    def test_regex_replace_list_mixed(self):
        # mixed frame to make sure this doesn't break things
        mix = {'a': lrange(4), 'b': list('ab..')}
        dfmix = DataFrame(mix)

        # lists of regexes and values
        # list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
        to_replace_res = [r'\s*\.\s*', r'a']
        values = [nan, 'crap']
        mix2 = {'a': lrange(4), 'b': list('ab..'), 'c': list('halo')}
        dfmix2 = DataFrame(mix2)
        res = dfmix2.replace(to_replace_res, values, regex=True)
        expec = DataFrame({'a': mix2['a'],
                           'b': ['crap', 'b', nan, nan],
                           'c': ['h', 'crap', 'l', 'o']})
        assert_frame_equal(res, expec)

        # list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
        to_replace_res = [r'\s*(\.)\s*', r'(a|b)']
        values = [r'\1\1', r'\1_crap']
        res = dfmix.replace(to_replace_res, values, regex=True)
        expec = DataFrame({'a': mix['a'],
                           'b': ['a_crap', 'b_crap', '..', '..']})
        assert_frame_equal(res, expec)

        # list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
        # or vN)]
        to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)']
        values = [r'\1\1', r'crap', r'\1_crap']
        res = dfmix.replace(to_replace_res, values, regex=True)
        expec = DataFrame({'a': mix['a'],
                           'b': ['crap', 'b_crap', '..', '..']})
        assert_frame_equal(res, expec)

        to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)']
        values = [r'\1\1', r'crap', r'\1_crap']
        res = dfmix.replace(regex=to_replace_res, value=values)
        expec = DataFrame({'a': mix['a'],
                           'b': ['crap', 'b_crap', '..', '..']})
        assert_frame_equal(res, expec)

    def test_regex_replace_list_mixed_inplace(self):
        mix = {'a': lrange(4), 'b': list('ab..')}
        dfmix = DataFrame(mix)
        # the same inplace
        # lists of regexes and values
        # list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
        to_replace_res = [r'\s*\.\s*', r'a']
        values = [nan, 'crap']
        res = dfmix.copy()
        res.replace(to_replace_res, values, inplace=True, regex=True)
        expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b', nan, nan]})
        assert_frame_equal(res, expec)

        # list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
        to_replace_res = [r'\s*(\.)\s*', r'(a|b)']
        values = [r'\1\1', r'\1_crap']
        res = dfmix.copy()
        res.replace(to_replace_res, values, inplace=True, regex=True)
        expec = DataFrame({'a': mix['a'],
                           'b': ['a_crap', 'b_crap', '..', '..']})
        assert_frame_equal(res, expec)

        # list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
        # or vN)]
        to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)']
        values = [r'\1\1', r'crap', r'\1_crap']
        res = dfmix.copy()
        res.replace(to_replace_res, values, inplace=True, regex=True)
        expec = DataFrame({'a': mix['a'],
                           'b': ['crap', 'b_crap', '..', '..']})
        assert_frame_equal(res, expec)

        to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)']
        values = [r'\1\1', r'crap', r'\1_crap']
        res = dfmix.copy()
        res.replace(regex=to_replace_res, value=values, inplace=True)
        expec = DataFrame({'a': mix['a'],
                           'b': ['crap', 'b_crap', '..', '..']})
        assert_frame_equal(res, expec)

    def test_regex_replace_dict_mixed(self):
        mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
        dfmix = DataFrame(mix)

        # dicts
        # single dict {re1: v1}, search the whole frame
        # need test for this...

        # list of dicts {re1: v1, re2: v2, ..., re3: v3}, search the whole
        # frame
        res = dfmix.replace({'b': r'\s*\.\s*'}, {'b': nan}, regex=True)
        res2 = dfmix.copy()
        res2.replace({'b': r'\s*\.\s*'}, {'b': nan}, inplace=True, regex=True)
        expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan],
                           'c': mix['c']})
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)

        # list of dicts {re1: re11, re2: re12, ..., reN: re1N}, search the
        # whole frame
        res = dfmix.replace({'b': r'\s*(\.)\s*'}, {'b': r'\1ty'}, regex=True)
        res2 = dfmix.copy()
        res2.replace({'b': r'\s*(\.)\s*'}, {'b': r'\1ty'}, inplace=True,
                     regex=True)
        expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', '.ty', '.ty'],
                           'c': mix['c']})
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)

        res = dfmix.replace(regex={'b': r'\s*(\.)\s*'}, value={'b': r'\1ty'})
        res2 = dfmix.copy()
        res2.replace(regex={'b': r'\s*(\.)\s*'}, value={'b': r'\1ty'},
                     inplace=True)
        expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', '.ty', '.ty'],
                           'c': mix['c']})
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)

        # scalar -> dict
        # to_replace regex, {value: value}
        expec = DataFrame({'a': mix['a'], 'b': [nan, 'b', '.', '.'],
                           'c': mix['c']})
        res = dfmix.replace('a', {'b': nan}, regex=True)
        res2 = dfmix.copy()
        res2.replace('a', {'b': nan}, regex=True, inplace=True)
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)

        res = dfmix.replace('a', {'b': nan}, regex=True)
        res2 = dfmix.copy()
        res2.replace(regex='a', value={'b': nan}, inplace=True)
        expec = DataFrame({'a': mix['a'], 'b': [nan, 'b', '.', '.'],
                           'c': mix['c']})
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)

    def test_regex_replace_dict_nested(self):
        # nested dicts will not work until this is implemented for Series
        mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
        dfmix = DataFrame(mix)
        res = dfmix.replace({'b': {r'\s*\.\s*': nan}}, regex=True)
        res2 = dfmix.copy()
        res4 = dfmix.copy()
        res2.replace({'b': {r'\s*\.\s*': nan}}, inplace=True, regex=True)
        res3 = dfmix.replace(regex={'b': {r'\s*\.\s*': nan}})
        res4.replace(regex={'b': {r'\s*\.\s*': nan}}, inplace=True)
        expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan],
                           'c': mix['c']})
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)
        assert_frame_equal(res3, expec)
        assert_frame_equal(res4, expec)

    def test_regex_replace_dict_nested_gh4115(self):
        df = pd.DataFrame({'Type': ['Q', 'T', 'Q', 'Q', 'T'], 'tmp': 2})
        expected = DataFrame({'Type': [0, 1, 0, 0, 1], 'tmp': 2})
        result = df.replace({'Type': {'Q': 0, 'T': 1}})
        assert_frame_equal(result, expected)

    def test_regex_replace_list_to_scalar(self):
        mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
        df = DataFrame(mix)
        expec = DataFrame({'a': mix['a'], 'b': np.array([nan] * 4),
                           'c': [nan, nan, nan, 'd']})

        res = df.replace([r'\s*\.\s*', 'a|b'], nan, regex=True)
        res2 = df.copy()
        res3 = df.copy()
        res2.replace([r'\s*\.\s*', 'a|b'], nan, regex=True, inplace=True)
        res3.replace(regex=[r'\s*\.\s*', 'a|b'], value=nan, inplace=True)
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)
        assert_frame_equal(res3, expec)

    def test_regex_replace_str_to_numeric(self):
        # what happens when you try to replace a numeric value with a regex?
        mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
        df = DataFrame(mix)
        res = df.replace(r'\s*\.\s*', 0, regex=True)
        res2 = df.copy()
        res2.replace(r'\s*\.\s*', 0, inplace=True, regex=True)
        res3 = df.copy()
        res3.replace(regex=r'\s*\.\s*', value=0, inplace=True)
        expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', 0, 0],
                           'c': mix['c']})
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)
        assert_frame_equal(res3, expec)

    def test_regex_replace_regex_list_to_numeric(self):
        mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
        df = DataFrame(mix)
        res = df.replace([r'\s*\.\s*', 'b'], 0, regex=True)
        res2 = df.copy()
        res2.replace([r'\s*\.\s*', 'b'], 0, regex=True, inplace=True)
        res3 = df.copy()
        res3.replace(regex=[r'\s*\.\s*', 'b'], value=0, inplace=True)
        expec = DataFrame({'a': mix['a'], 'b': ['a', 0, 0, 0],
                           'c': ['a', 0, nan, 'd']})
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)
        assert_frame_equal(res3, expec)

    def test_regex_replace_series_of_regexes(self):
        mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
        df = DataFrame(mix)
        s1 = Series({'b': r'\s*\.\s*'})
        s2 = Series({'b': nan})
        res = df.replace(s1, s2, regex=True)
        res2 = df.copy()
        res2.replace(s1, s2, inplace=True, regex=True)
        res3 = df.copy()
        res3.replace(regex=s1, value=s2, inplace=True)
        expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan],
                           'c': mix['c']})
        assert_frame_equal(res, expec)
        assert_frame_equal(res2, expec)
        assert_frame_equal(res3, expec)

    def test_regex_replace_numeric_to_object_conversion(self):
        mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
        df = DataFrame(mix)
        expec = DataFrame({'a': ['a', 1, 2, 3], 'b': mix['b'], 'c': mix['c']})
        res = df.replace(0, 'a')
        assert_frame_equal(res, expec)
        assert res.a.dtype == np.object_

    def test_replace_regex_metachar(self):
        metachars = '[]', '()', r'\d', r'\w', r'\s'

        for metachar in metachars:
            df = DataFrame({'a': [metachar, 'else']})
            result = df.replace({'a': {metachar: 'paren'}})
            expected = DataFrame({'a': ['paren', 'else']})
            assert_frame_equal(result, expected)

    def test_replace(self):
        self.tsframe['A'][:5] = nan
        self.tsframe['A'][-5:] = nan

        zero_filled = self.tsframe.replace(nan, -1e8)
        assert_frame_equal(zero_filled, self.tsframe.fillna(-1e8))
        assert_frame_equal(zero_filled.replace(-1e8, nan), self.tsframe)

        self.tsframe['A'][:5] = nan
        self.tsframe['A'][-5:] = nan
        self.tsframe['B'][:5] = -1e8

        # empty
        df = DataFrame(index=['a', 'b'])
        assert_frame_equal(df, df.replace(5, 7))

        # GH 11698
        # test for mixed data types.
        df = pd.DataFrame([('-', pd.to_datetime('20150101')),
                           ('a', pd.to_datetime('20150102'))])
        df1 = df.replace('-', np.nan)
        expected_df = pd.DataFrame([(np.nan, pd.to_datetime('20150101')),
                                    ('a', pd.to_datetime('20150102'))])
        assert_frame_equal(df1, expected_df)

    def test_replace_list(self):
        obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')}
        dfobj = DataFrame(obj)

        # lists of regexes and values
        # list of [v1, v2, ..., vN] -> [v1, v2, ..., vN]
        to_replace_res = [r'.', r'e']
        values = [nan, 'crap']
        res = dfobj.replace(to_replace_res, values)
        expec = DataFrame({'a': ['a', 'b', nan, nan],
                           'b': ['crap', 'f', 'g', 'h'],
                           'c': ['h', 'crap', 'l', 'o']})
        assert_frame_equal(res, expec)

        # list of [v1, v2, ..., vN] -> [v1, v2, .., vN]
        to_replace_res = [r'.', r'f']
        values = [r'..', r'crap']
        res = dfobj.replace(to_replace_res, values)
        expec = DataFrame({'a': ['a', 'b', '..', '..'],
                           'b': ['e', 'crap', 'g', 'h'],
                           'c': ['h', 'e', 'l', 'o']})
        assert_frame_equal(res, expec)

    def test_replace_series_dict(self):
        # from GH 3064
        df = DataFrame({'zero': {'a': 0.0, 'b': 1}, 'one': {'a': 2.0,
                                                            'b': 0}})
        result = df.replace(0, {'zero': 0.5, 'one': 1.0})
        expected = DataFrame(
            {'zero': {'a': 0.5, 'b': 1}, 'one': {'a': 2.0, 'b': 1.0}})
        assert_frame_equal(result, expected)

        result = df.replace(0, df.mean())
        assert_frame_equal(result, expected)

        # series to series/dict
        df = DataFrame({'zero': {'a': 0.0, 'b': 1}, 'one': {'a': 2.0,
                                                            'b': 0}})
        s = Series({'zero': 0.0, 'one': 2.0})
        result = df.replace(s, {'zero': 0.5, 'one': 1.0})
        expected = DataFrame(
            {'zero': {'a': 0.5, 'b': 1}, 'one': {'a': 1.0, 'b': 0.0}})
        assert_frame_equal(result, expected)

        result = df.replace(s, df.mean())
        assert_frame_equal(result, expected)

    def test_replace_convert(self):
        # gh 3907
        df = DataFrame([['foo', 'bar', 'bah'], ['bar', 'foo', 'bah']])
        m = {'foo': 1, 'bar': 2, 'bah': 3}
        rep = df.replace(m)
        expec = Series([np.int64] * 3)
        res = rep.dtypes
        assert_series_equal(expec, res)

    def test_replace_mixed(self):
        mf = self.mixed_frame
        mf.iloc[5:20, mf.columns.get_loc('foo')] = nan
        mf.iloc[-10:, mf.columns.get_loc('A')] = nan

        result = self.mixed_frame.replace(np.nan, -18)
        expected = self.mixed_frame.fillna(value=-18)
        assert_frame_equal(result, expected)
        assert_frame_equal(result.replace(-18, nan), self.mixed_frame)

        result = self.mixed_frame.replace(np.nan, -1e8)
        expected = self.mixed_frame.fillna(value=-1e8)
        assert_frame_equal(result, expected)
        assert_frame_equal(result.replace(-1e8, nan), self.mixed_frame)

        # int block upcasting
        df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
                        'B': Series([0, 1], dtype='int64')})
        expected = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
                              'B': Series([0.5, 1], dtype='float64')})
        result = df.replace(0, 0.5)
        assert_frame_equal(result, expected)

        df.replace(0, 0.5, inplace=True)
        assert_frame_equal(df, expected)

        # int block splitting
        df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
                        'B': Series([0, 1], dtype='int64'),
                        'C': Series([1, 2], dtype='int64')})
        expected = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
                              'B': Series([0.5, 1], dtype='float64'),
                              'C': Series([1, 2], dtype='int64')})
        result = df.replace(0, 0.5)
        assert_frame_equal(result, expected)

        # to object block upcasting
        df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
                        'B': Series([0, 1], dtype='int64')})
        expected = DataFrame({'A': Series([1, 'foo'], dtype='object'),
                              'B': Series([0, 1], dtype='int64')})
        result = df.replace(2, 'foo')
        assert_frame_equal(result, expected)

        expected = DataFrame({'A': Series(['foo', 'bar'], dtype='object'),
                              'B': Series([0, 'foo'], dtype='object')})
        result = df.replace([1, 2], ['foo', 'bar'])
        assert_frame_equal(result, expected)

        # test case from
        df = DataFrame({'A': Series([3, 0], dtype='int64'),
                        'B': Series([0, 3], dtype='int64')})
        result = df.replace(3, df.mean().to_dict())
        expected = df.copy().astype('float64')
        m = df.mean()
        expected.iloc[0, 0] = m[0]
        expected.iloc[1, 1] = m[1]
        assert_frame_equal(result, expected)

    def test_replace_simple_nested_dict(self):
        df = DataFrame({'col': range(1, 5)})
        expected = DataFrame({'col': ['a', 2, 3, 'b']})

        result = df.replace({'col': {1: 'a', 4: 'b'}})
        assert_frame_equal(expected, result)

        # in this case, should be the same as the not nested version
        result = df.replace({1: 'a', 4: 'b'})
        assert_frame_equal(expected, result)

    def test_replace_simple_nested_dict_with_nonexistent_value(self):
        df = DataFrame({'col': range(1, 5)})
        expected = DataFrame({'col': ['a', 2, 3, 'b']})

        result = df.replace({-1: '-', 1: 'a', 4: 'b'})
        assert_frame_equal(expected, result)

        result = df.replace({'col': {-1: '-', 1: 'a', 4: 'b'}})
        assert_frame_equal(expected, result)

    def test_replace_value_is_none(self):
        orig_value = self.tsframe.iloc[0, 0]
        orig2 = self.tsframe.iloc[1, 0]

        self.tsframe.iloc[0, 0] = nan
        self.tsframe.iloc[1, 0] = 1

        result = self.tsframe.replace(to_replace={nan: 0})
        expected = self.tsframe.T.replace(to_replace={nan: 0}).T
        assert_frame_equal(result, expected)

        result = self.tsframe.replace(to_replace={nan: 0, 1: -1e8})
        tsframe = self.tsframe.copy()
        tsframe.iloc[0, 0] = 0
        tsframe.iloc[1, 0] = -1e8
        expected = tsframe
        assert_frame_equal(expected, result)
        self.tsframe.iloc[0, 0] = orig_value
        self.tsframe.iloc[1, 0] = orig2

    def test_replace_for_new_dtypes(self):
        # dtypes
        tsframe = self.tsframe.copy().astype(np.float32)
        tsframe['A'][:5] = nan
        tsframe['A'][-5:] = nan

        zero_filled = tsframe.replace(nan, -1e8)
        assert_frame_equal(zero_filled, tsframe.fillna(-1e8))
        assert_frame_equal(zero_filled.replace(-1e8, nan), tsframe)

        tsframe['A'][:5] = nan
        tsframe['A'][-5:] = nan
        tsframe['B'][:5] = -1e8

        b = tsframe['B']
        b[b == -1e8] = nan
        tsframe['B'] = b
        result = tsframe.fillna(method='bfill')
        assert_frame_equal(result, tsframe.fillna(method='bfill'))

    def test_replace_dtypes(self):
        # int
        df = DataFrame({'ints': [1, 2, 3]})
        result = df.replace(1, 0)
        expected = DataFrame({'ints': [0, 2, 3]})
        assert_frame_equal(result, expected)

        df = DataFrame({'ints': [1, 2, 3]}, dtype=np.int32)
        result = df.replace(1, 0)
        expected = DataFrame({'ints': [0, 2, 3]}, dtype=np.int32)
        assert_frame_equal(result, expected)

        df = DataFrame({'ints': [1, 2, 3]}, dtype=np.int16)
        result = df.replace(1, 0)
        expected = DataFrame({'ints': [0, 2, 3]}, dtype=np.int16)
        assert_frame_equal(result, expected)

        # bools
        df = DataFrame({'bools': [True, False, True]})
        result = df.replace(False, True)
        assert result.values.all()

        # complex blocks
        df = DataFrame({'complex': [1j, 2j, 3j]})
        result = df.replace(1j, 0j)
        expected = DataFrame({'complex': [0j, 2j, 3j]})
        assert_frame_equal(result, expected)

        # datetime blocks
        prev = datetime.today()
        now = datetime.today()
        df = DataFrame({'datetime64': Index([prev, now, prev])})
        result = df.replace(prev, now)
        expected = DataFrame({'datetime64': Index([now] * 3)})
        assert_frame_equal(result, expected)

    def test_replace_input_formats_listlike(self):
        # both dicts
        to_rep = {'A': np.nan, 'B': 0, 'C': ''}
        values = {'A': 0, 'B': -1, 'C': 'missing'}
        df = DataFrame({'A': [np.nan, 0, np.inf], 'B': [0, 2, 5],
                        'C': ['', 'asdf', 'fd']})
        filled = df.replace(to_rep, values)
        expected = {}
        for k, v in compat.iteritems(df):
            expected[k] = v.replace(to_rep[k], values[k])
        assert_frame_equal(filled, DataFrame(expected))

        result = df.replace([0, 2, 5], [5, 2, 0])
        expected = DataFrame({'A': [np.nan, 5, np.inf], 'B': [5, 2, 0],
                              'C': ['', 'asdf', 'fd']})
        assert_frame_equal(result, expected)

        # scalar to dict
        values = {'A': 0, 'B': -1, 'C': 'missing'}
        df = DataFrame({'A': [np.nan, 0, np.nan], 'B': [0, 2, 5],
                        'C': ['', 'asdf', 'fd']})
        filled = df.replace(np.nan, values)
        expected = {}
        for k, v in compat.iteritems(df):
            expected[k] = v.replace(np.nan, values[k])
        assert_frame_equal(filled, DataFrame(expected))

        # list to list
        to_rep = [np.nan, 0, '']
        values = [-2, -1, 'missing']
        result = df.replace(to_rep, values)
        expected = df.copy()
        for i in range(len(to_rep)):
            expected.replace(to_rep[i], values[i], inplace=True)
        assert_frame_equal(result, expected)

        pytest.raises(ValueError, df.replace, to_rep, values[1:])

    def test_replace_input_formats_scalar(self):
        df = DataFrame({'A': [np.nan, 0, np.inf], 'B': [0, 2, 5],
                        'C': ['', 'asdf', 'fd']})

        # dict to scalar
        to_rep = {'A': np.nan, 'B': 0, 'C': ''}
        filled = df.replace(to_rep, 0)
        expected = {}
        for k, v in compat.iteritems(df):
            expected[k] = v.replace(to_rep[k], 0)
        assert_frame_equal(filled, DataFrame(expected))

        pytest.raises(TypeError, df.replace, to_rep, [np.nan, 0, ''])

        # list to scalar
        to_rep = [np.nan, 0, '']
        result = df.replace(to_rep, -1)
        expected = df.copy()
        for i in range(len(to_rep)):
            expected.replace(to_rep[i], -1, inplace=True)
        assert_frame_equal(result, expected)

    def test_replace_limit(self):
        pass

    def test_replace_dict_no_regex(self):
        answer = Series({0: 'Strongly Agree', 1: 'Agree', 2: 'Neutral',
                         3: 'Disagree', 4: 'Strongly Disagree'})
        weights = {'Agree': 4, 'Disagree': 2, 'Neutral': 3,
                   'Strongly Agree': 5, 'Strongly Disagree': 1}
        expected = Series({0: 5, 1: 4, 2: 3, 3: 2, 4: 1})
        result = answer.replace(weights)
        assert_series_equal(result, expected)

    def test_replace_series_no_regex(self):
        answer = Series({0: 'Strongly Agree', 1: 'Agree', 2: 'Neutral',
                         3: 'Disagree', 4: 'Strongly Disagree'})
        weights = Series({'Agree': 4, 'Disagree': 2, 'Neutral': 3,
                          'Strongly Agree': 5, 'Strongly Disagree': 1})
        expected = Series({0: 5, 1: 4, 2: 3, 3: 2, 4: 1})
        result = answer.replace(weights)
        assert_series_equal(result, expected)

    def test_replace_dict_tuple_list_ordering_remains_the_same(self):
        df = DataFrame(dict(A=[nan, 1]))
        res1 = df.replace(to_replace={nan: 0, 1: -1e8})
        res2 = df.replace(to_replace=(1, nan), value=[-1e8, 0])
        res3 = df.replace(to_replace=[1, nan], value=[-1e8, 0])

        expected = DataFrame({'A': [0, -1e8]})
        assert_frame_equal(res1, res2)
        assert_frame_equal(res2, res3)
        assert_frame_equal(res3, expected)

    def test_replace_doesnt_replace_without_regex(self):
        raw = """fol T_opp T_Dir T_Enh
        0    1     0     0    vo
        1    2    vr     0     0
        2    2     0     0     0
        3    3     0    bt     0"""
        df = pd.read_csv(StringIO(raw), sep=r'\s+')
        res = df.replace({r'\D': 1})
        assert_frame_equal(df, res)

    def test_replace_bool_with_string(self):
        df = DataFrame({'a': [True, False], 'b': list('ab')})
        result = df.replace(True, 'a')
        expected = DataFrame({'a': ['a', False], 'b': df.b})
        assert_frame_equal(result, expected)

    def test_replace_pure_bool_with_string_no_op(self):
        df = DataFrame(np.random.rand(2, 2) > 0.5)
        result = df.replace('asdf', 'fdsa')
        assert_frame_equal(df, result)

    def test_replace_bool_with_bool(self):
        df = DataFrame(np.random.rand(2, 2) > 0.5)
        result = df.replace(False, True)
        expected = DataFrame(np.ones((2, 2), dtype=bool))
        assert_frame_equal(result, expected)

    def test_replace_with_dict_with_bool_keys(self):
        df = DataFrame({0: [True, False], 1: [False, True]})
        with tm.assert_raises_regex(TypeError, 'Cannot compare types .+'):
            df.replace({'asdf': 'asdb', True: 'yes'})

    def test_replace_truthy(self):
        df = DataFrame({'a': [True, True]})
        r = df.replace([np.inf, -np.inf], np.nan)
        e = df
        assert_frame_equal(r, e)

    def test_replace_int_to_int_chain(self):
        df = DataFrame({'a': lrange(1, 5)})
        with tm.assert_raises_regex(ValueError,
                                    "Replacement not allowed .+"):
            df.replace({'a': dict(zip(range(1, 5), range(2, 6)))})

    def test_replace_str_to_str_chain(self):
        a = np.arange(1, 5)
        astr = a.astype(str)
        bstr = np.arange(2, 6).astype(str)
        df = DataFrame({'a': astr})
        with tm.assert_raises_regex(ValueError,
                                    "Replacement not allowed .+"):
            df.replace({'a': dict(zip(astr, bstr))})

    def test_replace_swapping_bug(self):
        df = pd.DataFrame({'a': [True, False, True]})
        res = df.replace({'a': {True: 'Y', False: 'N'}})
        expect = pd.DataFrame({'a': ['Y', 'N', 'Y']})
        assert_frame_equal(res, expect)

        df = pd.DataFrame({'a': [0, 1, 0]})
        res = df.replace({'a': {0: 'Y', 1: 'N'}})
        expect = pd.DataFrame({'a': ['Y', 'N', 'Y']})
        assert_frame_equal(res, expect)

    def test_replace_period(self):
        d = {
            'fname': {
                'out_augmented_AUG_2011.json':
                pd.Period(year=2011, month=8, freq='M'),
                'out_augmented_JAN_2011.json':
                pd.Period(year=2011, month=1, freq='M'),
                'out_augmented_MAY_2012.json':
                pd.Period(year=2012, month=5, freq='M'),
                'out_augmented_SUBSIDY_WEEK.json':
                pd.Period(year=2011, month=4, freq='M'),
                'out_augmented_AUG_2012.json':
                pd.Period(year=2012, month=8, freq='M'),
                'out_augmented_MAY_2011.json':
                pd.Period(year=2011, month=5, freq='M'),
                'out_augmented_SEP_2013.json':
                pd.Period(year=2013, month=9, freq='M')}}

        df = pd.DataFrame(['out_augmented_AUG_2012.json',
                           'out_augmented_SEP_2013.json',
                           'out_augmented_SUBSIDY_WEEK.json',
                           'out_augmented_MAY_2012.json',
                           'out_augmented_MAY_2011.json',
                           'out_augmented_AUG_2011.json',
                           'out_augmented_JAN_2011.json'], columns=['fname'])
        assert set(df.fname.values) == set(d['fname'].keys())
        expected = DataFrame({'fname': [d['fname'][k]
                                        for k in df.fname.values]})
        result = df.replace(d)
        assert_frame_equal(result, expected)

    def test_replace_datetime(self):
        d = {'fname':
             {'out_augmented_AUG_2011.json': pd.Timestamp('2011-08'),
              'out_augmented_JAN_2011.json': pd.Timestamp('2011-01'),
              'out_augmented_MAY_2012.json': pd.Timestamp('2012-05'),
              'out_augmented_SUBSIDY_WEEK.json': pd.Timestamp('2011-04'),
              'out_augmented_AUG_2012.json': pd.Timestamp('2012-08'),
              'out_augmented_MAY_2011.json': pd.Timestamp('2011-05'),
              'out_augmented_SEP_2013.json': pd.Timestamp('2013-09')}}

        df = pd.DataFrame(['out_augmented_AUG_2012.json',
                           'out_augmented_SEP_2013.json',
                           'out_augmented_SUBSIDY_WEEK.json',
                           'out_augmented_MAY_2012.json',
                           'out_augmented_MAY_2011.json',
                           'out_augmented_AUG_2011.json',
                           'out_augmented_JAN_2011.json'], columns=['fname'])
        assert set(df.fname.values) == set(d['fname'].keys())
        expected = DataFrame({'fname': [d['fname'][k]
                                        for k in df.fname.values]})
        result = df.replace(d)
        assert_frame_equal(result, expected)

    def test_replace_datetimetz(self):
        # GH 11326
        # behaving poorly when presented with a datetime64[ns, tz]
        df = DataFrame({'A': date_range('20130101', periods=3,
                                        tz='US/Eastern'),
                        'B': [0, np.nan, 2]})
        result = df.replace(np.nan, 1)
        expected = DataFrame({'A': date_range('20130101', periods=3,
                                              tz='US/Eastern'),
                              'B': Series([0, 1, 2], dtype='float64')})
        assert_frame_equal(result, expected)

        result = df.fillna(1)
        assert_frame_equal(result, expected)

        result = df.replace(0, np.nan)
        expected = DataFrame({'A': date_range('20130101', periods=3,
                                              tz='US/Eastern'),
                              'B': [np.nan, np.nan, 2]})
        assert_frame_equal(result, expected)

        result = df.replace(Timestamp('20130102', tz='US/Eastern'),
                            Timestamp('20130104', tz='US/Eastern'))
        expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'),
                                    Timestamp('20130104', tz='US/Eastern'),
                                    Timestamp('20130103', tz='US/Eastern')],
                              'B': [0, np.nan, 2]})
        assert_frame_equal(result, expected)

        result = df.copy()
        result.iloc[1, 0] = np.nan
        result = result.replace(
            {'A': pd.NaT}, Timestamp('20130104', tz='US/Eastern'))
        assert_frame_equal(result, expected)

        # coerce to object
        result = df.copy()
        result.iloc[1, 0] = np.nan
        result = result.replace(
            {'A': pd.NaT}, Timestamp('20130104', tz='US/Pacific'))
        expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'),
                                    Timestamp('20130104', tz='US/Pacific'),
                                    Timestamp('20130103', tz='US/Eastern')],
                              'B': [0, np.nan, 2]})
        assert_frame_equal(result, expected)

        result = df.copy()
        result.iloc[1, 0] = np.nan
        result = result.replace({'A': np.nan}, Timestamp('20130104'))
        expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'),
                                    Timestamp('20130104'),
                                    Timestamp('20130103', tz='US/Eastern')],
                              'B': [0, np.nan, 2]})
        assert_frame_equal(result, expected)

    def test_replace_with_empty_dictlike(self):
        # GH 15289
        mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
        df = DataFrame(mix)
        assert_frame_equal(df, df.replace({}))
        assert_frame_equal(df, df.replace(Series([])))

        assert_frame_equal(df, df.replace({'b': {}}))
        assert_frame_equal(df, df.replace(Series({'b': {}})))

    @pytest.mark.parametrize("to_replace, method, expected", [
        (0, 'bfill', {'A': [1, 1, 2],
                      'B': [5, nan, 7],
                      'C': ['a', 'b', 'c']}),
        (nan, 'bfill', {'A': [0, 1, 2],
                        'B': [5.0, 7.0, 7.0],
                        'C': ['a', 'b', 'c']}),
        ('d', 'ffill', {'A': [0, 1, 2],
                        'B': [5, nan, 7],
                        'C': ['a', 'b', 'c']}),
        ([0, 2], 'bfill', {'A': [1, 1, 2],
                           'B': [5, nan, 7],
                           'C': ['a', 'b', 'c']}),
        ([1, 2], 'pad', {'A': [0, 0, 0],
                         'B': [5, nan, 7],
                         'C': ['a', 'b', 'c']}),
        ((1, 2), 'bfill', {'A': [0, 2, 2],
                           'B': [5, nan, 7],
                           'C': ['a', 'b', 'c']}),
        (['b', 'c'], 'ffill', {'A': [0, 1, 2],
                               'B': [5, nan, 7],
                               'C': ['a', 'a', 'a']}),
    ])
    def test_replace_method(self, to_replace, method, expected):
        # GH 19632
        df = DataFrame({'A': [0, 1, 2],
                        'B': [5, nan, 7],
                        'C': ['a', 'b', 'c']})

        result = df.replace(to_replace=to_replace, value=None, method=method)
        expected = DataFrame(expected)
        assert_frame_equal(result, expected)
pratapvardhan/pandas
pandas/tests/frame/test_replace.py
pandas/tests/extension/base/casting.py
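The pandas/tests/frame/test_replace.py row above exercises several DataFrame.replace call signatures; the following is a minimal illustrative sketch of those input formats (not part of the dataset row), assuming a recent pandas.

import numpy as np
import pandas as pd

df = pd.DataFrame({'A': [np.nan, 0, 5], 'B': ['', 'x', 'y']})

# scalar -> scalar
print(df.replace(0, -1))

# dict -> scalar: per-column targets, one replacement value
print(df.replace({'A': np.nan, 'B': ''}, -1))

# nested dict: {column: {old: new}}
print(df.replace({'A': {0: 99}}))

# list -> list: positional pairing; mismatched lengths raise ValueError
print(df.replace([0, ''], [-1, 'missing']))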
# -*- coding: utf-8 -*-

import numpy as np
import pytest

from pandas import Index, MultiIndex


@pytest.fixture
def idx():
    # a MultiIndex used to test the general functionality of this object
    major_axis = Index(['foo', 'bar', 'baz', 'qux'])
    minor_axis = Index(['one', 'two'])

    major_labels = np.array([0, 0, 1, 2, 3, 3])
    minor_labels = np.array([0, 1, 0, 1, 0, 1])
    index_names = ['first', 'second']
    index = MultiIndex(
        levels=[major_axis, minor_axis],
        labels=[major_labels, minor_labels],
        names=index_names,
        verify_integrity=False
    )
    return index


@pytest.fixture
def index_names():
    # names that match those in the idx fixture, for testing equality of
    # names assigned to the idx
    return ['first', 'second']


@pytest.fixture
def holder():
    # the MultiIndex constructor used in the pickle-compatibility tests
    return MultiIndex


@pytest.fixture
def compat_props():
    # a MultiIndex must have these properties associated with it
    return ['shape', 'ndim', 'size']
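For reference, a minimal sketch of the MultiIndex the idx fixture above constructs; the labels= keyword in the fixture is the pre-0.24 spelling of what modern pandas calls codes= (this snippet assumes pandas >= 0.24 and is illustrative only).

import pandas as pd

index = pd.MultiIndex(
    levels=[['foo', 'bar', 'baz', 'qux'], ['one', 'two']],
    codes=[[0, 0, 1, 2, 3, 3], [0, 1, 0, 1, 0, 1]],  # labels= in older pandas
    names=['first', 'second'],
    verify_integrity=False,
)
# the properties checked by the compat_props fixture
print(index.shape, index.ndim, index.size)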
# -*- coding: utf-8 -*- from __future__ import print_function import pytest from datetime import datetime import re from pandas.compat import (zip, range, lrange, StringIO) from pandas import (DataFrame, Series, Index, date_range, compat, Timestamp) import pandas as pd from numpy import nan import numpy as np from pandas.util.testing import (assert_series_equal, assert_frame_equal) import pandas.util.testing as tm from pandas.tests.frame.common import TestData class TestDataFrameReplace(TestData): def test_replace_inplace(self): self.tsframe['A'][:5] = nan self.tsframe['A'][-5:] = nan tsframe = self.tsframe.copy() tsframe.replace(nan, 0, inplace=True) assert_frame_equal(tsframe, self.tsframe.fillna(0)) # mixed type mf = self.mixed_frame mf.iloc[5:20, mf.columns.get_loc('foo')] = nan mf.iloc[-10:, mf.columns.get_loc('A')] = nan result = self.mixed_frame.replace(np.nan, 0) expected = self.mixed_frame.fillna(value=0) assert_frame_equal(result, expected) tsframe = self.tsframe.copy() tsframe.replace([nan], [0], inplace=True) assert_frame_equal(tsframe, self.tsframe.fillna(0)) def test_regex_replace_scalar(self): obj = {'a': list('ab..'), 'b': list('efgh')} dfobj = DataFrame(obj) mix = {'a': lrange(4), 'b': list('ab..')} dfmix = DataFrame(mix) # simplest cases # regex -> value # obj frame res = dfobj.replace(r'\s*\.\s*', nan, regex=True) assert_frame_equal(dfobj, res.fillna('.')) # mixed res = dfmix.replace(r'\s*\.\s*', nan, regex=True) assert_frame_equal(dfmix, res.fillna('.')) # regex -> regex # obj frame res = dfobj.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True) objc = obj.copy() objc['a'] = ['a', 'b', '...', '...'] expec = DataFrame(objc) assert_frame_equal(res, expec) # with mixed res = dfmix.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True) mixc = mix.copy() mixc['b'] = ['a', 'b', '...', '...'] expec = DataFrame(mixc) assert_frame_equal(res, expec) # everything with compiled regexs as well res = dfobj.replace(re.compile(r'\s*\.\s*'), nan, regex=True) assert_frame_equal(dfobj, res.fillna('.')) # mixed res = dfmix.replace(re.compile(r'\s*\.\s*'), nan, regex=True) assert_frame_equal(dfmix, res.fillna('.')) # regex -> regex # obj frame res = dfobj.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1') objc = obj.copy() objc['a'] = ['a', 'b', '...', '...'] expec = DataFrame(objc) assert_frame_equal(res, expec) # with mixed res = dfmix.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1') mixc = mix.copy() mixc['b'] = ['a', 'b', '...', '...'] expec = DataFrame(mixc) assert_frame_equal(res, expec) res = dfmix.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1') mixc = mix.copy() mixc['b'] = ['a', 'b', '...', '...'] expec = DataFrame(mixc) assert_frame_equal(res, expec) res = dfmix.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1') mixc = mix.copy() mixc['b'] = ['a', 'b', '...', '...'] expec = DataFrame(mixc) assert_frame_equal(res, expec) def test_regex_replace_scalar_inplace(self): obj = {'a': list('ab..'), 'b': list('efgh')} dfobj = DataFrame(obj) mix = {'a': lrange(4), 'b': list('ab..')} dfmix = DataFrame(mix) # simplest cases # regex -> value # obj frame res = dfobj.copy() res.replace(r'\s*\.\s*', nan, regex=True, inplace=True) assert_frame_equal(dfobj, res.fillna('.')) # mixed res = dfmix.copy() res.replace(r'\s*\.\s*', nan, regex=True, inplace=True) assert_frame_equal(dfmix, res.fillna('.')) # regex -> regex # obj frame res = dfobj.copy() res.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True, inplace=True) objc = obj.copy() objc['a'] = ['a', 'b', '...', '...'] expec = DataFrame(objc) assert_frame_equal(res, expec) # 
with mixed res = dfmix.copy() res.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True, inplace=True) mixc = mix.copy() mixc['b'] = ['a', 'b', '...', '...'] expec = DataFrame(mixc) assert_frame_equal(res, expec) # everything with compiled regexs as well res = dfobj.copy() res.replace(re.compile(r'\s*\.\s*'), nan, regex=True, inplace=True) assert_frame_equal(dfobj, res.fillna('.')) # mixed res = dfmix.copy() res.replace(re.compile(r'\s*\.\s*'), nan, regex=True, inplace=True) assert_frame_equal(dfmix, res.fillna('.')) # regex -> regex # obj frame res = dfobj.copy() res.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1', regex=True, inplace=True) objc = obj.copy() objc['a'] = ['a', 'b', '...', '...'] expec = DataFrame(objc) assert_frame_equal(res, expec) # with mixed res = dfmix.copy() res.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1', regex=True, inplace=True) mixc = mix.copy() mixc['b'] = ['a', 'b', '...', '...'] expec = DataFrame(mixc) assert_frame_equal(res, expec) res = dfobj.copy() res.replace(regex=r'\s*\.\s*', value=nan, inplace=True) assert_frame_equal(dfobj, res.fillna('.')) # mixed res = dfmix.copy() res.replace(regex=r'\s*\.\s*', value=nan, inplace=True) assert_frame_equal(dfmix, res.fillna('.')) # regex -> regex # obj frame res = dfobj.copy() res.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1', inplace=True) objc = obj.copy() objc['a'] = ['a', 'b', '...', '...'] expec = DataFrame(objc) assert_frame_equal(res, expec) # with mixed res = dfmix.copy() res.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1', inplace=True) mixc = mix.copy() mixc['b'] = ['a', 'b', '...', '...'] expec = DataFrame(mixc) assert_frame_equal(res, expec) # everything with compiled regexs as well res = dfobj.copy() res.replace(regex=re.compile(r'\s*\.\s*'), value=nan, inplace=True) assert_frame_equal(dfobj, res.fillna('.')) # mixed res = dfmix.copy() res.replace(regex=re.compile(r'\s*\.\s*'), value=nan, inplace=True) assert_frame_equal(dfmix, res.fillna('.')) # regex -> regex # obj frame res = dfobj.copy() res.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1', inplace=True) objc = obj.copy() objc['a'] = ['a', 'b', '...', '...'] expec = DataFrame(objc) assert_frame_equal(res, expec) # with mixed res = dfmix.copy() res.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1', inplace=True) mixc = mix.copy() mixc['b'] = ['a', 'b', '...', '...'] expec = DataFrame(mixc) assert_frame_equal(res, expec) def test_regex_replace_list_obj(self): obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')} dfobj = DataFrame(obj) # lists of regexes and values # list of [re1, re2, ..., reN] -> [v1, v2, ..., vN] to_replace_res = [r'\s*\.\s*', r'e|f|g'] values = [nan, 'crap'] res = dfobj.replace(to_replace_res, values, regex=True) expec = DataFrame({'a': ['a', 'b', nan, nan], 'b': ['crap'] * 3 + ['h'], 'c': ['h', 'crap', 'l', 'o']}) assert_frame_equal(res, expec) # list of [re1, re2, ..., reN] -> [re1, re2, .., reN] to_replace_res = [r'\s*(\.)\s*', r'(e|f|g)'] values = [r'\1\1', r'\1_crap'] res = dfobj.replace(to_replace_res, values, regex=True) expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['e_crap', 'f_crap', 'g_crap', 'h'], 'c': ['h', 'e_crap', 'l', 'o']}) assert_frame_equal(res, expec) # list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN # or vN)] to_replace_res = [r'\s*(\.)\s*', r'e'] values = [r'\1\1', r'crap'] res = dfobj.replace(to_replace_res, values, regex=True) expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['crap', 'f', 'g', 'h'], 'c': ['h', 'crap', 'l', 'o']}) assert_frame_equal(res, expec) 
to_replace_res = [r'\s*(\.)\s*', r'e'] values = [r'\1\1', r'crap'] res = dfobj.replace(value=values, regex=to_replace_res) expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['crap', 'f', 'g', 'h'], 'c': ['h', 'crap', 'l', 'o']}) assert_frame_equal(res, expec) def test_regex_replace_list_obj_inplace(self): # same as above with inplace=True # lists of regexes and values obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')} dfobj = DataFrame(obj) # lists of regexes and values # list of [re1, re2, ..., reN] -> [v1, v2, ..., vN] to_replace_res = [r'\s*\.\s*', r'e|f|g'] values = [nan, 'crap'] res = dfobj.copy() res.replace(to_replace_res, values, inplace=True, regex=True) expec = DataFrame({'a': ['a', 'b', nan, nan], 'b': ['crap'] * 3 + ['h'], 'c': ['h', 'crap', 'l', 'o']}) assert_frame_equal(res, expec) # list of [re1, re2, ..., reN] -> [re1, re2, .., reN] to_replace_res = [r'\s*(\.)\s*', r'(e|f|g)'] values = [r'\1\1', r'\1_crap'] res = dfobj.copy() res.replace(to_replace_res, values, inplace=True, regex=True) expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['e_crap', 'f_crap', 'g_crap', 'h'], 'c': ['h', 'e_crap', 'l', 'o']}) assert_frame_equal(res, expec) # list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN # or vN)] to_replace_res = [r'\s*(\.)\s*', r'e'] values = [r'\1\1', r'crap'] res = dfobj.copy() res.replace(to_replace_res, values, inplace=True, regex=True) expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['crap', 'f', 'g', 'h'], 'c': ['h', 'crap', 'l', 'o']}) assert_frame_equal(res, expec) to_replace_res = [r'\s*(\.)\s*', r'e'] values = [r'\1\1', r'crap'] res = dfobj.copy() res.replace(value=values, regex=to_replace_res, inplace=True) expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['crap', 'f', 'g', 'h'], 'c': ['h', 'crap', 'l', 'o']}) assert_frame_equal(res, expec) def test_regex_replace_list_mixed(self): # mixed frame to make sure this doesn't break things mix = {'a': lrange(4), 'b': list('ab..')} dfmix = DataFrame(mix) # lists of regexes and values # list of [re1, re2, ..., reN] -> [v1, v2, ..., vN] to_replace_res = [r'\s*\.\s*', r'a'] values = [nan, 'crap'] mix2 = {'a': lrange(4), 'b': list('ab..'), 'c': list('halo')} dfmix2 = DataFrame(mix2) res = dfmix2.replace(to_replace_res, values, regex=True) expec = DataFrame({'a': mix2['a'], 'b': ['crap', 'b', nan, nan], 'c': ['h', 'crap', 'l', 'o']}) assert_frame_equal(res, expec) # list of [re1, re2, ..., reN] -> [re1, re2, .., reN] to_replace_res = [r'\s*(\.)\s*', r'(a|b)'] values = [r'\1\1', r'\1_crap'] res = dfmix.replace(to_replace_res, values, regex=True) expec = DataFrame({'a': mix['a'], 'b': ['a_crap', 'b_crap', '..', '..']}) assert_frame_equal(res, expec) # list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN # or vN)] to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)'] values = [r'\1\1', r'crap', r'\1_crap'] res = dfmix.replace(to_replace_res, values, regex=True) expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b_crap', '..', '..']}) assert_frame_equal(res, expec) to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)'] values = [r'\1\1', r'crap', r'\1_crap'] res = dfmix.replace(regex=to_replace_res, value=values) expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b_crap', '..', '..']}) assert_frame_equal(res, expec) def test_regex_replace_list_mixed_inplace(self): mix = {'a': lrange(4), 'b': list('ab..')} dfmix = DataFrame(mix) # the same inplace # lists of regexes and values # list of [re1, re2, ..., reN] -> [v1, v2, ..., vN] to_replace_res = [r'\s*\.\s*', r'a'] values = [nan, 'crap'] 
res = dfmix.copy() res.replace(to_replace_res, values, inplace=True, regex=True) expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b', nan, nan]}) assert_frame_equal(res, expec) # list of [re1, re2, ..., reN] -> [re1, re2, .., reN] to_replace_res = [r'\s*(\.)\s*', r'(a|b)'] values = [r'\1\1', r'\1_crap'] res = dfmix.copy() res.replace(to_replace_res, values, inplace=True, regex=True) expec = DataFrame({'a': mix['a'], 'b': ['a_crap', 'b_crap', '..', '..']}) assert_frame_equal(res, expec) # list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN # or vN)] to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)'] values = [r'\1\1', r'crap', r'\1_crap'] res = dfmix.copy() res.replace(to_replace_res, values, inplace=True, regex=True) expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b_crap', '..', '..']}) assert_frame_equal(res, expec) to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)'] values = [r'\1\1', r'crap', r'\1_crap'] res = dfmix.copy() res.replace(regex=to_replace_res, value=values, inplace=True) expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b_crap', '..', '..']}) assert_frame_equal(res, expec) def test_regex_replace_dict_mixed(self): mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']} dfmix = DataFrame(mix) # dicts # single dict {re1: v1}, search the whole frame # need test for this... # list of dicts {re1: v1, re2: v2, ..., re3: v3}, search the whole # frame res = dfmix.replace({'b': r'\s*\.\s*'}, {'b': nan}, regex=True) res2 = dfmix.copy() res2.replace({'b': r'\s*\.\s*'}, {'b': nan}, inplace=True, regex=True) expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan], 'c': mix['c']}) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) # list of dicts {re1: re11, re2: re12, ..., reN: re1N}, search the # whole frame res = dfmix.replace({'b': r'\s*(\.)\s*'}, {'b': r'\1ty'}, regex=True) res2 = dfmix.copy() res2.replace({'b': r'\s*(\.)\s*'}, {'b': r'\1ty'}, inplace=True, regex=True) expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', '.ty', '.ty'], 'c': mix['c']}) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) res = dfmix.replace(regex={'b': r'\s*(\.)\s*'}, value={'b': r'\1ty'}) res2 = dfmix.copy() res2.replace(regex={'b': r'\s*(\.)\s*'}, value={'b': r'\1ty'}, inplace=True) expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', '.ty', '.ty'], 'c': mix['c']}) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) # scalar -> dict # to_replace regex, {value: value} expec = DataFrame({'a': mix['a'], 'b': [nan, 'b', '.', '.'], 'c': mix['c']}) res = dfmix.replace('a', {'b': nan}, regex=True) res2 = dfmix.copy() res2.replace('a', {'b': nan}, regex=True, inplace=True) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) res = dfmix.replace('a', {'b': nan}, regex=True) res2 = dfmix.copy() res2.replace(regex='a', value={'b': nan}, inplace=True) expec = DataFrame({'a': mix['a'], 'b': [nan, 'b', '.', '.'], 'c': mix['c']}) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) def test_regex_replace_dict_nested(self): # nested dicts will not work until this is implemented for Series mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']} dfmix = DataFrame(mix) res = dfmix.replace({'b': {r'\s*\.\s*': nan}}, regex=True) res2 = dfmix.copy() res4 = dfmix.copy() res2.replace({'b': {r'\s*\.\s*': nan}}, inplace=True, regex=True) res3 = dfmix.replace(regex={'b': {r'\s*\.\s*': nan}}) res4.replace(regex={'b': {r'\s*\.\s*': nan}}, inplace=True) expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan], 'c': mix['c']}) 
assert_frame_equal(res, expec) assert_frame_equal(res2, expec) assert_frame_equal(res3, expec) assert_frame_equal(res4, expec) def test_regex_replace_dict_nested_gh4115(self): df = pd.DataFrame({'Type': ['Q', 'T', 'Q', 'Q', 'T'], 'tmp': 2}) expected = DataFrame({'Type': [0, 1, 0, 0, 1], 'tmp': 2}) result = df.replace({'Type': {'Q': 0, 'T': 1}}) assert_frame_equal(result, expected) def test_regex_replace_list_to_scalar(self): mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']} df = DataFrame(mix) expec = DataFrame({'a': mix['a'], 'b': np.array([nan] * 4), 'c': [nan, nan, nan, 'd']}) res = df.replace([r'\s*\.\s*', 'a|b'], nan, regex=True) res2 = df.copy() res3 = df.copy() res2.replace([r'\s*\.\s*', 'a|b'], nan, regex=True, inplace=True) res3.replace(regex=[r'\s*\.\s*', 'a|b'], value=nan, inplace=True) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) assert_frame_equal(res3, expec) def test_regex_replace_str_to_numeric(self): # what happens when you try to replace a numeric value with a regex? mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']} df = DataFrame(mix) res = df.replace(r'\s*\.\s*', 0, regex=True) res2 = df.copy() res2.replace(r'\s*\.\s*', 0, inplace=True, regex=True) res3 = df.copy() res3.replace(regex=r'\s*\.\s*', value=0, inplace=True) expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', 0, 0], 'c': mix['c']}) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) assert_frame_equal(res3, expec) def test_regex_replace_regex_list_to_numeric(self): mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']} df = DataFrame(mix) res = df.replace([r'\s*\.\s*', 'b'], 0, regex=True) res2 = df.copy() res2.replace([r'\s*\.\s*', 'b'], 0, regex=True, inplace=True) res3 = df.copy() res3.replace(regex=[r'\s*\.\s*', 'b'], value=0, inplace=True) expec = DataFrame({'a': mix['a'], 'b': ['a', 0, 0, 0], 'c': ['a', 0, nan, 'd']}) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) assert_frame_equal(res3, expec) def test_regex_replace_series_of_regexes(self): mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']} df = DataFrame(mix) s1 = Series({'b': r'\s*\.\s*'}) s2 = Series({'b': nan}) res = df.replace(s1, s2, regex=True) res2 = df.copy() res2.replace(s1, s2, inplace=True, regex=True) res3 = df.copy() res3.replace(regex=s1, value=s2, inplace=True) expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan], 'c': mix['c']}) assert_frame_equal(res, expec) assert_frame_equal(res2, expec) assert_frame_equal(res3, expec) def test_regex_replace_numeric_to_object_conversion(self): mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']} df = DataFrame(mix) expec = DataFrame({'a': ['a', 1, 2, 3], 'b': mix['b'], 'c': mix['c']}) res = df.replace(0, 'a') assert_frame_equal(res, expec) assert res.a.dtype == np.object_ def test_replace_regex_metachar(self): metachars = '[]', '()', r'\d', r'\w', r'\s' for metachar in metachars: df = DataFrame({'a': [metachar, 'else']}) result = df.replace({'a': {metachar: 'paren'}}) expected = DataFrame({'a': ['paren', 'else']}) assert_frame_equal(result, expected) def test_replace(self): self.tsframe['A'][:5] = nan self.tsframe['A'][-5:] = nan zero_filled = self.tsframe.replace(nan, -1e8) assert_frame_equal(zero_filled, self.tsframe.fillna(-1e8)) assert_frame_equal(zero_filled.replace(-1e8, nan), self.tsframe) self.tsframe['A'][:5] = nan self.tsframe['A'][-5:] = nan self.tsframe['B'][:5] = -1e8 # empty df = DataFrame(index=['a', 'b']) assert_frame_equal(df, df.replace(5, 7)) # 
GH 11698 # test for mixed data types. df = pd.DataFrame([('-', pd.to_datetime('20150101')), ('a', pd.to_datetime('20150102'))]) df1 = df.replace('-', np.nan) expected_df = pd.DataFrame([(np.nan, pd.to_datetime('20150101')), ('a', pd.to_datetime('20150102'))]) assert_frame_equal(df1, expected_df) def test_replace_list(self): obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')} dfobj = DataFrame(obj) # lists of regexes and values # list of [v1, v2, ..., vN] -> [v1, v2, ..., vN] to_replace_res = [r'.', r'e'] values = [nan, 'crap'] res = dfobj.replace(to_replace_res, values) expec = DataFrame({'a': ['a', 'b', nan, nan], 'b': ['crap', 'f', 'g', 'h'], 'c': ['h', 'crap', 'l', 'o']}) assert_frame_equal(res, expec) # list of [v1, v2, ..., vN] -> [v1, v2, .., vN] to_replace_res = [r'.', r'f'] values = [r'..', r'crap'] res = dfobj.replace(to_replace_res, values) expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['e', 'crap', 'g', 'h'], 'c': ['h', 'e', 'l', 'o']}) assert_frame_equal(res, expec) def test_replace_series_dict(self): # from GH 3064 df = DataFrame({'zero': {'a': 0.0, 'b': 1}, 'one': {'a': 2.0, 'b': 0}}) result = df.replace(0, {'zero': 0.5, 'one': 1.0}) expected = DataFrame( {'zero': {'a': 0.5, 'b': 1}, 'one': {'a': 2.0, 'b': 1.0}}) assert_frame_equal(result, expected) result = df.replace(0, df.mean()) assert_frame_equal(result, expected) # series to series/dict df = DataFrame({'zero': {'a': 0.0, 'b': 1}, 'one': {'a': 2.0, 'b': 0}}) s = Series({'zero': 0.0, 'one': 2.0}) result = df.replace(s, {'zero': 0.5, 'one': 1.0}) expected = DataFrame( {'zero': {'a': 0.5, 'b': 1}, 'one': {'a': 1.0, 'b': 0.0}}) assert_frame_equal(result, expected) result = df.replace(s, df.mean()) assert_frame_equal(result, expected) def test_replace_convert(self): # gh 3907 df = DataFrame([['foo', 'bar', 'bah'], ['bar', 'foo', 'bah']]) m = {'foo': 1, 'bar': 2, 'bah': 3} rep = df.replace(m) expec = Series([np.int64] * 3) res = rep.dtypes assert_series_equal(expec, res) def test_replace_mixed(self): mf = self.mixed_frame mf.iloc[5:20, mf.columns.get_loc('foo')] = nan mf.iloc[-10:, mf.columns.get_loc('A')] = nan result = self.mixed_frame.replace(np.nan, -18) expected = self.mixed_frame.fillna(value=-18) assert_frame_equal(result, expected) assert_frame_equal(result.replace(-18, nan), self.mixed_frame) result = self.mixed_frame.replace(np.nan, -1e8) expected = self.mixed_frame.fillna(value=-1e8) assert_frame_equal(result, expected) assert_frame_equal(result.replace(-1e8, nan), self.mixed_frame) # int block upcasting df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'), 'B': Series([0, 1], dtype='int64')}) expected = DataFrame({'A': Series([1.0, 2.0], dtype='float64'), 'B': Series([0.5, 1], dtype='float64')}) result = df.replace(0, 0.5) assert_frame_equal(result, expected) df.replace(0, 0.5, inplace=True) assert_frame_equal(df, expected) # int block splitting df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'), 'B': Series([0, 1], dtype='int64'), 'C': Series([1, 2], dtype='int64')}) expected = DataFrame({'A': Series([1.0, 2.0], dtype='float64'), 'B': Series([0.5, 1], dtype='float64'), 'C': Series([1, 2], dtype='int64')}) result = df.replace(0, 0.5) assert_frame_equal(result, expected) # to object block upcasting df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'), 'B': Series([0, 1], dtype='int64')}) expected = DataFrame({'A': Series([1, 'foo'], dtype='object'), 'B': Series([0, 1], dtype='int64')}) result = df.replace(2, 'foo') assert_frame_equal(result, expected) expected = DataFrame({'A': 
Series(['foo', 'bar'], dtype='object'), 'B': Series([0, 'foo'], dtype='object')}) result = df.replace([1, 2], ['foo', 'bar']) assert_frame_equal(result, expected) # test case from df = DataFrame({'A': Series([3, 0], dtype='int64'), 'B': Series([0, 3], dtype='int64')}) result = df.replace(3, df.mean().to_dict()) expected = df.copy().astype('float64') m = df.mean() expected.iloc[0, 0] = m[0] expected.iloc[1, 1] = m[1] assert_frame_equal(result, expected) def test_replace_simple_nested_dict(self): df = DataFrame({'col': range(1, 5)}) expected = DataFrame({'col': ['a', 2, 3, 'b']}) result = df.replace({'col': {1: 'a', 4: 'b'}}) assert_frame_equal(expected, result) # in this case, should be the same as the not nested version result = df.replace({1: 'a', 4: 'b'}) assert_frame_equal(expected, result) def test_replace_simple_nested_dict_with_nonexistent_value(self): df = DataFrame({'col': range(1, 5)}) expected = DataFrame({'col': ['a', 2, 3, 'b']}) result = df.replace({-1: '-', 1: 'a', 4: 'b'}) assert_frame_equal(expected, result) result = df.replace({'col': {-1: '-', 1: 'a', 4: 'b'}}) assert_frame_equal(expected, result) def test_replace_value_is_none(self): orig_value = self.tsframe.iloc[0, 0] orig2 = self.tsframe.iloc[1, 0] self.tsframe.iloc[0, 0] = nan self.tsframe.iloc[1, 0] = 1 result = self.tsframe.replace(to_replace={nan: 0}) expected = self.tsframe.T.replace(to_replace={nan: 0}).T assert_frame_equal(result, expected) result = self.tsframe.replace(to_replace={nan: 0, 1: -1e8}) tsframe = self.tsframe.copy() tsframe.iloc[0, 0] = 0 tsframe.iloc[1, 0] = -1e8 expected = tsframe assert_frame_equal(expected, result) self.tsframe.iloc[0, 0] = orig_value self.tsframe.iloc[1, 0] = orig2 def test_replace_for_new_dtypes(self): # dtypes tsframe = self.tsframe.copy().astype(np.float32) tsframe['A'][:5] = nan tsframe['A'][-5:] = nan zero_filled = tsframe.replace(nan, -1e8) assert_frame_equal(zero_filled, tsframe.fillna(-1e8)) assert_frame_equal(zero_filled.replace(-1e8, nan), tsframe) tsframe['A'][:5] = nan tsframe['A'][-5:] = nan tsframe['B'][:5] = -1e8 b = tsframe['B'] b[b == -1e8] = nan tsframe['B'] = b result = tsframe.fillna(method='bfill') assert_frame_equal(result, tsframe.fillna(method='bfill')) def test_replace_dtypes(self): # int df = DataFrame({'ints': [1, 2, 3]}) result = df.replace(1, 0) expected = DataFrame({'ints': [0, 2, 3]}) assert_frame_equal(result, expected) df = DataFrame({'ints': [1, 2, 3]}, dtype=np.int32) result = df.replace(1, 0) expected = DataFrame({'ints': [0, 2, 3]}, dtype=np.int32) assert_frame_equal(result, expected) df = DataFrame({'ints': [1, 2, 3]}, dtype=np.int16) result = df.replace(1, 0) expected = DataFrame({'ints': [0, 2, 3]}, dtype=np.int16) assert_frame_equal(result, expected) # bools df = DataFrame({'bools': [True, False, True]}) result = df.replace(False, True) assert result.values.all() # complex blocks df = DataFrame({'complex': [1j, 2j, 3j]}) result = df.replace(1j, 0j) expected = DataFrame({'complex': [0j, 2j, 3j]}) assert_frame_equal(result, expected) # datetime blocks prev = datetime.today() now = datetime.today() df = DataFrame({'datetime64': Index([prev, now, prev])}) result = df.replace(prev, now) expected = DataFrame({'datetime64': Index([now] * 3)}) assert_frame_equal(result, expected) def test_replace_input_formats_listlike(self): # both dicts to_rep = {'A': np.nan, 'B': 0, 'C': ''} values = {'A': 0, 'B': -1, 'C': 'missing'} df = DataFrame({'A': [np.nan, 0, np.inf], 'B': [0, 2, 5], 'C': ['', 'asdf', 'fd']}) filled = df.replace(to_rep, values) 
expected = {} for k, v in compat.iteritems(df): expected[k] = v.replace(to_rep[k], values[k]) assert_frame_equal(filled, DataFrame(expected)) result = df.replace([0, 2, 5], [5, 2, 0]) expected = DataFrame({'A': [np.nan, 5, np.inf], 'B': [5, 2, 0], 'C': ['', 'asdf', 'fd']}) assert_frame_equal(result, expected) # scalar to dict values = {'A': 0, 'B': -1, 'C': 'missing'} df = DataFrame({'A': [np.nan, 0, np.nan], 'B': [0, 2, 5], 'C': ['', 'asdf', 'fd']}) filled = df.replace(np.nan, values) expected = {} for k, v in compat.iteritems(df): expected[k] = v.replace(np.nan, values[k]) assert_frame_equal(filled, DataFrame(expected)) # list to list to_rep = [np.nan, 0, ''] values = [-2, -1, 'missing'] result = df.replace(to_rep, values) expected = df.copy() for i in range(len(to_rep)): expected.replace(to_rep[i], values[i], inplace=True) assert_frame_equal(result, expected) pytest.raises(ValueError, df.replace, to_rep, values[1:]) def test_replace_input_formats_scalar(self): df = DataFrame({'A': [np.nan, 0, np.inf], 'B': [0, 2, 5], 'C': ['', 'asdf', 'fd']}) # dict to scalar to_rep = {'A': np.nan, 'B': 0, 'C': ''} filled = df.replace(to_rep, 0) expected = {} for k, v in compat.iteritems(df): expected[k] = v.replace(to_rep[k], 0) assert_frame_equal(filled, DataFrame(expected)) pytest.raises(TypeError, df.replace, to_rep, [np.nan, 0, '']) # list to scalar to_rep = [np.nan, 0, ''] result = df.replace(to_rep, -1) expected = df.copy() for i in range(len(to_rep)): expected.replace(to_rep[i], -1, inplace=True) assert_frame_equal(result, expected) def test_replace_limit(self): pass def test_replace_dict_no_regex(self): answer = Series({0: 'Strongly Agree', 1: 'Agree', 2: 'Neutral', 3: 'Disagree', 4: 'Strongly Disagree'}) weights = {'Agree': 4, 'Disagree': 2, 'Neutral': 3, 'Strongly Agree': 5, 'Strongly Disagree': 1} expected = Series({0: 5, 1: 4, 2: 3, 3: 2, 4: 1}) result = answer.replace(weights) assert_series_equal(result, expected) def test_replace_series_no_regex(self): answer = Series({0: 'Strongly Agree', 1: 'Agree', 2: 'Neutral', 3: 'Disagree', 4: 'Strongly Disagree'}) weights = Series({'Agree': 4, 'Disagree': 2, 'Neutral': 3, 'Strongly Agree': 5, 'Strongly Disagree': 1}) expected = Series({0: 5, 1: 4, 2: 3, 3: 2, 4: 1}) result = answer.replace(weights) assert_series_equal(result, expected) def test_replace_dict_tuple_list_ordering_remains_the_same(self): df = DataFrame(dict(A=[nan, 1])) res1 = df.replace(to_replace={nan: 0, 1: -1e8}) res2 = df.replace(to_replace=(1, nan), value=[-1e8, 0]) res3 = df.replace(to_replace=[1, nan], value=[-1e8, 0]) expected = DataFrame({'A': [0, -1e8]}) assert_frame_equal(res1, res2) assert_frame_equal(res2, res3) assert_frame_equal(res3, expected) def test_replace_doesnt_replace_without_regex(self): raw = """fol T_opp T_Dir T_Enh 0 1 0 0 vo 1 2 vr 0 0 2 2 0 0 0 3 3 0 bt 0""" df = pd.read_csv(StringIO(raw), sep=r'\s+') res = df.replace({r'\D': 1}) assert_frame_equal(df, res) def test_replace_bool_with_string(self): df = DataFrame({'a': [True, False], 'b': list('ab')}) result = df.replace(True, 'a') expected = DataFrame({'a': ['a', False], 'b': df.b}) assert_frame_equal(result, expected) def test_replace_pure_bool_with_string_no_op(self): df = DataFrame(np.random.rand(2, 2) > 0.5) result = df.replace('asdf', 'fdsa') assert_frame_equal(df, result) def test_replace_bool_with_bool(self): df = DataFrame(np.random.rand(2, 2) > 0.5) result = df.replace(False, True) expected = DataFrame(np.ones((2, 2), dtype=bool)) assert_frame_equal(result, expected) def 
test_replace_with_dict_with_bool_keys(self): df = DataFrame({0: [True, False], 1: [False, True]}) with tm.assert_raises_regex(TypeError, 'Cannot compare types .+'): df.replace({'asdf': 'asdb', True: 'yes'}) def test_replace_truthy(self): df = DataFrame({'a': [True, True]}) r = df.replace([np.inf, -np.inf], np.nan) e = df assert_frame_equal(r, e) def test_replace_int_to_int_chain(self): df = DataFrame({'a': lrange(1, 5)}) with tm.assert_raises_regex(ValueError, "Replacement not allowed .+"): df.replace({'a': dict(zip(range(1, 5), range(2, 6)))}) def test_replace_str_to_str_chain(self): a = np.arange(1, 5) astr = a.astype(str) bstr = np.arange(2, 6).astype(str) df = DataFrame({'a': astr}) with tm.assert_raises_regex(ValueError, "Replacement not allowed .+"): df.replace({'a': dict(zip(astr, bstr))}) def test_replace_swapping_bug(self): df = pd.DataFrame({'a': [True, False, True]}) res = df.replace({'a': {True: 'Y', False: 'N'}}) expect = pd.DataFrame({'a': ['Y', 'N', 'Y']}) assert_frame_equal(res, expect) df = pd.DataFrame({'a': [0, 1, 0]}) res = df.replace({'a': {0: 'Y', 1: 'N'}}) expect = pd.DataFrame({'a': ['Y', 'N', 'Y']}) assert_frame_equal(res, expect) def test_replace_period(self): d = { 'fname': { 'out_augmented_AUG_2011.json': pd.Period(year=2011, month=8, freq='M'), 'out_augmented_JAN_2011.json': pd.Period(year=2011, month=1, freq='M'), 'out_augmented_MAY_2012.json': pd.Period(year=2012, month=5, freq='M'), 'out_augmented_SUBSIDY_WEEK.json': pd.Period(year=2011, month=4, freq='M'), 'out_augmented_AUG_2012.json': pd.Period(year=2012, month=8, freq='M'), 'out_augmented_MAY_2011.json': pd.Period(year=2011, month=5, freq='M'), 'out_augmented_SEP_2013.json': pd.Period(year=2013, month=9, freq='M')}} df = pd.DataFrame(['out_augmented_AUG_2012.json', 'out_augmented_SEP_2013.json', 'out_augmented_SUBSIDY_WEEK.json', 'out_augmented_MAY_2012.json', 'out_augmented_MAY_2011.json', 'out_augmented_AUG_2011.json', 'out_augmented_JAN_2011.json'], columns=['fname']) assert set(df.fname.values) == set(d['fname'].keys()) expected = DataFrame({'fname': [d['fname'][k] for k in df.fname.values]}) result = df.replace(d) assert_frame_equal(result, expected) def test_replace_datetime(self): d = {'fname': {'out_augmented_AUG_2011.json': pd.Timestamp('2011-08'), 'out_augmented_JAN_2011.json': pd.Timestamp('2011-01'), 'out_augmented_MAY_2012.json': pd.Timestamp('2012-05'), 'out_augmented_SUBSIDY_WEEK.json': pd.Timestamp('2011-04'), 'out_augmented_AUG_2012.json': pd.Timestamp('2012-08'), 'out_augmented_MAY_2011.json': pd.Timestamp('2011-05'), 'out_augmented_SEP_2013.json': pd.Timestamp('2013-09')}} df = pd.DataFrame(['out_augmented_AUG_2012.json', 'out_augmented_SEP_2013.json', 'out_augmented_SUBSIDY_WEEK.json', 'out_augmented_MAY_2012.json', 'out_augmented_MAY_2011.json', 'out_augmented_AUG_2011.json', 'out_augmented_JAN_2011.json'], columns=['fname']) assert set(df.fname.values) == set(d['fname'].keys()) expected = DataFrame({'fname': [d['fname'][k] for k in df.fname.values]}) result = df.replace(d) assert_frame_equal(result, expected) def test_replace_datetimetz(self): # GH 11326 # behaving poorly when presented with a datetime64[ns, tz] df = DataFrame({'A': date_range('20130101', periods=3, tz='US/Eastern'), 'B': [0, np.nan, 2]}) result = df.replace(np.nan, 1) expected = DataFrame({'A': date_range('20130101', periods=3, tz='US/Eastern'), 'B': Series([0, 1, 2], dtype='float64')}) assert_frame_equal(result, expected) result = df.fillna(1) assert_frame_equal(result, expected) result = df.replace(0, np.nan) 
expected = DataFrame({'A': date_range('20130101', periods=3, tz='US/Eastern'), 'B': [np.nan, np.nan, 2]}) assert_frame_equal(result, expected) result = df.replace(Timestamp('20130102', tz='US/Eastern'), Timestamp('20130104', tz='US/Eastern')) expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'), Timestamp('20130104', tz='US/Eastern'), Timestamp('20130103', tz='US/Eastern')], 'B': [0, np.nan, 2]}) assert_frame_equal(result, expected) result = df.copy() result.iloc[1, 0] = np.nan result = result.replace( {'A': pd.NaT}, Timestamp('20130104', tz='US/Eastern')) assert_frame_equal(result, expected) # coerce to object result = df.copy() result.iloc[1, 0] = np.nan result = result.replace( {'A': pd.NaT}, Timestamp('20130104', tz='US/Pacific')) expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'), Timestamp('20130104', tz='US/Pacific'), Timestamp('20130103', tz='US/Eastern')], 'B': [0, np.nan, 2]}) assert_frame_equal(result, expected) result = df.copy() result.iloc[1, 0] = np.nan result = result.replace({'A': np.nan}, Timestamp('20130104')) expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'), Timestamp('20130104'), Timestamp('20130103', tz='US/Eastern')], 'B': [0, np.nan, 2]}) assert_frame_equal(result, expected) def test_replace_with_empty_dictlike(self): # GH 15289 mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']} df = DataFrame(mix) assert_frame_equal(df, df.replace({})) assert_frame_equal(df, df.replace(Series([]))) assert_frame_equal(df, df.replace({'b': {}})) assert_frame_equal(df, df.replace(Series({'b': {}}))) @pytest.mark.parametrize("to_replace, method, expected", [ (0, 'bfill', {'A': [1, 1, 2], 'B': [5, nan, 7], 'C': ['a', 'b', 'c']}), (nan, 'bfill', {'A': [0, 1, 2], 'B': [5.0, 7.0, 7.0], 'C': ['a', 'b', 'c']}), ('d', 'ffill', {'A': [0, 1, 2], 'B': [5, nan, 7], 'C': ['a', 'b', 'c']}), ([0, 2], 'bfill', {'A': [1, 1, 2], 'B': [5, nan, 7], 'C': ['a', 'b', 'c']}), ([1, 2], 'pad', {'A': [0, 0, 0], 'B': [5, nan, 7], 'C': ['a', 'b', 'c']}), ((1, 2), 'bfill', {'A': [0, 2, 2], 'B': [5, nan, 7], 'C': ['a', 'b', 'c']}), (['b', 'c'], 'ffill', {'A': [0, 1, 2], 'B': [5, nan, 7], 'C': ['a', 'a', 'a']}), ]) def test_replace_method(self, to_replace, method, expected): # GH 19632 df = DataFrame({'A': [0, 1, 2], 'B': [5, nan, 7], 'C': ['a', 'b', 'c']}) result = df.replace(to_replace=to_replace, value=None, method=method) expected = DataFrame(expected) assert_frame_equal(result, expected)
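A minimal sketch of the regex-replacement behavior the suite above verifies, assuming a recent pandas: with regex=True the to_replace argument is treated as a pattern, and backreferences such as \1 may appear in the replacement value.

import numpy as np
import pandas as pd

df = pd.DataFrame({'a': ['a', 'b', ' . ', '.']})

# pattern -> value: whitespace-padded dots become NaN
print(df.replace(r'\s*\.\s*', np.nan, regex=True))

# pattern -> backreference: ' . ' and '.' both become '..'
print(df.replace(r'\s*(\.)\s*', r'\1\1', regex=True))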
pratapvardhan/pandas
pandas/tests/frame/test_replace.py
pandas/tests/indexes/multi/conftest.py
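A short sketch mirroring test_replace_datetimetz above (GH 11326); the comments restate what that test asserts, under the assumption of a recent pandas.

import numpy as np
import pandas as pd

df = pd.DataFrame({
    'A': pd.date_range('20130101', periods=3, tz='US/Eastern'),
    'B': [0, np.nan, 2],
})

# replacing with a Timestamp in the same tz keeps the datetime64[ns, tz] dtype
out = df.replace(pd.Timestamp('20130102', tz='US/Eastern'),
                 pd.Timestamp('20130104', tz='US/Eastern'))
print(out.dtypes)

# replacing with a different tz coerces column 'A' to object (per the test)
out = df.replace(pd.Timestamp('20130102', tz='US/Eastern'),
                 pd.Timestamp('20130104', tz='US/Pacific'))
print(out.dtypes)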
# Copyright (c) 2015-2016 Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from molecule.command import base


class List(base.Base):
    """
    Prints a list of currently available platforms

    Usage:
        list [--debug] ([-m]|[--porcelain])

    Options:
        --debug        get more detail
        -m             synonym for '--porcelain' (deprecated)
        --porcelain    machine readable output
    """

    def execute(self):
        porcelain = self.molecule._args['-m'] or self.molecule._args[
            '--porcelain']
        self.molecule._print_valid_platforms(porcelain=porcelain)

        return None, None
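The List command above resolves the deprecated -m flag against --porcelain; the sketch below shows that flag-resolution pattern with a hypothetical stand-in for the docopt args dict (it is not molecule's real parser output).

# hypothetical docopt-style result; molecule's actual _args dict may differ
args = {'-m': False, '--porcelain': True, '--debug': False}

# either spelling enables machine-readable output
porcelain = args['-m'] or args['--porcelain']
print('porcelain mode:', porcelain)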
# Copyright (c) 2015-2016 Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import re

import pytest

from molecule import ansible_galaxy
from molecule import config


@pytest.fixture()
def ansible_galaxy_instance(temp_files):
    confs = temp_files(fixtures=['molecule_vagrant_config'])
    c = config.Config(configs=confs)
    c.config['ansible']['requirements_file'] = 'requirements.yml'

    return ansible_galaxy.AnsibleGalaxy(c.config)


def test_add_env_arg(ansible_galaxy_instance):
    ansible_galaxy_instance.add_env_arg('MOLECULE_1', 'test')

    assert 'test' == ansible_galaxy_instance.env['MOLECULE_1']


def test_install(mocker, ansible_galaxy_instance):
    mocked = mocker.patch('molecule.ansible_galaxy.AnsibleGalaxy.execute')
    ansible_galaxy_instance.install()

    # a bare `mocked.assert_called_once` (no parentheses) is a silent no-op
    # on older mock versions; assert on the recorded call count instead
    assert mocked.call_count == 1

    # NOTE(retr0h): The following is a somewhat gross test, but need to
    # handle **kwargs expansion being unordered.
    pieces = str(ansible_galaxy_instance._galaxy).split()
    expected = ['--force', '--role-file=requirements.yml',
                '--roles-path=test/roles']

    assert re.search(r'ansible-galaxy', pieces[0])
    assert 'install' == pieces[1]
    assert expected == sorted(pieces[2:])


def test_install_overrides(mocker, ansible_galaxy_instance):
    ansible_galaxy_instance._config['ansible']['galaxy'] = {'foo': 'bar',
                                                            'force': False}
    mocked = mocker.patch('molecule.ansible_galaxy.AnsibleGalaxy.execute')
    ansible_galaxy_instance.install()

    assert mocked.call_count == 1

    pieces = str(ansible_galaxy_instance._galaxy).split()
    expected = ['--foo=bar', '--role-file=requirements.yml',
                '--roles-path=test/roles']

    assert expected == sorted(pieces[2:])
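A self-contained sketch of the pytest-mock pattern used above; the Galaxy class is a made-up stand-in for molecule.ansible_galaxy.AnsibleGalaxy, included only so the snippet runs on its own under pytest with pytest-mock installed.

# hypothetical stand-in class, not molecule's real implementation
class Galaxy:
    def install(self):
        # install() delegates to execute(), which the test patches out
        self.execute()

    def execute(self):
        raise RuntimeError('should be patched out in tests')


def test_install_calls_execute(mocker):
    # patch the method on the class, then trigger it indirectly
    mocked = mocker.patch.object(Galaxy, 'execute')
    Galaxy().install()

    # check the recorded call count rather than using a bare
    # `mocked.assert_called_once` attribute access, which asserts nothing
    assert mocked.call_count == 1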
rjfellman/molecule
test/unit/test_ansible_galaxy.py
molecule/command/list.py
# Created by Pearu Peterson, June 2003 import numpy as np from numpy.testing import (assert_equal, assert_almost_equal, assert_array_equal, assert_array_almost_equal, assert_allclose, suppress_warnings) from pytest import raises as assert_raises from numpy import array, diff, linspace, meshgrid, ones, pi, shape from scipy.interpolate.fitpack import bisplrep, bisplev from scipy.interpolate.fitpack2 import (UnivariateSpline, LSQUnivariateSpline, InterpolatedUnivariateSpline, LSQBivariateSpline, SmoothBivariateSpline, RectBivariateSpline, LSQSphereBivariateSpline, SmoothSphereBivariateSpline, RectSphereBivariateSpline) class TestUnivariateSpline: def test_linear_constant(self): x = [1,2,3] y = [3,3,3] lut = UnivariateSpline(x,y,k=1) assert_array_almost_equal(lut.get_knots(),[1,3]) assert_array_almost_equal(lut.get_coeffs(),[3,3]) assert_almost_equal(lut.get_residual(),0.0) assert_array_almost_equal(lut([1,1.5,2]),[3,3,3]) def test_preserve_shape(self): x = [1, 2, 3] y = [0, 2, 4] lut = UnivariateSpline(x, y, k=1) arg = 2 assert_equal(shape(arg), shape(lut(arg))) assert_equal(shape(arg), shape(lut(arg, nu=1))) arg = [1.5, 2, 2.5] assert_equal(shape(arg), shape(lut(arg))) assert_equal(shape(arg), shape(lut(arg, nu=1))) def test_linear_1d(self): x = [1,2,3] y = [0,2,4] lut = UnivariateSpline(x,y,k=1) assert_array_almost_equal(lut.get_knots(),[1,3]) assert_array_almost_equal(lut.get_coeffs(),[0,4]) assert_almost_equal(lut.get_residual(),0.0) assert_array_almost_equal(lut([1,1.5,2]),[0,1,2]) def test_subclassing(self): # See #731 class ZeroSpline(UnivariateSpline): def __call__(self, x): return 0*array(x) sp = ZeroSpline([1,2,3,4,5], [3,2,3,2,3], k=2) assert_array_equal(sp([1.5, 2.5]), [0., 0.]) def test_empty_input(self): # Test whether empty input returns an empty output. Ticket 1014 x = [1,3,5,7,9] y = [0,4,9,12,21] spl = UnivariateSpline(x, y, k=3) assert_array_equal(spl([]), array([])) def test_resize_regression(self): """Regression test for #1375.""" x = [-1., -0.65016502, -0.58856235, -0.26903553, -0.17370892, -0.10011001, 0., 0.10011001, 0.17370892, 0.26903553, 0.58856235, 0.65016502, 1.] y = [1.,0.62928599, 0.5797223, 0.39965815, 0.36322694, 0.3508061, 0.35214793, 0.3508061, 0.36322694, 0.39965815, 0.5797223, 0.62928599, 1.] w = [1.00000000e+12, 6.88875973e+02, 4.89314737e+02, 4.26864807e+02, 6.07746770e+02, 4.51341444e+02, 3.17480210e+02, 4.51341444e+02, 6.07746770e+02, 4.26864807e+02, 4.89314737e+02, 6.88875973e+02, 1.00000000e+12] spl = UnivariateSpline(x=x, y=y, w=w, s=None) desired = array([0.35100374, 0.51715855, 0.87789547, 0.98719344]) assert_allclose(spl([0.1, 0.5, 0.9, 0.99]), desired, atol=5e-4) def test_out_of_range_regression(self): # Test different extrapolation modes. 
See ticket 3557 x = np.arange(5, dtype=float) y = x**3 xp = linspace(-8, 13, 100) xp_zeros = xp.copy() xp_zeros[np.logical_or(xp_zeros < 0., xp_zeros > 4.)] = 0 xp_clip = xp.copy() xp_clip[xp_clip < x[0]] = x[0] xp_clip[xp_clip > x[-1]] = x[-1] for cls in [UnivariateSpline, InterpolatedUnivariateSpline]: spl = cls(x=x, y=y) for ext in [0, 'extrapolate']: assert_allclose(spl(xp, ext=ext), xp**3, atol=1e-16) assert_allclose(cls(x, y, ext=ext)(xp), xp**3, atol=1e-16) for ext in [1, 'zeros']: assert_allclose(spl(xp, ext=ext), xp_zeros**3, atol=1e-16) assert_allclose(cls(x, y, ext=ext)(xp), xp_zeros**3, atol=1e-16) for ext in [2, 'raise']: assert_raises(ValueError, spl, xp, **dict(ext=ext)) for ext in [3, 'const']: assert_allclose(spl(xp, ext=ext), xp_clip**3, atol=1e-16) assert_allclose(cls(x, y, ext=ext)(xp), xp_clip**3, atol=1e-16) # also test LSQUnivariateSpline [which needs explicit knots] t = spl.get_knots()[3:4] # interior knots w/ default k=3 spl = LSQUnivariateSpline(x, y, t) assert_allclose(spl(xp, ext=0), xp**3, atol=1e-16) assert_allclose(spl(xp, ext=1), xp_zeros**3, atol=1e-16) assert_raises(ValueError, spl, xp, **dict(ext=2)) assert_allclose(spl(xp, ext=3), xp_clip**3, atol=1e-16) # also make sure that unknown values for `ext` are caught early for ext in [-1, 'unknown']: spl = UnivariateSpline(x, y) assert_raises(ValueError, spl, xp, **dict(ext=ext)) assert_raises(ValueError, UnivariateSpline, **dict(x=x, y=y, ext=ext)) def test_lsq_fpchec(self): xs = np.arange(100) * 1. ys = np.arange(100) * 1. knots = np.linspace(0, 99, 10) bbox = (-1, 101) assert_raises(ValueError, LSQUnivariateSpline, xs, ys, knots, bbox=bbox) def test_derivative_and_antiderivative(self): # Thin wrappers to splder/splantider, so light smoke test only. x = np.linspace(0, 1, 70)**3 y = np.cos(x) spl = UnivariateSpline(x, y, s=0) spl2 = spl.antiderivative(2).derivative(2) assert_allclose(spl(0.3), spl2(0.3)) spl2 = spl.antiderivative(1) assert_allclose(spl2(0.6) - spl2(0.2), spl.integral(0.2, 0.6)) def test_derivative_extrapolation(self): # Regression test for gh-10195: for a const-extrapolation spline # its derivative evaluates to zero for extrapolation x_values = [1, 2, 4, 6, 8.5] y_values = [0.5, 0.8, 1.3, 2.5, 5] f = UnivariateSpline(x_values, y_values, ext='const', k=3) x = [-1, 0, -0.5, 9, 9.5, 10] assert_allclose(f.derivative()(x), 0, atol=1e-15) def test_integral_out_of_bounds(self): # Regression test for gh-7906: .integral(a, b) is wrong if both # a and b are out-of-bounds x = np.linspace(0., 1., 7) for ext in range(4): f = UnivariateSpline(x, x, s=0, ext=ext) for (a, b) in [(1, 1), (1, 5), (2, 5), (0, 0), (-2, 0), (-2, -1)]: assert_allclose(f.integral(a, b), 0, atol=1e-15) def test_nan(self): # bail out early if the input data contains nans x = np.arange(10, dtype=float) y = x**3 w = np.ones_like(x) # also test LSQUnivariateSpline [which needs explicit knots] spl = UnivariateSpline(x, y, check_finite=True) t = spl.get_knots()[3:4] # interior knots w/ default k=3 y_end = y[-1] for z in [np.nan, np.inf, -np.inf]: y[-1] = z assert_raises(ValueError, UnivariateSpline, **dict(x=x, y=y, check_finite=True)) assert_raises(ValueError, InterpolatedUnivariateSpline, **dict(x=x, y=y, check_finite=True)) assert_raises(ValueError, LSQUnivariateSpline, **dict(x=x, y=y, t=t, check_finite=True)) y[-1] = y_end # check valid y but invalid w w[-1] = z assert_raises(ValueError, UnivariateSpline, **dict(x=x, y=y, w=w, check_finite=True)) assert_raises(ValueError, InterpolatedUnivariateSpline, **dict(x=x, y=y, w=w, 
check_finite=True)) assert_raises(ValueError, LSQUnivariateSpline, **dict(x=x, y=y, t=t, w=w, check_finite=True)) def test_strictly_increasing_x(self): # Test the x is required to be strictly increasing for # UnivariateSpline if s=0 and for InterpolatedUnivariateSpline, # but merely increasing for UnivariateSpline if s>0 # and for LSQUnivariateSpline; see gh-8535 xx = np.arange(10, dtype=float) yy = xx**3 x = np.arange(10, dtype=float) x[1] = x[0] y = x**3 w = np.ones_like(x) # also test LSQUnivariateSpline [which needs explicit knots] spl = UnivariateSpline(xx, yy, check_finite=True) t = spl.get_knots()[3:4] # interior knots w/ default k=3 UnivariateSpline(x=x, y=y, w=w, s=1, check_finite=True) LSQUnivariateSpline(x=x, y=y, t=t, w=w, check_finite=True) assert_raises(ValueError, UnivariateSpline, **dict(x=x, y=y, s=0, check_finite=True)) assert_raises(ValueError, InterpolatedUnivariateSpline, **dict(x=x, y=y, check_finite=True)) def test_increasing_x(self): # Test that x is required to be increasing, see gh-8535 xx = np.arange(10, dtype=float) yy = xx**3 x = np.arange(10, dtype=float) x[1] = x[0] - 1.0 y = x**3 w = np.ones_like(x) # also test LSQUnivariateSpline [which needs explicit knots] spl = UnivariateSpline(xx, yy, check_finite=True) t = spl.get_knots()[3:4] # interior knots w/ default k=3 assert_raises(ValueError, UnivariateSpline, **dict(x=x, y=y, check_finite=True)) assert_raises(ValueError, InterpolatedUnivariateSpline, **dict(x=x, y=y, check_finite=True)) assert_raises(ValueError, LSQUnivariateSpline, **dict(x=x, y=y, t=t, w=w, check_finite=True)) def test_invalid_input_for_univariate_spline(self): with assert_raises(ValueError) as info: x_values = [1, 2, 4, 6, 8.5] y_values = [0.5, 0.8, 1.3, 2.5] UnivariateSpline(x_values, y_values) assert "x and y should have a same length" in str(info.value) with assert_raises(ValueError) as info: x_values = [1, 2, 4, 6, 8.5] y_values = [0.5, 0.8, 1.3, 2.5, 2.8] w_values = [-1.0, 1.0, 1.0, 1.0] UnivariateSpline(x_values, y_values, w=w_values) assert "x, y, and w should have a same length" in str(info.value) with assert_raises(ValueError) as info: bbox = (-1) UnivariateSpline(x_values, y_values, bbox=bbox) assert "bbox shape should be (2,)" in str(info.value) with assert_raises(ValueError) as info: UnivariateSpline(x_values, y_values, k=6) assert "k should be 1 <= k <= 5" in str(info.value) with assert_raises(ValueError) as info: UnivariateSpline(x_values, y_values, s=-1.0) assert "s should be s >= 0.0" in str(info.value) def test_invalid_input_for_interpolated_univariate_spline(self): with assert_raises(ValueError) as info: x_values = [1, 2, 4, 6, 8.5] y_values = [0.5, 0.8, 1.3, 2.5] InterpolatedUnivariateSpline(x_values, y_values) assert "x and y should have a same length" in str(info.value) with assert_raises(ValueError) as info: x_values = [1, 2, 4, 6, 8.5] y_values = [0.5, 0.8, 1.3, 2.5, 2.8] w_values = [-1.0, 1.0, 1.0, 1.0] InterpolatedUnivariateSpline(x_values, y_values, w=w_values) assert "x, y, and w should have a same length" in str(info.value) with assert_raises(ValueError) as info: bbox = (-1) InterpolatedUnivariateSpline(x_values, y_values, bbox=bbox) assert "bbox shape should be (2,)" in str(info.value) with assert_raises(ValueError) as info: InterpolatedUnivariateSpline(x_values, y_values, k=6) assert "k should be 1 <= k <= 5" in str(info.value) def test_invalid_input_for_lsq_univariate_spline(self): x_values = [1, 2, 4, 6, 8.5] y_values = [0.5, 0.8, 1.3, 2.5, 2.8] spl = UnivariateSpline(x_values, y_values, check_finite=True) 
t_values = spl.get_knots()[3:4] # interior knots w/ default k=3 with assert_raises(ValueError) as info: x_values = [1, 2, 4, 6, 8.5] y_values = [0.5, 0.8, 1.3, 2.5] LSQUnivariateSpline(x_values, y_values, t_values) assert "x and y should have a same length" in str(info.value) with assert_raises(ValueError) as info: x_values = [1, 2, 4, 6, 8.5] y_values = [0.5, 0.8, 1.3, 2.5, 2.8] w_values = [1.0, 1.0, 1.0, 1.0] LSQUnivariateSpline(x_values, y_values, t_values, w=w_values) assert "x, y, and w should have a same length" in str(info.value) with assert_raises(ValueError) as info: bbox = (100, -100) LSQUnivariateSpline(x_values, y_values, t_values, bbox=bbox) assert "Interior knots t must satisfy Schoenberg-Whitney conditions" in str(info.value) with assert_raises(ValueError) as info: bbox = (-1) LSQUnivariateSpline(x_values, y_values, t_values, bbox=bbox) assert "bbox shape should be (2,)" in str(info.value) with assert_raises(ValueError) as info: LSQUnivariateSpline(x_values, y_values, t_values, k=6) assert "k should be 1 <= k <= 5" in str(info.value) def test_array_like_input(self): x_values = np.array([1, 2, 4, 6, 8.5]) y_values = np.array([0.5, 0.8, 1.3, 2.5, 2.8]) w_values = np.array([1.0, 1.0, 1.0, 1.0, 1.0]) bbox = np.array([-100, 100]) # np.array input spl1 = UnivariateSpline(x=x_values, y=y_values, w=w_values, bbox=bbox) # list input spl2 = UnivariateSpline(x=x_values.tolist(), y=y_values.tolist(), w=w_values.tolist(), bbox=bbox.tolist()) assert_allclose(spl1([0.1, 0.5, 0.9, 0.99]), spl2([0.1, 0.5, 0.9, 0.99])) class TestLSQBivariateSpline: # NOTE: The systems in this test class are rank-deficient def test_linear_constant(self): x = [1,1,1,2,2,2,3,3,3] y = [1,2,3,1,2,3,1,2,3] z = [3,3,3,3,3,3,3,3,3] s = 0.1 tx = [1+s,3-s] ty = [1+s,3-s] with suppress_warnings() as sup: r = sup.record(UserWarning, "\nThe coefficients of the spline") lut = LSQBivariateSpline(x,y,z,tx,ty,kx=1,ky=1) assert_equal(len(r), 1) assert_almost_equal(lut(2,2), 3.) def test_bilinearity(self): x = [1,1,1,2,2,2,3,3,3] y = [1,2,3,1,2,3,1,2,3] z = [0,7,8,3,4,7,1,3,4] s = 0.1 tx = [1+s,3-s] ty = [1+s,3-s] with suppress_warnings() as sup: # This seems to fail (ier=1, see ticket 1642). sup.filter(UserWarning, "\nThe coefficients of the spline") lut = LSQBivariateSpline(x,y,z,tx,ty,kx=1,ky=1) tx, ty = lut.get_knots() for xa, xb in zip(tx[:-1], tx[1:]): for ya, yb in zip(ty[:-1], ty[1:]): for t in [0.1, 0.5, 0.9]: for s in [0.3, 0.4, 0.7]: xp = xa*(1-t) + xb*t yp = ya*(1-s) + yb*s zp = (+ lut(xa, ya)*(1-t)*(1-s) + lut(xb, ya)*t*(1-s) + lut(xa, yb)*(1-t)*s + lut(xb, yb)*t*s) assert_almost_equal(lut(xp,yp), zp) def test_integral(self): x = [1,1,1,2,2,2,8,8,8] y = [1,2,3,1,2,3,1,2,3] z = array([0,7,8,3,4,7,1,3,4]) s = 0.1 tx = [1+s,3-s] ty = [1+s,3-s] with suppress_warnings() as sup: r = sup.record(UserWarning, "\nThe coefficients of the spline") lut = LSQBivariateSpline(x, y, z, tx, ty, kx=1, ky=1) assert_equal(len(r), 1) tx, ty = lut.get_knots() tz = lut(tx, ty) trpz = .25*(diff(tx)[:,None]*diff(ty)[None,:] * (tz[:-1,:-1]+tz[1:,:-1]+tz[:-1,1:]+tz[1:,1:])).sum() assert_almost_equal(lut.integral(tx[0], tx[-1], ty[0], ty[-1]), trpz) def test_empty_input(self): # Test whether empty inputs returns an empty output. 
Ticket 1014 x = [1,1,1,2,2,2,3,3,3] y = [1,2,3,1,2,3,1,2,3] z = [3,3,3,3,3,3,3,3,3] s = 0.1 tx = [1+s,3-s] ty = [1+s,3-s] with suppress_warnings() as sup: r = sup.record(UserWarning, "\nThe coefficients of the spline") lut = LSQBivariateSpline(x, y, z, tx, ty, kx=1, ky=1) assert_equal(len(r), 1) assert_array_equal(lut([], []), np.zeros((0,0))) assert_array_equal(lut([], [], grid=False), np.zeros((0,))) def test_invalid_input(self): s = 0.1 tx = [1 + s, 3 - s] ty = [1 + s, 3 - s] with assert_raises(ValueError) as info: x = np.linspace(1.0, 10.0) y = np.linspace(1.0, 10.0) z = np.linspace(1.0, 10.0, num=10) LSQBivariateSpline(x, y, z, tx, ty) assert "x, y, and z should have a same length" in str(info.value) with assert_raises(ValueError) as info: x = np.linspace(1.0, 10.0) y = np.linspace(1.0, 10.0) z = np.linspace(1.0, 10.0) w = np.linspace(1.0, 10.0, num=20) LSQBivariateSpline(x, y, z, tx, ty, w=w) assert "x, y, z, and w should have a same length" in str(info.value) with assert_raises(ValueError) as info: w = np.linspace(-1.0, 10.0) LSQBivariateSpline(x, y, z, tx, ty, w=w) assert "w should be positive" in str(info.value) with assert_raises(ValueError) as info: bbox = (-100, 100, -100) LSQBivariateSpline(x, y, z, tx, ty, bbox=bbox) assert "bbox shape should be (4,)" in str(info.value) with assert_raises(ValueError) as info: LSQBivariateSpline(x, y, z, tx, ty, kx=10, ky=10) assert "The length of x, y and z should be at least (kx+1) * (ky+1)" in \ str(info.value) with assert_raises(ValueError) as exc_info: LSQBivariateSpline(x, y, z, tx, ty, eps=0.0) assert "eps should be between (0, 1)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: LSQBivariateSpline(x, y, z, tx, ty, eps=1.0) assert "eps should be between (0, 1)" in str(exc_info.value) def test_array_like_input(self): s = 0.1 tx = np.array([1 + s, 3 - s]) ty = np.array([1 + s, 3 - s]) x = np.linspace(1.0, 10.0) y = np.linspace(1.0, 10.0) z = np.linspace(1.0, 10.0) w = np.linspace(1.0, 10.0) bbox = np.array([1.0, 10.0, 1.0, 10.0]) with suppress_warnings() as sup: r = sup.record(UserWarning, "\nThe coefficients of the spline") # np.array input spl1 = LSQBivariateSpline(x, y, z, tx, ty, w=w, bbox=bbox) # list input spl2 = LSQBivariateSpline(x.tolist(), y.tolist(), z.tolist(), tx.tolist(), ty.tolist(), w=w.tolist(), bbox=bbox) assert_allclose(spl1(2.0, 2.0), spl2(2.0, 2.0)) assert_equal(len(r), 2) def test_unequal_length_of_knots(self): """Test for the case when the input knot-location arrays in x and y are of different lengths. 
""" x, y = np.mgrid[0:100, 0:100] x = x.ravel() y = y.ravel() z = 3.0 * np.ones_like(x) tx = np.linspace(0.1, 98.0, 29) ty = np.linspace(0.1, 98.0, 33) with suppress_warnings() as sup: r = sup.record(UserWarning, "\nThe coefficients of the spline") lut = LSQBivariateSpline(x,y,z,tx,ty) assert_equal(len(r), 1) assert_almost_equal(lut(x, y, grid=False), z) class TestSmoothBivariateSpline: def test_linear_constant(self): x = [1,1,1,2,2,2,3,3,3] y = [1,2,3,1,2,3,1,2,3] z = [3,3,3,3,3,3,3,3,3] lut = SmoothBivariateSpline(x,y,z,kx=1,ky=1) assert_array_almost_equal(lut.get_knots(),([1,1,3,3],[1,1,3,3])) assert_array_almost_equal(lut.get_coeffs(),[3,3,3,3]) assert_almost_equal(lut.get_residual(),0.0) assert_array_almost_equal(lut([1,1.5,2],[1,1.5]),[[3,3],[3,3],[3,3]]) def test_linear_1d(self): x = [1,1,1,2,2,2,3,3,3] y = [1,2,3,1,2,3,1,2,3] z = [0,0,0,2,2,2,4,4,4] lut = SmoothBivariateSpline(x,y,z,kx=1,ky=1) assert_array_almost_equal(lut.get_knots(),([1,1,3,3],[1,1,3,3])) assert_array_almost_equal(lut.get_coeffs(),[0,0,4,4]) assert_almost_equal(lut.get_residual(),0.0) assert_array_almost_equal(lut([1,1.5,2],[1,1.5]),[[0,0],[1,1],[2,2]]) def test_integral(self): x = [1,1,1,2,2,2,4,4,4] y = [1,2,3,1,2,3,1,2,3] z = array([0,7,8,3,4,7,1,3,4]) with suppress_warnings() as sup: # This seems to fail (ier=1, see ticket 1642). sup.filter(UserWarning, "\nThe required storage space") lut = SmoothBivariateSpline(x, y, z, kx=1, ky=1, s=0) tx = [1,2,4] ty = [1,2,3] tz = lut(tx, ty) trpz = .25*(diff(tx)[:,None]*diff(ty)[None,:] * (tz[:-1,:-1]+tz[1:,:-1]+tz[:-1,1:]+tz[1:,1:])).sum() assert_almost_equal(lut.integral(tx[0], tx[-1], ty[0], ty[-1]), trpz) lut2 = SmoothBivariateSpline(x, y, z, kx=2, ky=2, s=0) assert_almost_equal(lut2.integral(tx[0], tx[-1], ty[0], ty[-1]), trpz, decimal=0) # the quadratures give 23.75 and 23.85 tz = lut(tx[:-1], ty[:-1]) trpz = .25*(diff(tx[:-1])[:,None]*diff(ty[:-1])[None,:] * (tz[:-1,:-1]+tz[1:,:-1]+tz[:-1,1:]+tz[1:,1:])).sum() assert_almost_equal(lut.integral(tx[0], tx[-2], ty[0], ty[-2]), trpz) def test_rerun_lwrk2_too_small(self): # in this setting, lwrk2 is too small in the default run. Here we # check for equality with the bisplrep/bisplev output because there, # an automatic re-run of the spline representation is done if ier>10. 
x = np.linspace(-2, 2, 80) y = np.linspace(-2, 2, 80) z = x + y xi = np.linspace(-1, 1, 100) yi = np.linspace(-2, 2, 100) tck = bisplrep(x, y, z) res1 = bisplev(xi, yi, tck) interp_ = SmoothBivariateSpline(x, y, z) res2 = interp_(xi, yi) assert_almost_equal(res1, res2) def test_invalid_input(self): with assert_raises(ValueError) as info: x = np.linspace(1.0, 10.0) y = np.linspace(1.0, 10.0) z = np.linspace(1.0, 10.0, num=10) SmoothBivariateSpline(x, y, z) assert "x, y, and z should have a same length" in str(info.value) with assert_raises(ValueError) as info: x = np.linspace(1.0, 10.0) y = np.linspace(1.0, 10.0) z = np.linspace(1.0, 10.0) w = np.linspace(1.0, 10.0, num=20) SmoothBivariateSpline(x, y, z, w=w) assert "x, y, z, and w should have a same length" in str(info.value) with assert_raises(ValueError) as info: w = np.linspace(-1.0, 10.0) SmoothBivariateSpline(x, y, z, w=w) assert "w should be positive" in str(info.value) with assert_raises(ValueError) as info: bbox = (-100, 100, -100) SmoothBivariateSpline(x, y, z, bbox=bbox) assert "bbox shape should be (4,)" in str(info.value) with assert_raises(ValueError) as info: SmoothBivariateSpline(x, y, z, kx=10, ky=10) assert "The length of x, y and z should be at least (kx+1) * (ky+1)" in\ str(info.value) with assert_raises(ValueError) as info: SmoothBivariateSpline(x, y, z, s=-1.0) assert "s should be s >= 0.0" in str(info.value) with assert_raises(ValueError) as exc_info: SmoothBivariateSpline(x, y, z, eps=0.0) assert "eps should be between (0, 1)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: SmoothBivariateSpline(x, y, z, eps=1.0) assert "eps should be between (0, 1)" in str(exc_info.value) def test_array_like_input(self): x = np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]) y = np.array([1, 2, 3, 1, 2, 3, 1, 2, 3]) z = np.array([3, 3, 3, 3, 3, 3, 3, 3, 3]) w = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1]) bbox = np.array([1.0, 3.0, 1.0, 3.0]) # np.array input spl1 = SmoothBivariateSpline(x, y, z, w=w, bbox=bbox, kx=1, ky=1) # list input spl2 = SmoothBivariateSpline(x.tolist(), y.tolist(), z.tolist(), bbox=bbox.tolist(), w=w.tolist(), kx=1, ky=1) assert_allclose(spl1(0.1, 0.5), spl2(0.1, 0.5)) class TestLSQSphereBivariateSpline: def setup_method(self): # define the input data and coordinates ntheta, nphi = 70, 90 theta = linspace(0.5/(ntheta - 1), 1 - 0.5/(ntheta - 1), ntheta) * pi phi = linspace(0.5/(nphi - 1), 1 - 0.5/(nphi - 1), nphi) * 2. * pi data = ones((theta.shape[0], phi.shape[0])) # define knots and extract data values at the knots knotst = theta[::5] knotsp = phi[::5] knotdata = data[::5, ::5] # calculate spline coefficients lats, lons = meshgrid(theta, phi) lut_lsq = LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), knotst, knotsp) self.lut_lsq = lut_lsq self.data = knotdata self.new_lons, self.new_lats = knotsp, knotst def test_linear_constant(self): assert_almost_equal(self.lut_lsq.get_residual(), 0.0) assert_array_almost_equal(self.lut_lsq(self.new_lats, self.new_lons), self.data) def test_empty_input(self): assert_array_almost_equal(self.lut_lsq([], []), np.zeros((0,0))) assert_array_almost_equal(self.lut_lsq([], [], grid=False), np.zeros((0,))) def test_invalid_input(self): ntheta, nphi = 70, 90 theta = linspace(0.5 / (ntheta - 1), 1 - 0.5 / (ntheta - 1), ntheta) * pi phi = linspace(0.5 / (nphi - 1), 1 - 0.5 / (nphi - 1), nphi) * 2. 
* pi data = ones((theta.shape[0], phi.shape[0])) # define knots and extract data values at the knots knotst = theta[::5] knotsp = phi[::5] with assert_raises(ValueError) as exc_info: invalid_theta = linspace(-0.1, 1.0, num=ntheta) * pi invalid_lats, lons = meshgrid(invalid_theta, phi) LSQSphereBivariateSpline(invalid_lats.ravel(), lons.ravel(), data.T.ravel(), knotst, knotsp) assert "theta should be between [0, pi]" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_theta = linspace(0.1, 1.1, num=ntheta) * pi invalid_lats, lons = meshgrid(invalid_theta, phi) LSQSphereBivariateSpline(invalid_lats.ravel(), lons.ravel(), data.T.ravel(), knotst, knotsp) assert "theta should be between [0, pi]" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_phi = linspace(-0.1, 1.0, num=ntheta) * 2.0 * pi lats, invalid_lons = meshgrid(theta, invalid_phi) LSQSphereBivariateSpline(lats.ravel(), invalid_lons.ravel(), data.T.ravel(), knotst, knotsp) assert "phi should be between [0, 2pi]" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_phi = linspace(0.0, 1.1, num=ntheta) * 2.0 * pi lats, invalid_lons = meshgrid(theta, invalid_phi) LSQSphereBivariateSpline(lats.ravel(), invalid_lons.ravel(), data.T.ravel(), knotst, knotsp) assert "phi should be between [0, 2pi]" in str(exc_info.value) lats, lons = meshgrid(theta, phi) with assert_raises(ValueError) as exc_info: invalid_knotst = np.copy(knotst) invalid_knotst[0] = -0.1 LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), invalid_knotst, knotsp) assert "tt should be between (0, pi)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_knotst = np.copy(knotst) invalid_knotst[0] = pi LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), invalid_knotst, knotsp) assert "tt should be between (0, pi)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_knotsp = np.copy(knotsp) invalid_knotsp[0] = -0.1 LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), knotst, invalid_knotsp) assert "tp should be between (0, 2pi)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_knotsp = np.copy(knotsp) invalid_knotsp[0] = 2 * pi LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), knotst, invalid_knotsp) assert "tp should be between (0, 2pi)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_w = array([-1.0, 1.0, 1.5, 0.5, 1.0, 1.5, 0.5, 1.0, 1.0]) LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), knotst, knotsp, w=invalid_w) assert "w should be positive" in str(exc_info.value) with assert_raises(ValueError) as exc_info: LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), knotst, knotsp, eps=0.0) assert "eps should be between (0, 1)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), knotst, knotsp, eps=1.0) assert "eps should be between (0, 1)" in str(exc_info.value) def test_array_like_input(self): ntheta, nphi = 70, 90 theta = linspace(0.5 / (ntheta - 1), 1 - 0.5 / (ntheta - 1), ntheta) * pi phi = linspace(0.5 / (nphi - 1), 1 - 0.5 / (nphi - 1), nphi) * 2. 
* pi lats, lons = meshgrid(theta, phi) data = ones((theta.shape[0], phi.shape[0])) # define knots and extract data values at the knots knotst = theta[::5] knotsp = phi[::5] w = ones((lats.ravel().shape[0])) # np.array input spl1 = LSQSphereBivariateSpline(lats.ravel(), lons.ravel(), data.T.ravel(), knotst, knotsp, w=w) # list input spl2 = LSQSphereBivariateSpline(lats.ravel().tolist(), lons.ravel().tolist(), data.T.ravel().tolist(), knotst.tolist(), knotsp.tolist(), w=w.tolist()) assert_array_almost_equal(spl1(1.0, 1.0), spl2(1.0, 1.0)) class TestSmoothSphereBivariateSpline: def setup_method(self): theta = array([.25*pi, .25*pi, .25*pi, .5*pi, .5*pi, .5*pi, .75*pi, .75*pi, .75*pi]) phi = array([.5 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi]) r = array([3, 3, 3, 3, 3, 3, 3, 3, 3]) self.lut = SmoothSphereBivariateSpline(theta, phi, r, s=1E10) def test_linear_constant(self): assert_almost_equal(self.lut.get_residual(), 0.) assert_array_almost_equal(self.lut([1, 1.5, 2],[1, 1.5]), [[3, 3], [3, 3], [3, 3]]) def test_empty_input(self): assert_array_almost_equal(self.lut([], []), np.zeros((0,0))) assert_array_almost_equal(self.lut([], [], grid=False), np.zeros((0,))) def test_invalid_input(self): theta = array([.25 * pi, .25 * pi, .25 * pi, .5 * pi, .5 * pi, .5 * pi, .75 * pi, .75 * pi, .75 * pi]) phi = array([.5 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi]) r = array([3, 3, 3, 3, 3, 3, 3, 3, 3]) with assert_raises(ValueError) as exc_info: invalid_theta = array([-0.1 * pi, .25 * pi, .25 * pi, .5 * pi, .5 * pi, .5 * pi, .75 * pi, .75 * pi, .75 * pi]) SmoothSphereBivariateSpline(invalid_theta, phi, r, s=1E10) assert "theta should be between [0, pi]" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_theta = array([.25 * pi, .25 * pi, .25 * pi, .5 * pi, .5 * pi, .5 * pi, .75 * pi, .75 * pi, 1.1 * pi]) SmoothSphereBivariateSpline(invalid_theta, phi, r, s=1E10) assert "theta should be between [0, pi]" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_phi = array([-.1 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi]) SmoothSphereBivariateSpline(theta, invalid_phi, r, s=1E10) assert "phi should be between [0, 2pi]" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_phi = array([1.0 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi, .5 * pi, pi, 2.1 * pi]) SmoothSphereBivariateSpline(theta, invalid_phi, r, s=1E10) assert "phi should be between [0, 2pi]" in str(exc_info.value) with assert_raises(ValueError) as exc_info: invalid_w = array([-1.0, 1.0, 1.5, 0.5, 1.0, 1.5, 0.5, 1.0, 1.0]) SmoothSphereBivariateSpline(theta, phi, r, w=invalid_w, s=1E10) assert "w should be positive" in str(exc_info.value) with assert_raises(ValueError) as exc_info: SmoothSphereBivariateSpline(theta, phi, r, s=-1.0) assert "s should be positive" in str(exc_info.value) with assert_raises(ValueError) as exc_info: SmoothSphereBivariateSpline(theta, phi, r, eps=-1.0) assert "eps should be between (0, 1)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: SmoothSphereBivariateSpline(theta, phi, r, eps=1.0) assert "eps should be between (0, 1)" in str(exc_info.value) def test_array_like_input(self): theta = np.array([.25 * pi, .25 * pi, .25 * pi, .5 * pi, .5 * pi, .5 * pi, .75 * pi, .75 * pi, .75 * pi]) phi = np.array([.5 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi, .5 * pi, pi, 1.5 * pi]) r = np.array([3, 3, 3, 3, 3, 3, 3, 3, 3]) w = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]) # 
np.array input spl1 = SmoothSphereBivariateSpline(theta, phi, r, w=w, s=1E10) # list input spl2 = SmoothSphereBivariateSpline(theta.tolist(), phi.tolist(), r.tolist(), w=w.tolist(), s=1E10) assert_array_almost_equal(spl1(1.0, 1.0), spl2(1.0, 1.0)) class TestRectBivariateSpline: def test_defaults(self): x = array([1,2,3,4,5]) y = array([1,2,3,4,5]) z = array([[1,2,1,2,1],[1,2,1,2,1],[1,2,3,2,1],[1,2,2,2,1],[1,2,1,2,1]]) lut = RectBivariateSpline(x,y,z) assert_array_almost_equal(lut(x,y),z) def test_evaluate(self): x = array([1,2,3,4,5]) y = array([1,2,3,4,5]) z = array([[1,2,1,2,1],[1,2,1,2,1],[1,2,3,2,1],[1,2,2,2,1],[1,2,1,2,1]]) lut = RectBivariateSpline(x,y,z) xi = [1, 2.3, 5.3, 0.5, 3.3, 1.2, 3] yi = [1, 3.3, 1.2, 4.0, 5.0, 1.0, 3] zi = lut.ev(xi, yi) zi2 = array([lut(xp, yp)[0,0] for xp, yp in zip(xi, yi)]) assert_almost_equal(zi, zi2) def test_derivatives_grid(self): x = array([1,2,3,4,5]) y = array([1,2,3,4,5]) z = array([[1,2,1,2,1],[1,2,1,2,1],[1,2,3,2,1],[1,2,2,2,1],[1,2,1,2,1]]) dx = array([[0,0,-20,0,0],[0,0,13,0,0],[0,0,4,0,0], [0,0,-11,0,0],[0,0,4,0,0]])/6. dy = array([[4,-1,0,1,-4],[4,-1,0,1,-4],[0,1.5,0,-1.5,0], [2,.25,0,-.25,-2],[4,-1,0,1,-4]]) dxdy = array([[40,-25,0,25,-40],[-26,16.25,0,-16.25,26], [-8,5,0,-5,8],[22,-13.75,0,13.75,-22],[-8,5,0,-5,8]])/6. lut = RectBivariateSpline(x,y,z) assert_array_almost_equal(lut(x,y,dx=1),dx) assert_array_almost_equal(lut(x,y,dy=1),dy) assert_array_almost_equal(lut(x,y,dx=1,dy=1),dxdy) def test_derivatives(self): x = array([1,2,3,4,5]) y = array([1,2,3,4,5]) z = array([[1,2,1,2,1],[1,2,1,2,1],[1,2,3,2,1],[1,2,2,2,1],[1,2,1,2,1]]) dx = array([0,0,2./3,0,0]) dy = array([4,-1,0,-.25,-4]) dxdy = array([160,65,0,55,32])/24. lut = RectBivariateSpline(x,y,z) assert_array_almost_equal(lut(x,y,dx=1,grid=False),dx) assert_array_almost_equal(lut(x,y,dy=1,grid=False),dy) assert_array_almost_equal(lut(x,y,dx=1,dy=1,grid=False),dxdy) def test_broadcast(self): x = array([1,2,3,4,5]) y = array([1,2,3,4,5]) z = array([[1,2,1,2,1],[1,2,1,2,1],[1,2,3,2,1],[1,2,2,2,1],[1,2,1,2,1]]) lut = RectBivariateSpline(x,y,z) assert_allclose(lut(x, y), lut(x[:,None], y[None,:], grid=False)) def test_invalid_input(self): with assert_raises(ValueError) as info: x = array([6, 2, 3, 4, 5]) y = array([1, 2, 3, 4, 5]) z = array([[1, 2, 1, 2, 1], [1, 2, 1, 2, 1], [1, 2, 3, 2, 1], [1, 2, 2, 2, 1], [1, 2, 1, 2, 1]]) RectBivariateSpline(x, y, z) assert "x must be strictly increasing" in str(info.value) with assert_raises(ValueError) as info: x = array([1, 2, 3, 4, 5]) y = array([2, 2, 3, 4, 5]) z = array([[1, 2, 1, 2, 1], [1, 2, 1, 2, 1], [1, 2, 3, 2, 1], [1, 2, 2, 2, 1], [1, 2, 1, 2, 1]]) RectBivariateSpline(x, y, z) assert "y must be strictly increasing" in str(info.value) with assert_raises(ValueError) as info: x = array([1, 2, 3, 4, 5]) y = array([1, 2, 3, 4, 5]) z = array([[1, 2, 1, 2, 1], [1, 2, 1, 2, 1], [1, 2, 3, 2, 1], [1, 2, 2, 2, 1]]) RectBivariateSpline(x, y, z) assert "x dimension of z must have same number of elements as x"\ in str(info.value) with assert_raises(ValueError) as info: x = array([1, 2, 3, 4, 5]) y = array([1, 2, 3, 4, 5]) z = array([[1, 2, 1, 2], [1, 2, 1, 2], [1, 2, 3, 2], [1, 2, 2, 2], [1, 2, 1, 2]]) RectBivariateSpline(x, y, z) assert "y dimension of z must have same number of elements as y"\ in str(info.value) with assert_raises(ValueError) as info: x = array([1, 2, 3, 4, 5]) y = array([1, 2, 3, 4, 5]) z = array([[1, 2, 1, 2, 1], [1, 2, 1, 2, 1], [1, 2, 3, 2, 1], [1, 2, 2, 2, 1], [1, 2, 1, 2, 1]]) bbox = (-100, 100, -100) RectBivariateSpline(x, 
y, z, bbox=bbox) assert "bbox shape should be (4,)" in str(info.value) with assert_raises(ValueError) as info: RectBivariateSpline(x, y, z, s=-1.0) assert "s should be s >= 0.0" in str(info.value) def test_array_like_input(self): x = array([1, 2, 3, 4, 5]) y = array([1, 2, 3, 4, 5]) z = array([[1, 2, 1, 2, 1], [1, 2, 1, 2, 1], [1, 2, 3, 2, 1], [1, 2, 2, 2, 1], [1, 2, 1, 2, 1]]) bbox = array([1, 5, 1, 5]) spl1 = RectBivariateSpline(x, y, z, bbox=bbox) spl2 = RectBivariateSpline(x.tolist(), y.tolist(), z.tolist(), bbox=bbox.tolist()) assert_array_almost_equal(spl1(1.0, 1.0), spl2(1.0, 1.0)) def test_not_increasing_input(self): # gh-8565 NSamp = 20 Theta = np.random.uniform(0, np.pi, NSamp) Phi = np.random.uniform(0, 2 * np.pi, NSamp) Data = np.ones(NSamp) Interpolator = SmoothSphereBivariateSpline(Theta, Phi, Data, s=3.5) NLon = 6 NLat = 3 GridPosLats = np.arange(NLat) / NLat * np.pi GridPosLons = np.arange(NLon) / NLon * 2 * np.pi # No error Interpolator(GridPosLats, GridPosLons) nonGridPosLats = GridPosLats.copy() nonGridPosLats[2] = 0.001 with assert_raises(ValueError) as exc_info: Interpolator(nonGridPosLats, GridPosLons) assert "x must be strictly increasing" in str(exc_info.value) nonGridPosLons = GridPosLons.copy() nonGridPosLons[2] = 0.001 with assert_raises(ValueError) as exc_info: Interpolator(GridPosLats, nonGridPosLons) assert "y must be strictly increasing" in str(exc_info.value) class TestRectSphereBivariateSpline: def test_defaults(self): y = linspace(0.01, 2*pi-0.01, 7) x = linspace(0.01, pi-0.01, 7) z = array([[1,2,1,2,1,2,1],[1,2,1,2,1,2,1],[1,2,3,2,1,2,1], [1,2,2,2,1,2,1],[1,2,1,2,1,2,1],[1,2,2,2,1,2,1], [1,2,1,2,1,2,1]]) lut = RectSphereBivariateSpline(x,y,z) assert_array_almost_equal(lut(x,y),z) def test_evaluate(self): y = linspace(0.01, 2*pi-0.01, 7) x = linspace(0.01, pi-0.01, 7) z = array([[1,2,1,2,1,2,1],[1,2,1,2,1,2,1],[1,2,3,2,1,2,1], [1,2,2,2,1,2,1],[1,2,1,2,1,2,1],[1,2,2,2,1,2,1], [1,2,1,2,1,2,1]]) lut = RectSphereBivariateSpline(x,y,z) yi = [0.2, 1, 2.3, 2.35, 3.0, 3.99, 5.25] xi = [1.5, 0.4, 1.1, 0.45, 0.2345, 1., 0.0001] zi = lut.ev(xi, yi) zi2 = array([lut(xp, yp)[0,0] for xp, yp in zip(xi, yi)]) assert_almost_equal(zi, zi2) def test_derivatives_grid(self): y = linspace(0.01, 2*pi-0.01, 7) x = linspace(0.01, pi-0.01, 7) z = array([[1,2,1,2,1,2,1],[1,2,1,2,1,2,1],[1,2,3,2,1,2,1], [1,2,2,2,1,2,1],[1,2,1,2,1,2,1],[1,2,2,2,1,2,1], [1,2,1,2,1,2,1]]) lut = RectSphereBivariateSpline(x,y,z) y = linspace(0.02, 2*pi-0.02, 7) x = linspace(0.02, pi-0.02, 7) assert_allclose(lut(x, y, dtheta=1), _numdiff_2d(lut, x, y, dx=1), rtol=1e-4, atol=1e-4) assert_allclose(lut(x, y, dphi=1), _numdiff_2d(lut, x, y, dy=1), rtol=1e-4, atol=1e-4) assert_allclose(lut(x, y, dtheta=1, dphi=1), _numdiff_2d(lut, x, y, dx=1, dy=1, eps=1e-6), rtol=1e-3, atol=1e-3) def test_derivatives(self): y = linspace(0.01, 2*pi-0.01, 7) x = linspace(0.01, pi-0.01, 7) z = array([[1,2,1,2,1,2,1],[1,2,1,2,1,2,1],[1,2,3,2,1,2,1], [1,2,2,2,1,2,1],[1,2,1,2,1,2,1],[1,2,2,2,1,2,1], [1,2,1,2,1,2,1]]) lut = RectSphereBivariateSpline(x,y,z) y = linspace(0.02, 2*pi-0.02, 7) x = linspace(0.02, pi-0.02, 7) assert_equal(lut(x, y, dtheta=1, grid=False).shape, x.shape) assert_allclose(lut(x, y, dtheta=1, grid=False), _numdiff_2d(lambda x,y: lut(x,y,grid=False), x, y, dx=1), rtol=1e-4, atol=1e-4) assert_allclose(lut(x, y, dphi=1, grid=False), _numdiff_2d(lambda x,y: lut(x,y,grid=False), x, y, dy=1), rtol=1e-4, atol=1e-4) assert_allclose(lut(x, y, dtheta=1, dphi=1, grid=False), _numdiff_2d(lambda x,y: lut(x,y,grid=False), x, 
y, dx=1, dy=1, eps=1e-6), rtol=1e-3, atol=1e-3) def test_invalid_input(self): data = np.dot(np.atleast_2d(90. - np.linspace(-80., 80., 18)).T, np.atleast_2d(180. - np.abs(np.linspace(0., 350., 9)))).T with assert_raises(ValueError) as exc_info: lats = np.linspace(0, 170, 9) * np.pi / 180. lons = np.linspace(0, 350, 18) * np.pi / 180. RectSphereBivariateSpline(lats, lons, data) assert "u should be between (0, pi)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: lats = np.linspace(10, 180, 9) * np.pi / 180. lons = np.linspace(0, 350, 18) * np.pi / 180. RectSphereBivariateSpline(lats, lons, data) assert "u should be between (0, pi)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: lats = np.linspace(10, 170, 9) * np.pi / 180. lons = np.linspace(-181, 10, 18) * np.pi / 180. RectSphereBivariateSpline(lats, lons, data) assert "v[0] should be between [-pi, pi)" in str(exc_info.value) with assert_raises(ValueError) as exc_info: lats = np.linspace(10, 170, 9) * np.pi / 180. lons = np.linspace(-10, 360, 18) * np.pi / 180. RectSphereBivariateSpline(lats, lons, data) assert "v[-1] should be v[0] + 2pi or less" in str(exc_info.value) with assert_raises(ValueError) as exc_info: lats = np.linspace(10, 170, 9) * np.pi / 180. lons = np.linspace(10, 350, 18) * np.pi / 180. RectSphereBivariateSpline(lats, lons, data, s=-1) assert "s should be positive" in str(exc_info.value) def test_array_like_input(self): y = linspace(0.01, 2 * pi - 0.01, 7) x = linspace(0.01, pi - 0.01, 7) z = array([[1, 2, 1, 2, 1, 2, 1], [1, 2, 1, 2, 1, 2, 1], [1, 2, 3, 2, 1, 2, 1], [1, 2, 2, 2, 1, 2, 1], [1, 2, 1, 2, 1, 2, 1], [1, 2, 2, 2, 1, 2, 1], [1, 2, 1, 2, 1, 2, 1]]) # np.array input spl1 = RectSphereBivariateSpline(x, y, z) # list input spl2 = RectSphereBivariateSpline(x.tolist(), y.tolist(), z.tolist()) assert_array_almost_equal(spl1(x, y), spl2(x, y)) def test_negative_evaluation(self): lats = np.array([25, 30, 35, 40, 45]) lons = np.array([-90, -85, -80, -75, 70]) mesh = np.meshgrid(lats, lons) data = mesh[0] + mesh[1] # lon + lat value lat_r = np.radians(lats) lon_r = np.radians(lons) interpolator = RectSphereBivariateSpline(lat_r, lon_r, data) query_lat = np.radians(np.array([35, 37.5])) query_lon = np.radians(np.array([-80, -77.5])) data_interp = interpolator(query_lat, query_lon) ans = np.array([[-45.0, -42.480862], [-49.0625, -46.54315]]) assert_array_almost_equal(data_interp, ans) def _numdiff_2d(func, x, y, dx=0, dy=0, eps=1e-8): if dx == 0 and dy == 0: return func(x, y) elif dx == 1 and dy == 0: return (func(x + eps, y) - func(x - eps, y)) / (2*eps) elif dx == 0 and dy == 1: return (func(x, y + eps) - func(x, y - eps)) / (2*eps) elif dx == 1 and dy == 1: return (func(x + eps, y + eps) - func(x - eps, y + eps) - func(x + eps, y - eps) + func(x - eps, y - eps)) / (2*eps)**2 else: raise ValueError("invalid derivative order")
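# Illustrative sketch (not part of the test suite): the central-difference
# helper `_numdiff_2d` defined above can also be used interactively to
# spot-check analytic spline derivatives. The grid, evaluation point, and
# tolerance below are assumptions chosen for this demo only.
if __name__ == "__main__":
    import numpy as np  # repeated here so the sketch stands alone
    from scipy.interpolate import RectBivariateSpline

    xs = np.linspace(1.0, 5.0, 5)
    ys = np.linspace(1.0, 5.0, 5)
    zs = np.add.outer(xs, ys)  # z = x + y, so dz/dx == 1 everywhere
    spline = RectBivariateSpline(xs, ys, zs)
    analytic = spline(3.0, 3.0, dx=1)[0, 0]
    numeric = _numdiff_2d(lambda a, b: spline(a, b)[0, 0], 3.0, 3.0, dx=1)
    assert abs(analytic - numeric) < 1e-4, (analytic, numeric)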
import os import numpy as np from numpy.testing import assert_allclose import pytest from scipy import stats from .test_continuous_basic import distcont # this is not a proper statistical test for convergence, but only # verifies that the estimate and true values don't differ by too much fit_sizes = [1000, 5000, 10000] # sample sizes to try thresh_percent = 0.25 # percent of true parameters for fail cut-off thresh_min = 0.75 # minimum difference estimate - true to fail test mle_failing_fits = [ 'burr', 'chi2', 'gausshyper', 'genexpon', 'gengamma', 'kappa4', 'ksone', 'kstwo', 'mielke', 'ncf', 'ncx2', 'pearson3', 'powerlognorm', 'truncexpon', 'tukeylambda', 'vonmises', 'levy_stable', 'trapezoid', 'studentized_range' ] mm_failing_fits = ['alpha', 'betaprime', 'burr', 'burr12', 'cauchy', 'chi', 'chi2', 'crystalball', 'dgamma', 'dweibull', 'f', 'fatiguelife', 'fisk', 'foldcauchy', 'genextreme', 'gengamma', 'genhyperbolic', 'gennorm', 'genpareto', 'halfcauchy', 'invgamma', 'invweibull', 'johnsonsu', 'kappa3', 'ksone', 'kstwo', 'levy', 'levy_l', 'levy_stable', 'loglaplace', 'lomax', 'mielke', 'nakagami', 'ncf', 'nct', 'ncx2', 'pareto', 'powerlognorm', 'powernorm', 'skewcauchy', 't', 'trapezoid', 'triang', 'tukeylambda', 'studentized_range'] # not sure if these fail, but they caused my patience to fail mm_slow_fits = ['argus', 'exponpow', 'exponweib', 'gausshyper', 'genexpon', 'genhalflogistic', 'halfgennorm', 'gompertz', 'johnsonsb', 'kappa4', 'kstwobign', 'recipinvgauss', 'skewnorm', 'truncexpon', 'vonmises', 'vonmises_line'] failing_fits = {"MM": mm_failing_fits + mm_slow_fits, "MLE": mle_failing_fits} # Don't run the fit test on these: skip_fit = [ 'erlang', # Subclass of gamma, generates a warning. ] def cases_test_cont_fit(): # this tests the closeness of the estimated parameters to the true # parameters with fit method of continuous distributions # Note: is slow, some distributions don't converge with sample # size <= 10000 for distname, arg in distcont: if distname not in skip_fit: yield distname, arg @pytest.mark.slow @pytest.mark.parametrize('distname,arg', cases_test_cont_fit()) @pytest.mark.parametrize('method', ["MLE", 'MM']) def test_cont_fit(distname, arg, method): if distname in failing_fits[method]: # Skip failing fits unless overridden try: xfail = not int(os.environ['SCIPY_XFAIL']) except Exception: xfail = True if xfail: msg = "Fitting %s doesn't work reliably yet" % distname msg += (" [Set environment variable SCIPY_XFAIL=1 to run this" " test nevertheless.]") pytest.xfail(msg) distfn = getattr(stats, distname) truearg = np.hstack([arg, [0.0, 1.0]]) diffthreshold = np.max(np.vstack([truearg*thresh_percent, np.full(distfn.numargs+2, thresh_min)]), 0) for fit_size in fit_sizes: # Note that if a fit succeeds, the other fit_sizes are skipped np.random.seed(1234) with np.errstate(all='ignore'): rvs = distfn.rvs(size=fit_size, *arg) est = distfn.fit(rvs, method=method) # start with default values diff = est - truearg # threshold for location diffthreshold[-2] = np.max([np.abs(rvs.mean())*thresh_percent, thresh_min]) if np.any(np.isnan(est)): raise AssertionError('nan returned in fit') else: if np.all(np.abs(diff) <= diffthreshold): break else: txt = 'parameter: %s\n' % str(truearg) txt += 'estimated: %s\n' % str(est) txt += 'diff : %s\n' % str(diff) raise AssertionError('fit not very good in %s\n' % distfn.name + txt) def _check_loc_scale_mle_fit(name, data, desired, atol=None): d = getattr(stats, name) actual = d.fit(data)[-2:] assert_allclose(actual, desired, atol=atol, 
                    err_msg='poor mle fit of (loc, scale) in %s' % name)


def test_non_default_loc_scale_mle_fit():
    data = np.array([1.01, 1.78, 1.78, 1.78, 1.88, 1.88, 1.88, 2.00])
    _check_loc_scale_mle_fit('uniform', data, [1.01, 0.99], 1e-3)
    _check_loc_scale_mle_fit('expon', data, [1.01, 0.73875], 1e-3)


def test_expon_fit():
    """gh-6167"""
    data = [0, 0, 0, 0, 2, 2, 2, 2]
    phat = stats.expon.fit(data, floc=0)
    assert_allclose(phat, [0, 1.0], atol=1e-3)
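# Minimal sketch (illustration only, not part of the test suite): the same
# pass criterion test_cont_fit applies, shown for a single MLE fit. The
# distribution, true parameters, and sample size are assumptions chosen for
# the demo; thresh_percent and thresh_min are this module's own constants.
if __name__ == "__main__":
    np.random.seed(1234)
    sample = stats.norm.rvs(loc=2.0, scale=3.0, size=5000)
    loc_hat, scale_hat = stats.norm.fit(sample)
    for est, true in [(loc_hat, 2.0), (scale_hat, 3.0)]:
        # pass if |estimate - true| <= max(thresh_percent * |true|, thresh_min)
        assert abs(est - true) <= max(thresh_percent * abs(true), thresh_min)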
scipy/scipy
scipy/stats/tests/test_fit.py
scipy/interpolate/tests/test_fitpack2.py
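# Usage sketch (added for illustration; not part of the dataset row): the
# helpers in the file that follows back the public scipy.optimize.linprog
# entry point. The objective and constraints reuse the _LPProblem docstring
# example below; the per-variable (min, max) bounds format is one of the
# inputs that _clean_inputs normalizes. Expected output worked by hand.
if __name__ == "__main__":
    from scipy.optimize import linprog
    # minimize -x0 + 4*x1  s.t.  -3*x0 + x1 <= 6,  x0 + 2*x1 <= 4,
    # x1 >= -3, x0 free
    res = linprog(c=[-1, 4], A_ub=[[-3, 1], [1, 2]], b_ub=[6, 4],
                  bounds=[(None, None), (-3, None)])
    print(res.status, res.fun, res.x)  # expected: 0, -22.0, [10., -3.]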
""" Method agnostic utility functions for linear progamming """ import numpy as np import scipy.sparse as sps from warnings import warn from .optimize import OptimizeWarning from scipy.optimize._remove_redundancy import ( _remove_redundancy_svd, _remove_redundancy_pivot_sparse, _remove_redundancy_pivot_dense, _remove_redundancy_id ) from collections import namedtuple _LPProblem = namedtuple('_LPProblem', 'c A_ub b_ub A_eq b_eq bounds x0') _LPProblem.__new__.__defaults__ = (None,) * 6 # make c the only required arg _LPProblem.__doc__ = \ """ Represents a linear-programming problem. Attributes ---------- c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : various valid formats, optional The bounds of ``x``, as ``min`` and ``max`` pairs. If bounds are specified for all N variables separately, valid formats are: * a 2D array (N x 2); * a sequence of N sequences, each with 2 values. If all variables have the same bounds, the bounds can be specified as a 1-D or 2-D array or sequence with 2 scalar values. If all variables have a lower bound of 0 and no upper bound, the bounds parameter can be omitted (or given as None). Absent lower and/or upper bounds can be specified as -numpy.inf (no lower bound), numpy.inf (no upper bound) or None (both). x0 : 1D array, optional Guess values of the decision variables, which will be refined by the optimization algorithm. This argument is currently used only by the 'revised simplex' method, and can only be used if `x0` represents a basic feasible solution. Notes ----- This namedtuple supports 2 ways of initialization: >>> lp1 = _LPProblem(c=[-1, 4], A_ub=[[-3, 1], [1, 2]], b_ub=[6, 4]) >>> lp2 = _LPProblem([-1, 4], [[-3, 1], [1, 2]], [6, 4]) Note that only ``c`` is a required argument here, whereas all other arguments ``A_ub``, ``b_ub``, ``A_eq``, ``b_eq``, ``bounds``, ``x0`` are optional with default values of None. For example, ``A_eq`` and ``b_eq`` can be set without ``A_ub`` or ``b_ub``: >>> lp3 = _LPProblem(c=[-1, 4], A_eq=[[2, 1]], b_eq=[10]) """ def _check_sparse_inputs(options, meth, A_ub, A_eq): """ Check the provided ``A_ub`` and ``A_eq`` matrices conform to the specified optional sparsity variables. Parameters ---------- A_ub : 2-D array, optional 2-D array such that ``A_ub @ x`` gives the values of the upper-bound inequality constraints at ``x``. A_eq : 2-D array, optional 2-D array such that ``A_eq @ x`` gives the values of the equality constraints at ``x``. options : dict A dictionary of solver options. All methods accept the following generic options: maxiter : int Maximum number of iterations to perform. disp : bool Set to True to print convergence messages. For method-specific options, see :func:`show_options('linprog')`. method : str, optional The algorithm used to solve the standard form problem. 
Returns ------- A_ub : 2-D array, optional 2-D array such that ``A_ub @ x`` gives the values of the upper-bound inequality constraints at ``x``. A_eq : 2-D array, optional 2-D array such that ``A_eq @ x`` gives the values of the equality constraints at ``x``. options : dict A dictionary of solver options. All methods accept the following generic options: maxiter : int Maximum number of iterations to perform. disp : bool Set to True to print convergence messages. For method-specific options, see :func:`show_options('linprog')`. """ # This is an undocumented option for unit testing sparse presolve _sparse_presolve = options.pop('_sparse_presolve', False) if _sparse_presolve and A_eq is not None: A_eq = sps.coo_matrix(A_eq) if _sparse_presolve and A_ub is not None: A_ub = sps.coo_matrix(A_ub) sparse_constraint = sps.issparse(A_eq) or sps.issparse(A_ub) preferred_methods = {"highs", "highs-ds", "highs-ipm"} dense_methods = {"simplex", "revised simplex"} if meth in dense_methods and sparse_constraint: raise ValueError(f"Method '{meth}' does not support sparse " "constraint matrices. Please consider using one of " f"{preferred_methods}.") sparse = options.get('sparse', False) if not sparse and sparse_constraint and meth == 'interior-point': options['sparse'] = True warn("Sparse constraint matrix detected; setting 'sparse':True.", OptimizeWarning, stacklevel=4) return options, A_ub, A_eq def _format_A_constraints(A, n_x, sparse_lhs=False): """Format the left hand side of the constraints to a 2-D array Parameters ---------- A : 2-D array 2-D array such that ``A @ x`` gives the values of the upper-bound (in)equality constraints at ``x``. n_x : int The number of variables in the linear programming problem. sparse_lhs : bool Whether either of `A_ub` or `A_eq` are sparse. If true return a coo_matrix instead of a numpy array. Returns ------- np.ndarray or sparse.coo_matrix 2-D array such that ``A @ x`` gives the values of the upper-bound (in)equality constraints at ``x``. """ if sparse_lhs: return sps.coo_matrix( (0, n_x) if A is None else A, dtype=float, copy=True ) elif A is None: return np.zeros((0, n_x), dtype=float) else: return np.array(A, dtype=float, copy=True) def _format_b_constraints(b): """Format the upper bounds of the constraints to a 1-D array Parameters ---------- b : 1-D array 1-D array of values representing the upper-bound of each (in)equality constraint (row) in ``A``. Returns ------- 1-D np.array 1-D array of values representing the upper-bound of each (in)equality constraint (row) in ``A``. """ if b is None: return np.array([], dtype=float) b = np.array(b, dtype=float, copy=True).squeeze() return b if b.size != 1 else b.reshape((-1)) def _clean_inputs(lp): """ Given user inputs for a linear programming problem, return the objective vector, upper bound constraints, equality constraints, and simple bounds in a preferred format. Parameters ---------- lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. 
b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : various valid formats, optional The bounds of ``x``, as ``min`` and ``max`` pairs. If bounds are specified for all N variables separately, valid formats are: * a 2D array (2 x N or N x 2); * a sequence of N sequences, each with 2 values. If all variables have the same bounds, a single pair of values can be specified. Valid formats are: * a sequence with 2 scalar values; * a sequence with a single element containing 2 scalar values. If all variables have a lower bound of 0 and no upper bound, the bounds parameter can be omitted (or given as None). x0 : 1D array, optional Guess values of the decision variables, which will be refined by the optimization algorithm. This argument is currently used only by the 'revised simplex' method, and can only be used if `x0` represents a basic feasible solution. Returns ------- lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : 2D array The bounds of ``x``, as ``min`` and ``max`` pairs, one for each of the N elements of ``x``. The N x 2 array contains lower bounds in the first column and upper bounds in the 2nd. Unbounded variables have lower bound -np.inf and/or upper bound np.inf. x0 : 1D array, optional Guess values of the decision variables, which will be refined by the optimization algorithm. This argument is currently used only by the 'revised simplex' method, and can only be used if `x0` represents a basic feasible solution. """ c, A_ub, b_ub, A_eq, b_eq, bounds, x0 = lp if c is None: raise TypeError try: c = np.array(c, dtype=np.float64, copy=True).squeeze() except ValueError as e: raise TypeError( "Invalid input for linprog: c must be a 1-D array of numerical " "coefficients") from e else: # If c is a single value, convert it to a 1-D array. 
if c.size == 1: c = c.reshape((-1)) n_x = len(c) if n_x == 0 or len(c.shape) != 1: raise ValueError( "Invalid input for linprog: c must be a 1-D array and must " "not have more than one non-singleton dimension") if not(np.isfinite(c).all()): raise ValueError( "Invalid input for linprog: c must not contain values " "inf, nan, or None") sparse_lhs = sps.issparse(A_eq) or sps.issparse(A_ub) try: A_ub = _format_A_constraints(A_ub, n_x, sparse_lhs=sparse_lhs) except ValueError as e: raise TypeError( "Invalid input for linprog: A_ub must be a 2-D array " "of numerical values") from e else: n_ub = A_ub.shape[0] if len(A_ub.shape) != 2 or A_ub.shape[1] != n_x: raise ValueError( "Invalid input for linprog: A_ub must have exactly two " "dimensions, and the number of columns in A_ub must be " "equal to the size of c") if (sps.issparse(A_ub) and not np.isfinite(A_ub.data).all() or not sps.issparse(A_ub) and not np.isfinite(A_ub).all()): raise ValueError( "Invalid input for linprog: A_ub must not contain values " "inf, nan, or None") try: b_ub = _format_b_constraints(b_ub) except ValueError as e: raise TypeError( "Invalid input for linprog: b_ub must be a 1-D array of " "numerical values, each representing the upper bound of an " "inequality constraint (row) in A_ub") from e else: if b_ub.shape != (n_ub,): raise ValueError( "Invalid input for linprog: b_ub must be a 1-D array; b_ub " "must not have more than one non-singleton dimension and " "the number of rows in A_ub must equal the number of values " "in b_ub") if not(np.isfinite(b_ub).all()): raise ValueError( "Invalid input for linprog: b_ub must not contain values " "inf, nan, or None") try: A_eq = _format_A_constraints(A_eq, n_x, sparse_lhs=sparse_lhs) except ValueError as e: raise TypeError( "Invalid input for linprog: A_eq must be a 2-D array " "of numerical values") from e else: n_eq = A_eq.shape[0] if len(A_eq.shape) != 2 or A_eq.shape[1] != n_x: raise ValueError( "Invalid input for linprog: A_eq must have exactly two " "dimensions, and the number of columns in A_eq must be " "equal to the size of c") if (sps.issparse(A_eq) and not np.isfinite(A_eq.data).all() or not sps.issparse(A_eq) and not np.isfinite(A_eq).all()): raise ValueError( "Invalid input for linprog: A_eq must not contain values " "inf, nan, or None") try: b_eq = _format_b_constraints(b_eq) except ValueError as e: raise TypeError( "Invalid input for linprog: b_eq must be a dense, 1-D array of " "numerical values, each representing the right hand side of an " "equality constraint (row) in A_eq") from e else: if b_eq.shape != (n_eq,): raise ValueError( "Invalid input for linprog: b_eq must be a 1-D array; b_eq " "must not have more than one non-singleton dimension and " "the number of rows in A_eq must equal the number of values " "in b_eq") if not(np.isfinite(b_eq).all()): raise ValueError( "Invalid input for linprog: b_eq must not contain values " "inf, nan, or None") # x0 gives a (optional) starting solution to the solver. If x0 is None, # skip the checks. Initial solution will be generated automatically. 
if x0 is not None: try: x0 = np.array(x0, dtype=float, copy=True).squeeze() except ValueError as e: raise TypeError( "Invalid input for linprog: x0 must be a 1-D array of " "numerical coefficients") from e if x0.ndim == 0: x0 = x0.reshape((-1)) if len(x0) == 0 or x0.ndim != 1: raise ValueError( "Invalid input for linprog: x0 should be a 1-D array; it " "must not have more than one non-singleton dimension") if not x0.size == c.size: raise ValueError( "Invalid input for linprog: x0 and c should contain the " "same number of elements") if not np.isfinite(x0).all(): raise ValueError( "Invalid input for linprog: x0 must not contain values " "inf, nan, or None") # Bounds can be one of these formats: # (1) a 2-D array or sequence, with shape N x 2 # (2) a 1-D or 2-D sequence or array with 2 scalars # (3) None (or an empty sequence or array) # Unspecified bounds can be represented by None or (-)np.inf. # All formats are converted into a N x 2 np.array with (-)np.inf where # bounds are unspecified. # Prepare clean bounds array bounds_clean = np.zeros((n_x, 2), dtype=float) # Convert to a numpy array. # np.array(..,dtype=float) raises an error if dimensions are inconsistent # or if there are invalid data types in bounds. Just add a linprog prefix # to the error and re-raise. # Creating at least a 2-D array simplifies the cases to distinguish below. if bounds is None or np.array_equal(bounds, []) or np.array_equal(bounds, [[]]): bounds = (0, np.inf) try: bounds_conv = np.atleast_2d(np.array(bounds, dtype=float)) except ValueError as e: raise ValueError( "Invalid input for linprog: unable to interpret bounds, " "check values and dimensions: " + e.args[0]) from e except TypeError as e: raise TypeError( "Invalid input for linprog: unable to interpret bounds, " "check values and dimensions: " + e.args[0]) from e # Check bounds options bsh = bounds_conv.shape if len(bsh) > 2: # Do not try to handle multidimensional bounds input raise ValueError( "Invalid input for linprog: provide a 2-D array for bounds, " "not a {:d}-D array.".format(len(bsh))) elif np.all(bsh == (n_x, 2)): # Regular N x 2 array bounds_clean = bounds_conv elif (np.all(bsh == (2, 1)) or np.all(bsh == (1, 2))): # 2 values: interpret as overall lower and upper bound bounds_flat = bounds_conv.flatten() bounds_clean[:, 0] = bounds_flat[0] bounds_clean[:, 1] = bounds_flat[1] elif np.all(bsh == (2, n_x)): # Reject a 2 x N array raise ValueError( "Invalid input for linprog: provide a {:d} x 2 array for bounds, " "not a 2 x {:d} array.".format(n_x, n_x)) else: raise ValueError( "Invalid input for linprog: unable to interpret bounds with this " "dimension tuple: {0}.".format(bsh)) # The process above creates nan-s where the input specified None # Convert the nan-s in the 1st column to -np.inf and in the 2nd column # to np.inf i_none = np.isnan(bounds_clean[:, 0]) bounds_clean[i_none, 0] = -np.inf i_none = np.isnan(bounds_clean[:, 1]) bounds_clean[i_none, 1] = np.inf return _LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds_clean, x0) def _presolve(lp, rr, rr_method, tol=1e-9): """ Given inputs for a linear programming problem in preferred format, presolve the problem: identify trivial infeasibilities, redundancies, and unboundedness, tighten bounds where possible, and eliminate fixed variables. Parameters ---------- lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. 
Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : 2D array The bounds of ``x``, as ``min`` and ``max`` pairs, one for each of the N elements of ``x``. The N x 2 array contains lower bounds in the first column and upper bounds in the 2nd. Unbounded variables have lower bound -np.inf and/or upper bound np.inf. x0 : 1D array, optional Guess values of the decision variables, which will be refined by the optimization algorithm. This argument is currently used only by the 'revised simplex' method, and can only be used if `x0` represents a basic feasible solution. rr : bool If ``True`` attempts to eliminate any redundant rows in ``A_eq``. Set False if ``A_eq`` is known to be of full row rank, or if you are looking for a potential speedup (at the expense of reliability). rr_method : string Method used to identify and remove redundant rows from the equality constraint matrix after presolve. tol : float The tolerance which determines when a solution is "close enough" to zero in Phase 1 to be considered a basic feasible solution or close enough to positive to serve as an optimal solution. Returns ------- lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : 2D array The bounds of ``x``, as ``min`` and ``max`` pairs, possibly tightened. x0 : 1D array, optional Guess values of the decision variables, which will be refined by the optimization algorithm. This argument is currently used only by the 'revised simplex' method, and can only be used if `x0` represents a basic feasible solution. c0 : 1D array Constant term in objective function due to fixed (and eliminated) variables. 
x : 1D array Solution vector (when the solution is trivial and can be determined in presolve) revstack: list of functions the functions in the list reverse the operations of _presolve() the function signature is x_org = f(x_mod), where x_mod is the result of a presolve step and x_org the value at the start of the step (currently, the revstack contains only one function) complete: bool Whether the solution is complete (solved or determined to be infeasible or unbounded in presolve) status : int An integer representing the exit status of the optimization:: 0 : Optimization terminated successfully 1 : Iteration limit reached 2 : Problem appears to be infeasible 3 : Problem appears to be unbounded 4 : Serious numerical difficulties encountered message : str A string descriptor of the exit status of the optimization. References ---------- .. [5] Andersen, Erling D. "Finding all linearly dependent rows in large-scale linear programming." Optimization Methods and Software 6.3 (1995): 219-227. .. [8] Andersen, Erling D., and Knud D. Andersen. "Presolving in linear programming." Mathematical Programming 71.2 (1995): 221-245. """ # ideas from Reference [5] by Andersen and Andersen # however, unlike the reference, this is performed before converting # problem to standard form # There are a few advantages: # * artificial variables have not been added, so matrices are smaller # * bounds have not been converted to constraints yet. (It is better to # do that after presolve because presolve may adjust the simple bounds.) # There are many improvements that can be made, namely: # * implement remaining checks from [5] # * loop presolve until no additional changes are made # * implement additional efficiency improvements in redundancy removal [2] c, A_ub, b_ub, A_eq, b_eq, bounds, x0 = lp revstack = [] # record of variables eliminated from problem # constant term in cost function may be added if variables are eliminated c0 = 0 complete = False # complete is True if detected infeasible/unbounded x = np.zeros(c.shape) # this is solution vector if completed in presolve status = 0 # all OK unless determined otherwise message = "" # Lower and upper bounds. Copy to prevent feedback. lb = bounds[:, 0].copy() ub = bounds[:, 1].copy() m_eq, n = A_eq.shape m_ub, n = A_ub.shape if (rr_method is not None and rr_method.lower() not in {"svd", "pivot", "id"}): message = ("'" + str(rr_method) + "' is not a valid option " "for redundancy removal. 
Valid options are 'SVD', " "'pivot', and 'ID'.") raise ValueError(message) if sps.issparse(A_eq): A_eq = A_eq.tocsr() A_ub = A_ub.tocsr() def where(A): return A.nonzero() vstack = sps.vstack else: where = np.where vstack = np.vstack # upper bounds > lower bounds if np.any(ub < lb) or np.any(lb == np.inf) or np.any(ub == -np.inf): status = 2 message = ("The problem is (trivially) infeasible since one " "or more upper bounds are smaller than the corresponding " "lower bounds, a lower bound is np.inf or an upper bound " "is -np.inf.") complete = True return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) # zero row in equality constraints zero_row = np.array(np.sum(A_eq != 0, axis=1) == 0).flatten() if np.any(zero_row): if np.any( np.logical_and( zero_row, np.abs(b_eq) > tol)): # test_zero_row_1 # infeasible if RHS is not zero status = 2 message = ("The problem is (trivially) infeasible due to a row " "of zeros in the equality constraint matrix with a " "nonzero corresponding constraint value.") complete = True return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) else: # test_zero_row_2 # if RHS is zero, we can eliminate this equation entirely A_eq = A_eq[np.logical_not(zero_row), :] b_eq = b_eq[np.logical_not(zero_row)] # zero row in inequality constraints zero_row = np.array(np.sum(A_ub != 0, axis=1) == 0).flatten() if np.any(zero_row): if np.any(np.logical_and(zero_row, b_ub < -tol)): # test_zero_row_1 # infeasible if RHS is less than zero (because LHS is zero) status = 2 message = ("The problem is (trivially) infeasible due to a row " "of zeros in the equality constraint matrix with a " "nonzero corresponding constraint value.") complete = True return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) else: # test_zero_row_2 # if LHS is >= 0, we can eliminate this constraint entirely A_ub = A_ub[np.logical_not(zero_row), :] b_ub = b_ub[np.logical_not(zero_row)] # zero column in (both) constraints # this indicates that a variable isn't constrained and can be removed A = vstack((A_eq, A_ub)) if A.shape[0] > 0: zero_col = np.array(np.sum(A != 0, axis=0) == 0).flatten() # variable will be at upper or lower bound, depending on objective x[np.logical_and(zero_col, c < 0)] = ub[ np.logical_and(zero_col, c < 0)] x[np.logical_and(zero_col, c > 0)] = lb[ np.logical_and(zero_col, c > 0)] if np.any(np.isinf(x)): # if an unconstrained variable has no bound status = 3 message = ("If feasible, the problem is (trivially) unbounded " "due to a zero column in the constraint matrices. 
If " "you wish to check whether the problem is infeasible, " "turn presolve off.") complete = True return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) # variables will equal upper/lower bounds will be removed later lb[np.logical_and(zero_col, c < 0)] = ub[ np.logical_and(zero_col, c < 0)] ub[np.logical_and(zero_col, c > 0)] = lb[ np.logical_and(zero_col, c > 0)] # row singleton in equality constraints # this fixes a variable and removes the constraint singleton_row = np.array(np.sum(A_eq != 0, axis=1) == 1).flatten() rows = where(singleton_row)[0] cols = where(A_eq[rows, :])[1] if len(rows) > 0: for row, col in zip(rows, cols): val = b_eq[row] / A_eq[row, col] if not lb[col] - tol <= val <= ub[col] + tol: # infeasible if fixed value is not within bounds status = 2 message = ("The problem is (trivially) infeasible because a " "singleton row in the equality constraints is " "inconsistent with the bounds.") complete = True return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) else: # sets upper and lower bounds at that fixed value - variable # will be removed later lb[col] = val ub[col] = val A_eq = A_eq[np.logical_not(singleton_row), :] b_eq = b_eq[np.logical_not(singleton_row)] # row singleton in inequality constraints # this indicates a simple bound and the constraint can be removed # simple bounds may be adjusted here # After all of the simple bound information is combined here, get_Abc will # turn the simple bounds into constraints singleton_row = np.array(np.sum(A_ub != 0, axis=1) == 1).flatten() cols = where(A_ub[singleton_row, :])[1] rows = where(singleton_row)[0] if len(rows) > 0: for row, col in zip(rows, cols): val = b_ub[row] / A_ub[row, col] if A_ub[row, col] > 0: # upper bound if val < lb[col] - tol: # infeasible complete = True elif val < ub[col]: # new upper bound ub[col] = val else: # lower bound if val > ub[col] + tol: # infeasible complete = True elif val > lb[col]: # new lower bound lb[col] = val if complete: status = 2 message = ("The problem is (trivially) infeasible because a " "singleton row in the upper bound constraints is " "inconsistent with the bounds.") return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) A_ub = A_ub[np.logical_not(singleton_row), :] b_ub = b_ub[np.logical_not(singleton_row)] # identical bounds indicate that variable can be removed i_f = np.abs(lb - ub) < tol # indices of "fixed" variables i_nf = np.logical_not(i_f) # indices of "not fixed" variables # test_bounds_equal_but_infeasible if np.all(i_f): # if bounds define solution, check for consistency residual = b_eq - A_eq.dot(lb) slack = b_ub - A_ub.dot(lb) if ((A_ub.size > 0 and np.any(slack < 0)) or (A_eq.size > 0 and not np.allclose(residual, 0))): status = 2 message = ("The problem is (trivially) infeasible because the " "bounds fix all variables to values inconsistent with " "the constraints") complete = True return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) ub_mod = ub lb_mod = lb if np.any(i_f): c0 += c[i_f].dot(lb[i_f]) b_eq = b_eq - A_eq[:, i_f].dot(lb[i_f]) b_ub = b_ub - A_ub[:, i_f].dot(lb[i_f]) c = c[i_nf] x_undo = lb[i_f] # not x[i_f], x is just zeroes x = x[i_nf] # user guess x0 stays separate from presolve solution x if x0 is not None: x0 = x0[i_nf] A_eq = A_eq[:, i_nf] A_ub = A_ub[:, i_nf] # modify bounds lb_mod = lb[i_nf] ub_mod = ub[i_nf] def rev(x_mod): # Function to restore x: 
insert x_undo into x_mod. # When elements have been removed at positions k1, k2, k3, ... # then these must be replaced at (after) positions k1-1, k2-2, # k3-3, ... in the modified array to recreate the original i = np.flatnonzero(i_f) # Number of variables to restore N = len(i) index_offset = np.arange(N) # Create insert indices insert_indices = i - index_offset x_rev = np.insert(x_mod.astype(float), insert_indices, x_undo) return x_rev # Use revstack as a list of functions, currently just this one. revstack.append(rev) # no constraints indicates that problem is trivial if A_eq.size == 0 and A_ub.size == 0: b_eq = np.array([]) b_ub = np.array([]) # test_empty_constraint_1 if c.size == 0: status = 0 message = ("The solution was determined in presolve as there are " "no non-trivial constraints.") elif (np.any(np.logical_and(c < 0, ub_mod == np.inf)) or np.any(np.logical_and(c > 0, lb_mod == -np.inf))): # test_no_constraints() # test_unbounded_no_nontrivial_constraints_1 # test_unbounded_no_nontrivial_constraints_2 status = 3 message = ("The problem is (trivially) unbounded " "because there are no non-trivial constraints and " "a) at least one decision variable is unbounded " "above and its corresponding cost is negative, or " "b) at least one decision variable is unbounded below " "and its corresponding cost is positive. ") else: # test_empty_constraint_2 status = 0 message = ("The solution was determined in presolve as there are " "no non-trivial constraints.") complete = True x[c < 0] = ub_mod[c < 0] x[c > 0] = lb_mod[c > 0] # where c is zero, set x to a finite bound or zero x_zero_c = ub_mod[c == 0] x_zero_c[np.isinf(x_zero_c)] = ub_mod[c == 0][np.isinf(x_zero_c)] x_zero_c[np.isinf(x_zero_c)] = 0 x[c == 0] = x_zero_c # if this is not the last step of presolve, should convert bounds back # to array and return here # Convert modified lb and ub back into N x 2 bounds bounds = np.hstack((lb_mod[:, np.newaxis], ub_mod[:, np.newaxis])) # remove redundant (linearly dependent) rows from equality constraints n_rows_A = A_eq.shape[0] redundancy_warning = ("A_eq does not appear to be of full row rank. To " "improve performance, check the problem formulation " "for redundant equality constraints.") if (sps.issparse(A_eq)): if rr and A_eq.size > 0: # TODO: Fast sparse rank check? rr_res = _remove_redundancy_pivot_sparse(A_eq, b_eq) A_eq, b_eq, status, message = rr_res if A_eq.shape[0] < n_rows_A: warn(redundancy_warning, OptimizeWarning, stacklevel=1) if status != 0: complete = True return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) # This is a wild guess for which redundancy removal algorithm will be # faster. More testing would be good. 
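    # Descriptive note (added for clarity): the block below estimates
    # rank(A_eq) and compares the row-nullspace dimension,
    # A_eq.shape[0] - rank, against small_nullspace. At or below that
    # threshold the SVD-based remover is tried first; above it, or when the
    # SVD path reports status 4, the dense pivot-based remover is used
    # instead.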
small_nullspace = 5 if rr and A_eq.size > 0: try: # TODO: use results of first SVD in _remove_redundancy_svd rank = np.linalg.matrix_rank(A_eq) # oh well, we'll have to go with _remove_redundancy_pivot_dense except Exception: rank = 0 if rr and A_eq.size > 0 and rank < A_eq.shape[0]: warn(redundancy_warning, OptimizeWarning, stacklevel=3) dim_row_nullspace = A_eq.shape[0]-rank if rr_method is None: if dim_row_nullspace <= small_nullspace: rr_res = _remove_redundancy_svd(A_eq, b_eq) A_eq, b_eq, status, message = rr_res if dim_row_nullspace > small_nullspace or status == 4: rr_res = _remove_redundancy_pivot_dense(A_eq, b_eq) A_eq, b_eq, status, message = rr_res else: rr_method = rr_method.lower() if rr_method == "svd": rr_res = _remove_redundancy_svd(A_eq, b_eq) A_eq, b_eq, status, message = rr_res elif rr_method == "pivot": rr_res = _remove_redundancy_pivot_dense(A_eq, b_eq) A_eq, b_eq, status, message = rr_res elif rr_method == "id": rr_res = _remove_redundancy_id(A_eq, b_eq, rank) A_eq, b_eq, status, message = rr_res else: # shouldn't get here; option validity checked above pass if A_eq.shape[0] < rank: message = ("Due to numerical issues, redundant equality " "constraints could not be removed automatically. " "Try providing your constraint matrices as sparse " "matrices to activate sparse presolve, try turning " "off redundancy removal, or try turning off presolve " "altogether.") status = 4 if status != 0: complete = True return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0), c0, x, revstack, complete, status, message) def _parse_linprog(lp, options, meth): """ Parse the provided linear programming problem ``_parse_linprog`` employs two main steps ``_check_sparse_inputs`` and ``_clean_inputs``. ``_check_sparse_inputs`` checks for sparsity in the provided constraints (``A_ub`` and ``A_eq) and if these match the provided sparsity optional values. ``_clean inputs`` checks of the provided inputs. If no violations are identified the objective vector, upper bound constraints, equality constraints, and simple bounds are returned in the expected format. Parameters ---------- lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : various valid formats, optional The bounds of ``x``, as ``min`` and ``max`` pairs. If bounds are specified for all N variables separately, valid formats are: * a 2D array (2 x N or N x 2); * a sequence of N sequences, each with 2 values. If all variables have the same bounds, a single pair of values can be specified. Valid formats are: * a sequence with 2 scalar values; * a sequence with a single element containing 2 scalar values. If all variables have a lower bound of 0 and no upper bound, the bounds parameter can be omitted (or given as None). x0 : 1D array, optional Guess values of the decision variables, which will be refined by the optimization algorithm. 
This argument is currently used only by the 'revised simplex' method, and can only be used if `x0` represents a basic feasible solution. options : dict A dictionary of solver options. All methods accept the following generic options: maxiter : int Maximum number of iterations to perform. disp : bool Set to True to print convergence messages. For method-specific options, see :func:`show_options('linprog')`. Returns ------- lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : 2D array The bounds of ``x``, as ``min`` and ``max`` pairs, one for each of the N elements of ``x``. The N x 2 array contains lower bounds in the first column and upper bounds in the 2nd. Unbounded variables have lower bound -np.inf and/or upper bound np.inf. x0 : 1D array, optional Guess values of the decision variables, which will be refined by the optimization algorithm. This argument is currently used only by the 'revised simplex' method, and can only be used if `x0` represents a basic feasible solution. options : dict, optional A dictionary of solver options. All methods accept the following generic options: maxiter : int Maximum number of iterations to perform. disp : bool Set to True to print convergence messages. For method-specific options, see :func:`show_options('linprog')`. """ if options is None: options = {} solver_options = {k: v for k, v in options.items()} solver_options, A_ub, A_eq = _check_sparse_inputs(solver_options, meth, lp.A_ub, lp.A_eq) # Convert lists to numpy arrays, etc... lp = _clean_inputs(lp._replace(A_ub=A_ub, A_eq=A_eq)) return lp, solver_options def _get_Abc(lp, c0): """ Given a linear programming problem of the form: Minimize:: c @ x Subject to:: A_ub @ x <= b_ub A_eq @ x == b_eq lb <= x <= ub where ``lb = 0`` and ``ub = None`` unless set in ``bounds``. Return the problem in standard form: Minimize:: c @ x Subject to:: A @ x == b x >= 0 by adding slack variables and making variable substitutions as necessary. Parameters ---------- lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : 2D array The bounds of ``x``, lower bounds in the 1st column, upper bounds in the 2nd column. 
            The bounds are possibly tightened by the presolve procedure.
        x0 : 1D array, optional
            Guess values of the decision variables, which will be refined by
            the optimization algorithm. This argument is currently used only
            by the 'revised simplex' method, and can only be used if `x0`
            represents a basic feasible solution.

    c0 : float
        Constant term in objective function due to fixed (and eliminated)
        variables.

    Returns
    -------
    A : 2-D array
        2-D array such that ``A @ x`` gives the values of the equality
        constraints at ``x``.
    b : 1-D array
        1-D array of values representing the RHS of each equality constraint
        (row) in A (for standard form problem).
    c : 1-D array
        Coefficients of the linear objective function to be minimized (for
        standard form problem).
    c0 : float
        Constant term in objective function due to fixed (and eliminated)
        variables.
    x0 : 1-D array
        Starting values of the independent variables, which will be refined
        by the optimization algorithm.

    References
    ----------
    .. [9] Bertsimas, Dimitris, and J. Tsitsiklis. "Introduction to linear
           programming." Athena Scientific 1 (1997): 997.

    """
    c, A_ub, b_ub, A_eq, b_eq, bounds, x0 = lp

    if sps.issparse(A_eq):
        sparse = True
        A_eq = sps.csr_matrix(A_eq)
        A_ub = sps.csr_matrix(A_ub)

        def hstack(blocks):
            return sps.hstack(blocks, format="csr")

        def vstack(blocks):
            return sps.vstack(blocks, format="csr")

        zeros = sps.csr_matrix
        eye = sps.eye
    else:
        sparse = False
        hstack = np.hstack
        vstack = np.vstack
        zeros = np.zeros
        eye = np.eye

    # Variables lbs and ubs (see below) may be changed, which feeds back into
    # bounds, so copy.
    bounds = np.array(bounds, copy=True)

    # modify problem such that all variables have only non-negativity bounds
    lbs = bounds[:, 0]
    ubs = bounds[:, 1]
    m_ub, n_ub = A_ub.shape

    lb_none = np.equal(lbs, -np.inf)
    ub_none = np.equal(ubs, np.inf)
    lb_some = np.logical_not(lb_none)
    ub_some = np.logical_not(ub_none)

    # unbounded below: substitute xi = -xi' (unbounded above)
    # if -inf <= xi <= ub, then -ub <= -xi <= inf, so swap and invert bounds
    l_nolb_someub = np.logical_and(lb_none, ub_some)
    i_nolb = np.nonzero(l_nolb_someub)[0]
    lbs[l_nolb_someub], ubs[l_nolb_someub] = (
        -ubs[l_nolb_someub], -lbs[l_nolb_someub])
    lb_none = np.equal(lbs, -np.inf)
    ub_none = np.equal(ubs, np.inf)
    lb_some = np.logical_not(lb_none)
    ub_some = np.logical_not(ub_none)
    c[i_nolb] *= -1
    if x0 is not None:
        x0[i_nolb] *= -1
    if len(i_nolb) > 0:
        if A_ub.shape[0] > 0:  # sometimes needed for sparse arrays... weird
            A_ub[:, i_nolb] *= -1
        if A_eq.shape[0] > 0:
            A_eq[:, i_nolb] *= -1

    # upper bound: add inequality constraint
    i_newub, = ub_some.nonzero()
    ub_newub = ubs[ub_some]
    n_bounds = len(i_newub)
    if n_bounds > 0:
        shape = (n_bounds, A_ub.shape[1])
        if sparse:
            idxs = (np.arange(n_bounds), i_newub)
            A_ub = vstack((A_ub, sps.csr_matrix((np.ones(n_bounds), idxs),
                                                shape=shape)))
        else:
            A_ub = vstack((A_ub, np.zeros(shape)))
            A_ub[np.arange(m_ub, A_ub.shape[0]), i_newub] = 1
        b_ub = np.concatenate((b_ub, np.zeros(n_bounds)))
        b_ub[m_ub:] = ub_newub

    A1 = vstack((A_ub, A_eq))
    b = np.concatenate((b_ub, b_eq))
    c = np.concatenate((c, np.zeros((A_ub.shape[0],))))
    if x0 is not None:
        x0 = np.concatenate((x0, np.zeros((A_ub.shape[0],))))

    # unbounded: substitute xi = xi+ + xi-
    l_free = np.logical_and(lb_none, ub_none)
    i_free = np.nonzero(l_free)[0]
    n_free = len(i_free)
    c = np.concatenate((c, np.zeros(n_free)))
    if x0 is not None:
        x0 = np.concatenate((x0, np.zeros(n_free)))
    A1 = hstack((A1[:, :n_ub], -A1[:, i_free]))
    c[n_ub:n_ub+n_free] = -c[i_free]
    if x0 is not None:
        i_free_neg = x0[i_free] < 0
        x0[np.arange(n_ub, A1.shape[1])[i_free_neg]] = -x0[i_free[i_free_neg]]
        x0[i_free[i_free_neg]] = 0

    # add slack variables
    A2 = vstack([eye(A_ub.shape[0]), zeros((A_eq.shape[0], A_ub.shape[0]))])
    A = hstack([A1, A2])

    # lower bound: substitute xi = xi' + lb
    # now there is a constant term in objective
    i_shift = np.nonzero(lb_some)[0]
    lb_shift = lbs[lb_some].astype(float)

    c0 += np.sum(lb_shift * c[i_shift])
    if sparse:
        b = b.reshape(-1, 1)
        A = A.tocsc()
        b -= (A[:, i_shift] * sps.diags(lb_shift)).sum(axis=1)
        b = b.ravel()
    else:
        b -= (A[:, i_shift] * lb_shift).sum(axis=1)
    if x0 is not None:
        x0[i_shift] -= lb_shift

    return A, b, c, c0, x0


def _round_to_power_of_two(x):
    """
    Round elements of the array to the nearest power of two.
    """
    return 2**np.around(np.log2(x))


def _autoscale(A, b, c, x0):
    """
    Scales the problem according to equilibration from [12].
    Also normalizes the right hand side vector by its maximum element.
    """
    m, n = A.shape

    C = 1
    R = 1

    if A.size > 0:

        R = np.max(np.abs(A), axis=1)
        if sps.issparse(A):
            R = R.toarray().flatten()
        R[R == 0] = 1
        R = 1/_round_to_power_of_two(R)

        A = sps.diags(R)*A if sps.issparse(A) else A*R.reshape(m, 1)
        b = b*R

        C = np.max(np.abs(A), axis=0)
        if sps.issparse(A):
            C = C.toarray().flatten()
        C[C == 0] = 1
        C = 1/_round_to_power_of_two(C)

        A = A*sps.diags(C) if sps.issparse(A) else A*C
        c = c*C

    b_scale = np.max(np.abs(b)) if b.size > 0 else 1
    if b_scale == 0:
        b_scale = 1.
    b = b/b_scale

    if x0 is not None:
        x0 = x0/b_scale*(1/C)
    return A, b, c, x0, C, b_scale


def _unscale(x, C, b_scale):
    """
    Converts solution to _autoscale problem -> solution to original problem.
    """
    try:
        n = len(C)
    # fails if sparse or scalar; that's OK.
    # this is only needed for original simplex (never sparse)
    except TypeError:
        n = len(x)

    return x[:n]*b_scale*C


def _display_summary(message, status, fun, iteration):
    """
    Print the termination summary of the linear program.

    Parameters
    ----------
    message : str
        A string descriptor of the exit status of the optimization.
    status : int
        An integer representing the exit status of the optimization::

                0 : Optimization terminated successfully
                1 : Iteration limit reached
                2 : Problem appears to be infeasible
                3 : Problem appears to be unbounded
                4 : Serious numerical difficulties encountered

    fun : float
        Value of the objective function.
    iteration : int
        The number of iterations performed.
""" print(message) if status in (0, 1): print(" Current function value: {0: <12.6f}".format(fun)) print(" Iterations: {0:d}".format(iteration)) def _postsolve(x, postsolve_args, complete=False): """ Given solution x to presolved, standard form linear program x, add fixed variables back into the problem and undo the variable substitutions to get solution to original linear program. Also, calculate the objective function value, slack in original upper bound constraints, and residuals in original equality constraints. Parameters ---------- x : 1-D array Solution vector to the standard-form problem. postsolve_args : tuple Data needed by _postsolve to convert the solution to the standard-form problem into the solution to the original problem, including: lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: c : 1D array The coefficients of the linear objective function to be minimized. A_ub : 2D array, optional The inequality constraint matrix. Each row of ``A_ub`` specifies the coefficients of a linear inequality constraint on ``x``. b_ub : 1D array, optional The inequality constraint vector. Each element represents an upper bound on the corresponding value of ``A_ub @ x``. A_eq : 2D array, optional The equality constraint matrix. Each row of ``A_eq`` specifies the coefficients of a linear equality constraint on ``x``. b_eq : 1D array, optional The equality constraint vector. Each element of ``A_eq @ x`` must equal the corresponding element of ``b_eq``. bounds : 2D array The bounds of ``x``, lower bounds in the 1st column, upper bounds in the 2nd column. The bounds are possibly tightened by the presolve procedure. x0 : 1D array, optional Guess values of the decision variables, which will be refined by the optimization algorithm. This argument is currently used only by the 'revised simplex' method, and can only be used if `x0` represents a basic feasible solution. 
    revstack : list of functions
        The functions in the list reverse the operations of _presolve(). The
        function signature is x_org = f(x_mod), where x_mod is the result of
        a presolve step and x_org the value at the start of the step.
    complete : bool
        Whether the solution was determined in presolve (``True`` if so).

    Returns
    -------
    x : 1-D array
        Solution vector to original linear programming problem
    fun: float
        optimal objective value for original problem
    slack : 1-D array
        The (non-negative) slack in the upper bound constraints, that is,
        ``b_ub - A_ub @ x``
    con : 1-D array
        The (nominally zero) residuals of the equality constraints, that is,
        ``b - A_eq @ x``
    """
    # note that all the inputs are the ORIGINAL, unmodified versions
    # no rows, columns have been removed

    (c, A_ub, b_ub, A_eq, b_eq, bounds, x0), revstack, C, b_scale = postsolve_args

    x = _unscale(x, C, b_scale)

    # Undo variable substitutions of _get_Abc()
    # if "complete", problem was solved in presolve; don't do anything here
    n_x = bounds.shape[0]
    if not complete and bounds is not None:  # bounds are never none, probably
        n_unbounded = 0
        for i, bi in enumerate(bounds):
            lbi = bi[0]
            ubi = bi[1]
            if lbi == -np.inf and ubi == np.inf:
                n_unbounded += 1
                x[i] = x[i] - x[n_x + n_unbounded - 1]
            else:
                if lbi == -np.inf:
                    x[i] = ubi - x[i]
                else:
                    x[i] += lbi
    # all the rest of the variables were artificial
    x = x[:n_x]

    # If there were variables removed from the problem, add them back into the
    # solution vector
    # Apply the functions in revstack (reverse direction)
    for rev in reversed(revstack):
        x = rev(x)

    fun = x.dot(c)
    slack = b_ub - A_ub.dot(x)  # report slack for ORIGINAL UB constraints
    # report residuals of ORIGINAL EQ constraints
    con = b_eq - A_eq.dot(x)

    return x, fun, slack, con


def _check_result(x, fun, status, slack, con, bounds, tol, message):
    """
    Check the validity of the provided solution.

    A valid (optimal) solution satisfies all bounds, all slack variables are
    non-negative, and all equality constraint residuals are (nominally) zero.
    Further, the lower bounds, upper bounds, slack, and residuals contain no
    nan values.

    Parameters
    ----------
    x : 1-D array
        Solution vector to original linear programming problem
    fun: float
        optimal objective value for original problem
    status : int
        An integer representing the exit status of the optimization::

             0 : Optimization terminated successfully
             1 : Iteration limit reached
             2 : Problem appears to be infeasible
             3 : Problem appears to be unbounded
             4 : Serious numerical difficulties encountered

    slack : 1-D array
        The (non-negative) slack in the upper bound constraints, that is,
        ``b_ub - A_ub @ x``
    con : 1-D array
        The (nominally zero) residuals of the equality constraints, that is,
        ``b - A_eq @ x``
    bounds : 2D array
        The bounds on the original variables ``x``
    tol : float
        Termination tolerance; see [1]_ Section 4.5.
    message : str
        A string descriptor of the exit status of the optimization.

    Returns
    -------
    status : int
        An integer representing the exit status of the optimization::

             0 : Optimization terminated successfully
             1 : Iteration limit reached
             2 : Problem appears to be infeasible
             3 : Problem appears to be unbounded
             4 : Serious numerical difficulties encountered

    message : str
        A string descriptor of the exit status of the optimization.
    """
    # Somewhat arbitrary
    tol = np.sqrt(tol) * 10

    if x is None:
        # HiGHS does not provide x if infeasible/unbounded
        if status == 0:  # Observed with HiGHS Simplex Primal
            status = 4
            message = ("The solver did not provide a solution nor did it "
                       "report a failure. Please submit a bug report.")
        return status, message

    contains_nans = (
        np.isnan(x).any()
        or np.isnan(fun)
        or np.isnan(slack).any()
        or np.isnan(con).any()
    )

    if contains_nans:
        is_feasible = False
    else:
        invalid_bounds = (x < bounds[:, 0] - tol).any() or (x > bounds[:, 1] + tol).any()
        invalid_slack = status != 3 and (slack < -tol).any()
        invalid_con = status != 3 and (np.abs(con) > tol).any()
        is_feasible = not (invalid_bounds or invalid_slack or invalid_con)

    if status == 0 and not is_feasible:
        status = 4
        message = ("The solution does not satisfy the constraints within the "
                   "required tolerance of " + "{:.2E}".format(tol) + ", yet "
                   "no errors were raised and there is no certificate of "
                   "infeasibility or unboundedness. Check whether "
                   "the slack and constraint residuals are acceptable; "
                   "if not, consider enabling presolve, adjusting the "
                   "tolerance option(s), and/or using a different method. "
                   "Please consider submitting a bug report.")
    elif status == 2 and is_feasible:
        # Occurs if the simplex method exits after phase one with a very
        # nearly basic feasible solution. Postsolving can make the solution
        # basic; however, this solution is NOT optimal
        status = 4
        message = ("The solution is feasible, but the solver did not report "
                   "that the solution was optimal. Please try a different "
                   "method.")

    return status, message
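# ---------------------------------------------------------------------------
# Illustrative sketch (separate from the module above): the np.insert trick
# used by the presolve `rev` closure to restore variables eliminated by
# presolve. When elements were removed at positions k1 < k2 < ..., subtracting
# 0, 1, 2, ... from those positions yields the indices at which np.insert
# rebuilds the original ordering in a single call. The arrays below are
# made-up example values.
import numpy as np

x_orig = np.array([10., 20., 30., 40., 50.])
removed = np.array([1, 3])                          # positions eliminated
x_mod = np.delete(x_orig, removed)                  # [10., 30., 50.]
x_undo = x_orig[removed]                            # [20., 40.]
insert_indices = removed - np.arange(len(removed))  # [1, 2]
x_rev = np.insert(x_mod, insert_indices, x_undo)
assert np.array_equal(x_rev, x_orig)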
import os

import numpy as np
from numpy.testing import assert_allclose
import pytest
from scipy import stats

from .test_continuous_basic import distcont

# this is not a proper statistical test for convergence, but only
# verifies that the estimate and true values don't differ by too much

fit_sizes = [1000, 5000, 10000]  # sample sizes to try

thresh_percent = 0.25  # percent of true parameters for fail cut-off
thresh_min = 0.75  # minimum difference (estimate - true) to fail test

mle_failing_fits = [
        'burr',
        'chi2',
        'gausshyper',
        'genexpon',
        'gengamma',
        'kappa4',
        'ksone',
        'kstwo',
        'mielke',
        'ncf',
        'ncx2',
        'pearson3',
        'powerlognorm',
        'truncexpon',
        'tukeylambda',
        'vonmises',
        'levy_stable',
        'trapezoid',
        'studentized_range'
]

mm_failing_fits = ['alpha', 'betaprime', 'burr', 'burr12', 'cauchy', 'chi',
                   'chi2', 'crystalball', 'dgamma', 'dweibull', 'f',
                   'fatiguelife', 'fisk', 'foldcauchy', 'genextreme',
                   'gengamma', 'genhyperbolic', 'gennorm', 'genpareto',
                   'halfcauchy', 'invgamma', 'invweibull', 'johnsonsu',
                   'kappa3', 'ksone', 'kstwo', 'levy', 'levy_l',
                   'levy_stable', 'loglaplace', 'lomax', 'mielke', 'nakagami',
                   'ncf', 'nct', 'ncx2', 'pareto', 'powerlognorm',
                   'powernorm', 'skewcauchy', 't', 'trapezoid', 'triang',
                   'tukeylambda', 'studentized_range']

# not sure if these fail, but they caused my patience to fail
mm_slow_fits = ['argus', 'exponpow', 'exponweib', 'gausshyper', 'genexpon',
                'genhalflogistic', 'halfgennorm', 'gompertz', 'johnsonsb',
                'kappa4', 'kstwobign', 'recipinvgauss', 'skewnorm',
                'truncexpon', 'vonmises', 'vonmises_line']

failing_fits = {"MM": mm_failing_fits + mm_slow_fits, "MLE": mle_failing_fits}

# Don't run the fit test on these:
skip_fit = [
    'erlang',  # Subclass of gamma, generates a warning.
]


def cases_test_cont_fit():
    # this tests the closeness of the estimated parameters to the true
    # parameters with the fit method of continuous distributions
    # Note: this is slow; some distributions don't converge with sample
    # size <= 10000
    for distname, arg in distcont:
        if distname not in skip_fit:
            yield distname, arg


@pytest.mark.slow
@pytest.mark.parametrize('distname,arg', cases_test_cont_fit())
@pytest.mark.parametrize('method', ["MLE", 'MM'])
def test_cont_fit(distname, arg, method):
    if distname in failing_fits[method]:
        # Skip failing fits unless overridden
        try:
            xfail = not int(os.environ['SCIPY_XFAIL'])
        except Exception:
            xfail = True
        if xfail:
            msg = "Fitting %s doesn't work reliably yet" % distname
            msg += (" [Set environment variable SCIPY_XFAIL=1 to run this"
                    " test nevertheless.]")
            pytest.xfail(msg)

    distfn = getattr(stats, distname)

    truearg = np.hstack([arg, [0.0, 1.0]])
    diffthreshold = np.max(np.vstack([truearg*thresh_percent,
                                      np.full(distfn.numargs+2, thresh_min)]),
                           0)

    for fit_size in fit_sizes:
        # Note that if a fit succeeds, the other fit_sizes are skipped
        np.random.seed(1234)

        with np.errstate(all='ignore'):
            rvs = distfn.rvs(size=fit_size, *arg)
            est = distfn.fit(rvs, method=method)  # start with default values

        diff = est - truearg

        # threshold for location
        diffthreshold[-2] = np.max([np.abs(rvs.mean())*thresh_percent,
                                    thresh_min])

        if np.any(np.isnan(est)):
            raise AssertionError('nan returned in fit')
        else:
            if np.all(np.abs(diff) <= diffthreshold):
                break
    else:
        txt = 'parameter: %s\n' % str(truearg)
        txt += 'estimated: %s\n' % str(est)
        txt += 'diff     : %s\n' % str(diff)
        raise AssertionError('fit not very good in %s\n' % distfn.name + txt)


def _check_loc_scale_mle_fit(name, data, desired, atol=None):
    d = getattr(stats, name)
    actual = d.fit(data)[-2:]
    assert_allclose(actual, desired, atol=atol,
err_msg='poor mle fit of (loc, scale) in %s' % name) def test_non_default_loc_scale_mle_fit(): data = np.array([1.01, 1.78, 1.78, 1.78, 1.88, 1.88, 1.88, 2.00]) _check_loc_scale_mle_fit('uniform', data, [1.01, 0.99], 1e-3) _check_loc_scale_mle_fit('expon', data, [1.01, 0.73875], 1e-3) def test_expon_fit(): """gh-6167""" data = [0, 0, 0, 0, 2, 2, 2, 2] phat = stats.expon.fit(data, floc=0) assert_allclose(phat, [0, 1.0], atol=1e-3)
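# ---------------------------------------------------------------------------
# Illustrative sketch (separate from the files above): why _autoscale rounds
# the equilibration factors to powers of two. Multiplying a binary float by
# an exact power of two changes only its exponent, so the scaling introduces
# no rounding error and _unscale can recover values exactly. The sample
# values below are made up.
import numpy as np

def round_to_power_of_two(x):
    # same formula as _round_to_power_of_two above
    return 2 ** np.around(np.log2(x))

row_max = np.array([3.7, 1200.0, 0.02])
R = 1 / round_to_power_of_two(row_max)
print(R)                       # [0.25, 0.0009765625, 64.0] -- powers of two
a = 0.1                        # not exactly representable in binary
assert (a * R[0]) / R[0] == a  # scaling by a power of two is reversible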
scipy/scipy
scipy/stats/tests/test_fit.py
scipy/optimize/_linprog_util.py
""" Platform for retrieving energy data from SRP. For more details about this platform, please refer to the documentation https://home-assistant.io/components/sensor.srp_energy/ """ from datetime import datetime, timedelta import logging from requests.exceptions import ( ConnectionError as ConnectError, HTTPError, Timeout) import voluptuous as vol from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, CONF_USERNAME, CONF_ID) import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.helpers.entity import Entity REQUIREMENTS = ['srpenergy==1.0.5'] _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Powered by SRP Energy" DEFAULT_NAME = 'SRP Energy' MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1440) ENERGY_KWH = 'kWh' ATTR_READING_COST = "reading_cost" ATTR_READING_TIME = 'datetime' ATTR_READING_USAGE = 'reading_usage' ATTR_DAILY_USAGE = 'daily_usage' ATTR_USAGE_HISTORY = 'usage_history' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_ID): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the SRP energy.""" name = config[CONF_NAME] username = config[CONF_USERNAME] password = config[CONF_PASSWORD] account_id = config[CONF_ID] from srpenergy.client import SrpEnergyClient srp_client = SrpEnergyClient(account_id, username, password) if not srp_client.validate(): _LOGGER.error("Couldn't connect to %s. Check credentials", name) return add_entities([SrpEnergy(name, srp_client)], True) class SrpEnergy(Entity): """Representation of an srp usage.""" def __init__(self, name, client): """Initialize SRP Usage.""" self._state = None self._name = name self._client = client self._history = None self._usage = None @property def attribution(self): """Return the attribution.""" return ATTRIBUTION @property def state(self): """Return the current state.""" if self._state is None: return None return "{0:.2f}".format(self._state) @property def name(self): """Return the name of the sensor.""" return self._name @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return ENERGY_KWH @property def history(self): """Return the energy usage history of this entity, if any.""" if self._usage is None: return None history = [{ ATTR_READING_TIME: isodate, ATTR_READING_USAGE: kwh, ATTR_READING_COST: cost } for _, _, isodate, kwh, cost in self._usage] return history @property def device_state_attributes(self): """Return the state attributes.""" attributes = { ATTR_USAGE_HISTORY: self.history } return attributes @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest usage from SRP Energy.""" start_date = datetime.now() + timedelta(days=-1) end_date = datetime.now() try: usage = self._client.usage(start_date, end_date) daily_usage = 0.0 for _, _, _, kwh, _ in usage: daily_usage += float(kwh) if usage: self._state = daily_usage self._usage = usage else: _LOGGER.error("Unable to fetch data from SRP. No data") except (ConnectError, HTTPError, Timeout) as error: _LOGGER.error("Unable to connect to SRP. %s", error) except ValueError as error: _LOGGER.error("Value error connecting to SRP. %s", error) except TypeError as error: _LOGGER.error("Type error connecting to SRP. " "Check username and password. %s", error)
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/sensor/srp_energy.py
""" Helpers for Zigbee Home Automation. For more details about this component, please refer to the documentation at https://home-assistant.io/components/zha/ """ import asyncio import logging from .const import ( DEFAULT_BAUDRATE, REPORT_CONFIG_MAX_INT, REPORT_CONFIG_MIN_INT, REPORT_CONFIG_RPT_CHANGE, RadioType) _LOGGER = logging.getLogger(__name__) async def safe_read(cluster, attributes, allow_cache=True, only_cache=False, manufacturer=None): """Swallow all exceptions from network read. If we throw during initialization, setup fails. Rather have an entity that exists, but is in a maybe wrong state, than no entity. This method should probably only be used during initialization. """ try: result, _ = await cluster.read_attributes( attributes, allow_cache=allow_cache, only_cache=only_cache, manufacturer=manufacturer ) return result except Exception: # pylint: disable=broad-except return {} async def bind_cluster(entity_id, cluster): """Bind a zigbee cluster. This also swallows DeliveryError exceptions that are thrown when devices are unreachable. """ from zigpy.exceptions import DeliveryError cluster_name = cluster.ep_attribute try: res = await cluster.bind() _LOGGER.debug( "%s: bound '%s' cluster: %s", entity_id, cluster_name, res[0] ) except DeliveryError as ex: _LOGGER.debug( "%s: Failed to bind '%s' cluster: %s", entity_id, cluster_name, str(ex) ) async def configure_reporting(entity_id, cluster, attr, min_report=REPORT_CONFIG_MIN_INT, max_report=REPORT_CONFIG_MAX_INT, reportable_change=REPORT_CONFIG_RPT_CHANGE, manufacturer=None): """Configure attribute reporting for a cluster. This also swallows DeliveryError exceptions that are thrown when devices are unreachable. """ from zigpy.exceptions import DeliveryError attr_name = cluster.attributes.get(attr, [attr])[0] attr_id = get_attr_id_by_name(cluster, attr_name) cluster_name = cluster.ep_attribute kwargs = {} if manufacturer: kwargs['manufacturer'] = manufacturer try: res = await cluster.configure_reporting(attr_id, min_report, max_report, reportable_change, **kwargs) _LOGGER.debug( "%s: reporting '%s' attr on '%s' cluster: %d/%d/%d: Result: '%s'", entity_id, attr_name, cluster_name, min_report, max_report, reportable_change, res ) except DeliveryError as ex: _LOGGER.debug( "%s: failed to set reporting for '%s' attr on '%s' cluster: %s", entity_id, attr_name, cluster_name, str(ex) ) async def bind_configure_reporting(entity_id, cluster, attr, skip_bind=False, min_report=REPORT_CONFIG_MIN_INT, max_report=REPORT_CONFIG_MAX_INT, reportable_change=REPORT_CONFIG_RPT_CHANGE, manufacturer=None): """Bind and configure zigbee attribute reporting for a cluster. This also swallows DeliveryError exceptions that are thrown when devices are unreachable. 
""" if not skip_bind: await bind_cluster(entity_id, cluster) await configure_reporting(entity_id, cluster, attr, min_report=min_report, max_report=max_report, reportable_change=reportable_change, manufacturer=manufacturer) async def check_zigpy_connection(usb_path, radio_type, database_path): """Test zigpy radio connection.""" if radio_type == RadioType.ezsp.name: import bellows.ezsp from bellows.zigbee.application import ControllerApplication radio = bellows.ezsp.EZSP() elif radio_type == RadioType.xbee.name: import zigpy_xbee.api from zigpy_xbee.zigbee.application import ControllerApplication radio = zigpy_xbee.api.XBee() elif radio_type == RadioType.deconz.name: import zigpy_deconz.api from zigpy_deconz.zigbee.application import ControllerApplication radio = zigpy_deconz.api.Deconz() try: await radio.connect(usb_path, DEFAULT_BAUDRATE) controller = ControllerApplication(radio, database_path) await asyncio.wait_for(controller.startup(auto_form=True), timeout=30) radio.close() except Exception: # pylint: disable=broad-except return False return True def convert_ieee(ieee_str): """Convert given ieee string to EUI64.""" from zigpy.types import EUI64, uint8_t return EUI64([uint8_t(p, base=16) for p in ieee_str.split(':')]) def construct_unique_id(cluster): """Construct a unique id from a cluster.""" return "0x{:04x}:{}:0x{:04x}".format( cluster.endpoint.device.nwk, cluster.endpoint.endpoint_id, cluster.cluster_id ) def get_attr_id_by_name(cluster, attr_name): """Get the attribute id for a cluster attribute by its name.""" return next((attrid for attrid, (attrname, datatype) in cluster.attributes.items() if attr_name == attrname), None)
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/zha/core/helpers.py
""" Support for the Fitbit API. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.fitbit/ """ import os import logging import datetime import time import voluptuous as vol from homeassistant.core import callback from homeassistant.components.http import HomeAssistantView from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.const import CONF_UNIT_SYSTEM from homeassistant.helpers.entity import Entity from homeassistant.helpers.icon import icon_for_battery_level import homeassistant.helpers.config_validation as cv from homeassistant.util.json import load_json, save_json REQUIREMENTS = ['fitbit==0.3.0'] _CONFIGURING = {} _LOGGER = logging.getLogger(__name__) ATTR_ACCESS_TOKEN = 'access_token' ATTR_REFRESH_TOKEN = 'refresh_token' ATTR_CLIENT_ID = 'client_id' ATTR_CLIENT_SECRET = 'client_secret' ATTR_LAST_SAVED_AT = 'last_saved_at' CONF_MONITORED_RESOURCES = 'monitored_resources' CONF_CLOCK_FORMAT = 'clock_format' CONF_ATTRIBUTION = 'Data provided by Fitbit.com' DEPENDENCIES = ['http'] FITBIT_AUTH_CALLBACK_PATH = '/api/fitbit/callback' FITBIT_AUTH_START = '/api/fitbit' FITBIT_CONFIG_FILE = 'fitbit.conf' FITBIT_DEFAULT_RESOURCES = ['activities/steps'] SCAN_INTERVAL = datetime.timedelta(minutes=30) DEFAULT_CONFIG = { 'client_id': 'CLIENT_ID_HERE', 'client_secret': 'CLIENT_SECRET_HERE' } FITBIT_RESOURCES_LIST = { 'activities/activityCalories': ['Activity Calories', 'cal', 'fire'], 'activities/calories': ['Calories', 'cal', 'fire'], 'activities/caloriesBMR': ['Calories BMR', 'cal', 'fire'], 'activities/distance': ['Distance', '', 'map-marker'], 'activities/elevation': ['Elevation', '', 'walk'], 'activities/floors': ['Floors', 'floors', 'walk'], 'activities/heart': ['Resting Heart Rate', 'bpm', 'heart-pulse'], 'activities/minutesFairlyActive': ['Minutes Fairly Active', 'minutes', 'walk'], 'activities/minutesLightlyActive': ['Minutes Lightly Active', 'minutes', 'walk'], 'activities/minutesSedentary': ['Minutes Sedentary', 'minutes', 'seat-recline-normal'], 'activities/minutesVeryActive': ['Minutes Very Active', 'minutes', 'run'], 'activities/steps': ['Steps', 'steps', 'walk'], 'activities/tracker/activityCalories': ['Tracker Activity Calories', 'cal', 'fire'], 'activities/tracker/calories': ['Tracker Calories', 'cal', 'fire'], 'activities/tracker/distance': ['Tracker Distance', '', 'map-marker'], 'activities/tracker/elevation': ['Tracker Elevation', '', 'walk'], 'activities/tracker/floors': ['Tracker Floors', 'floors', 'walk'], 'activities/tracker/minutesFairlyActive': ['Tracker Minutes Fairly Active', 'minutes', 'walk'], 'activities/tracker/minutesLightlyActive': ['Tracker Minutes Lightly Active', 'minutes', 'walk'], 'activities/tracker/minutesSedentary': ['Tracker Minutes Sedentary', 'minutes', 'seat-recline-normal'], 'activities/tracker/minutesVeryActive': ['Tracker Minutes Very Active', 'minutes', 'run'], 'activities/tracker/steps': ['Tracker Steps', 'steps', 'walk'], 'body/bmi': ['BMI', 'BMI', 'human'], 'body/fat': ['Body Fat', '%', 'human'], 'body/weight': ['Weight', '', 'human'], 'devices/battery': ['Battery', None, None], 'sleep/awakeningsCount': ['Awakenings Count', 'times awaken', 'sleep'], 'sleep/efficiency': ['Sleep Efficiency', '%', 'sleep'], 'sleep/minutesAfterWakeup': ['Minutes After Wakeup', 'minutes', 'sleep'], 'sleep/minutesAsleep': ['Sleep Minutes Asleep', 'minutes', 'sleep'], 'sleep/minutesAwake': ['Sleep Minutes Awake', 'minutes', 
    'sleep'],
    'sleep/minutesToFallAsleep': ['Sleep Minutes to Fall Asleep',
                                  'minutes', 'sleep'],
    'sleep/startTime': ['Sleep Start Time', None, 'clock'],
    'sleep/timeInBed': ['Sleep Time in Bed', 'minutes', 'hotel']
}

FITBIT_MEASUREMENTS = {
    'en_US': {
        'duration': 'ms',
        'distance': 'mi',
        'elevation': 'ft',
        'height': 'in',
        'weight': 'lbs',
        'body': 'in',
        'liquids': 'fl. oz.',
        'blood glucose': 'mg/dL',
        'battery': '',
    },
    'en_GB': {
        'duration': 'milliseconds',
        'distance': 'kilometers',
        'elevation': 'meters',
        'height': 'centimeters',
        'weight': 'stone',
        'body': 'centimeters',
        'liquids': 'milliliters',
        'blood glucose': 'mmol/L',
        'battery': '',
    },
    'metric': {
        'duration': 'milliseconds',
        'distance': 'kilometers',
        'elevation': 'meters',
        'height': 'centimeters',
        'weight': 'kilograms',
        'body': 'centimeters',
        'liquids': 'milliliters',
        'blood glucose': 'mmol/L',
        'battery': '',
    }
}

BATTERY_LEVELS = {
    'High': 100,
    'Medium': 50,
    'Low': 20,
    'Empty': 0
}

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_MONITORED_RESOURCES, default=FITBIT_DEFAULT_RESOURCES):
        vol.All(cv.ensure_list, [vol.In(FITBIT_RESOURCES_LIST)]),
    vol.Optional(CONF_CLOCK_FORMAT, default='24H'):
        vol.In(['12H', '24H']),
    vol.Optional(CONF_UNIT_SYSTEM, default='default'):
        vol.In(['en_GB', 'en_US', 'metric', 'default'])
})


def request_app_setup(hass, config, add_entities, config_path,
                      discovery_info=None):
    """Assist user with configuring the Fitbit dev application."""
    configurator = hass.components.configurator

    def fitbit_configuration_callback(callback_data):
        """Handle configuration updates."""
        config_path = hass.config.path(FITBIT_CONFIG_FILE)
        if os.path.isfile(config_path):
            config_file = load_json(config_path)
            if config_file == DEFAULT_CONFIG:
                error_msg = ("You didn't correctly modify fitbit.conf,"
                             " please try again.")
                configurator.notify_errors(_CONFIGURING['fitbit'], error_msg)
            else:
                setup_platform(hass, config, add_entities, discovery_info)
        else:
            setup_platform(hass, config, add_entities, discovery_info)

    start_url = "{}{}".format(hass.config.api.base_url,
                              FITBIT_AUTH_CALLBACK_PATH)

    description = """Please create a Fitbit developer app at
                       https://dev.fitbit.com/apps/new.
                       For the OAuth 2.0 Application Type choose Personal.
                       Set the Callback URL to {}.
                       They will provide you a Client ID and secret.
                       These need to be saved into the file located at:
                       {}.
                       Then come back here and hit the below button.
                       """.format(start_url, config_path)

    submit = "I have saved my Client ID and Client Secret into fitbit.conf."

    _CONFIGURING['fitbit'] = configurator.request_config(
        'Fitbit', fitbit_configuration_callback,
        description=description, submit_caption=submit,
        description_image="/static/images/config_fitbit_app.png"
    )


def request_oauth_completion(hass):
    """Request user complete Fitbit OAuth2 flow."""
    configurator = hass.components.configurator
    if "fitbit" in _CONFIGURING:
        configurator.notify_errors(
            _CONFIGURING['fitbit'], "Failed to register, please try again.")
        return

    def fitbit_configuration_callback(callback_data):
        """Handle configuration updates."""

    start_url = '{}{}'.format(hass.config.api.base_url, FITBIT_AUTH_START)

    description = "Please authorize Fitbit by visiting {}".format(start_url)

    _CONFIGURING['fitbit'] = configurator.request_config(
        'Fitbit', fitbit_configuration_callback,
        description=description,
        submit_caption="I have authorized Fitbit."
) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Fitbit sensor.""" config_path = hass.config.path(FITBIT_CONFIG_FILE) if os.path.isfile(config_path): config_file = load_json(config_path) if config_file == DEFAULT_CONFIG: request_app_setup( hass, config, add_entities, config_path, discovery_info=None) return False else: save_json(config_path, DEFAULT_CONFIG) request_app_setup( hass, config, add_entities, config_path, discovery_info=None) return False if "fitbit" in _CONFIGURING: hass.components.configurator.request_done(_CONFIGURING.pop("fitbit")) import fitbit access_token = config_file.get(ATTR_ACCESS_TOKEN) refresh_token = config_file.get(ATTR_REFRESH_TOKEN) expires_at = config_file.get(ATTR_LAST_SAVED_AT) if None not in (access_token, refresh_token): authd_client = fitbit.Fitbit(config_file.get(ATTR_CLIENT_ID), config_file.get(ATTR_CLIENT_SECRET), access_token=access_token, refresh_token=refresh_token, expires_at=expires_at, refresh_cb=lambda x: None) if int(time.time()) - expires_at > 3600: authd_client.client.refresh_token() unit_system = config.get(CONF_UNIT_SYSTEM) if unit_system == 'default': authd_client.system = authd_client. \ user_profile_get()["user"]["locale"] if authd_client.system != 'en_GB': if hass.config.units.is_metric: authd_client.system = 'metric' else: authd_client.system = 'en_US' else: authd_client.system = unit_system dev = [] registered_devs = authd_client.get_devices() clock_format = config.get(CONF_CLOCK_FORMAT) for resource in config.get(CONF_MONITORED_RESOURCES): # monitor battery for all linked FitBit devices if resource == 'devices/battery': for dev_extra in registered_devs: dev.append(FitbitSensor( authd_client, config_path, resource, hass.config.units.is_metric, clock_format, dev_extra)) else: dev.append(FitbitSensor( authd_client, config_path, resource, hass.config.units.is_metric, clock_format)) add_entities(dev, True) else: oauth = fitbit.api.FitbitOauth2Client( config_file.get(ATTR_CLIENT_ID), config_file.get(ATTR_CLIENT_SECRET)) redirect_uri = '{}{}'.format(hass.config.api.base_url, FITBIT_AUTH_CALLBACK_PATH) fitbit_auth_start_url, _ = oauth.authorize_token_url( redirect_uri=redirect_uri, scope=['activity', 'heartrate', 'nutrition', 'profile', 'settings', 'sleep', 'weight']) hass.http.register_redirect(FITBIT_AUTH_START, fitbit_auth_start_url) hass.http.register_view(FitbitAuthCallbackView( config, add_entities, oauth)) request_oauth_completion(hass) class FitbitAuthCallbackView(HomeAssistantView): """Handle OAuth finish callback requests.""" requires_auth = False url = FITBIT_AUTH_CALLBACK_PATH name = 'api:fitbit:callback' def __init__(self, config, add_entities, oauth): """Initialize the OAuth callback view.""" self.config = config self.add_entities = add_entities self.oauth = oauth @callback def get(self, request): """Finish OAuth callback request.""" from oauthlib.oauth2.rfc6749.errors import MismatchingStateError from oauthlib.oauth2.rfc6749.errors import MissingTokenError hass = request.app['hass'] data = request.query response_message = """Fitbit has been successfully authorized! You can close this window now!""" result = None if data.get('code') is not None: redirect_uri = '{}{}'.format( hass.config.api.base_url, FITBIT_AUTH_CALLBACK_PATH) try: result = self.oauth.fetch_access_token(data.get('code'), redirect_uri) except MissingTokenError as error: _LOGGER.error("Missing token: %s", error) response_message = """Something went wrong when attempting authenticating with Fitbit. The error encountered was {}. 
Please try again!""".format(error) except MismatchingStateError as error: _LOGGER.error("Mismatched state, CSRF error: %s", error) response_message = """Something went wrong when attempting authenticating with Fitbit. The error encountered was {}. Please try again!""".format(error) else: _LOGGER.error("Unknown error when authing") response_message = """Something went wrong when attempting authenticating with Fitbit. An unknown error occurred. Please try again! """ if result is None: _LOGGER.error("Unknown error when authing") response_message = """Something went wrong when attempting authenticating with Fitbit. An unknown error occurred. Please try again! """ html_response = """<html><head><title>Fitbit Auth</title></head> <body><h1>{}</h1></body></html>""".format(response_message) if result: config_contents = { ATTR_ACCESS_TOKEN: result.get('access_token'), ATTR_REFRESH_TOKEN: result.get('refresh_token'), ATTR_CLIENT_ID: self.oauth.client_id, ATTR_CLIENT_SECRET: self.oauth.client_secret, ATTR_LAST_SAVED_AT: int(time.time()) } save_json(hass.config.path(FITBIT_CONFIG_FILE), config_contents) hass.async_add_job(setup_platform, hass, self.config, self.add_entities) return html_response class FitbitSensor(Entity): """Implementation of a Fitbit sensor.""" def __init__(self, client, config_path, resource_type, is_metric, clock_format, extra=None): """Initialize the Fitbit sensor.""" self.client = client self.config_path = config_path self.resource_type = resource_type self.is_metric = is_metric self.clock_format = clock_format self.extra = extra self._name = FITBIT_RESOURCES_LIST[self.resource_type][0] if self.extra: self._name = '{0} Battery'.format(self.extra.get('deviceVersion')) unit_type = FITBIT_RESOURCES_LIST[self.resource_type][1] if unit_type == "": split_resource = self.resource_type.split('/') try: measurement_system = FITBIT_MEASUREMENTS[self.client.system] except KeyError: if self.is_metric: measurement_system = FITBIT_MEASUREMENTS['metric'] else: measurement_system = FITBIT_MEASUREMENTS['en_US'] unit_type = measurement_system[split_resource[-1]] self._unit_of_measurement = unit_type self._state = 0 @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def icon(self): """Icon to use in the frontend, if any.""" if self.resource_type == 'devices/battery' and self.extra: battery_level = BATTERY_LEVELS[self.extra.get('battery')] return icon_for_battery_level(battery_level=battery_level, charging=None) return 'mdi:{}'.format(FITBIT_RESOURCES_LIST[self.resource_type][2]) @property def device_state_attributes(self): """Return the state attributes.""" attrs = {} attrs[ATTR_ATTRIBUTION] = CONF_ATTRIBUTION if self.extra: attrs['model'] = self.extra.get('deviceVersion') attrs['type'] = self.extra.get('type').lower() return attrs def update(self): """Get the latest data from the Fitbit API and update the states.""" if self.resource_type == 'devices/battery' and self.extra: self._state = self.extra.get('battery') else: container = self.resource_type.replace("/", "-") response = self.client.time_series(self.resource_type, period='7d') raw_state = response[container][-1].get('value') if self.resource_type == 'activities/distance': self._state = format(float(raw_state), '.2f') elif self.resource_type == 'activities/tracker/distance': self._state = 
format(float(raw_state), '.2f') elif self.resource_type == 'body/bmi': self._state = format(float(raw_state), '.1f') elif self.resource_type == 'body/fat': self._state = format(float(raw_state), '.1f') elif self.resource_type == 'body/weight': self._state = format(float(raw_state), '.1f') elif self.resource_type == 'sleep/startTime': if raw_state == '': self._state = '-' elif self.clock_format == '12H': hours, minutes = raw_state.split(':') hours, minutes = int(hours), int(minutes) setting = 'AM' if hours > 12: setting = 'PM' hours -= 12 elif hours == 0: hours = 12 self._state = '{}:{:02d} {}'.format(hours, minutes, setting) else: self._state = raw_state else: if self.is_metric: self._state = raw_state else: try: self._state = '{0:,}'.format(int(raw_state)) except TypeError: self._state = raw_state if self.resource_type == 'activities/heart': self._state = response[container][-1]. \ get('value').get('restingHeartRate') token = self.client.client.session.token config_contents = { ATTR_ACCESS_TOKEN: token.get('access_token'), ATTR_REFRESH_TOKEN: token.get('refresh_token'), ATTR_CLIENT_ID: self.client.client.client_id, ATTR_CLIENT_SECRET: self.client.client.client_secret, ATTR_LAST_SAVED_AT: int(time.time()) } save_json(self.config_path, config_contents)
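# ---------------------------------------------------------------------------
# Illustrative sketch (separate from the platform above): the 12H clock
# conversion FitbitSensor.update() applies to 'sleep/startTime' values.
# Note that, as written, 12:xx stays labelled 'AM' (only hours > 12 flip to
# 'PM'), which mirrors the logic above.
def to_12h(raw_state):
    hours, minutes = raw_state.split(':')
    hours, minutes = int(hours), int(minutes)
    setting = 'AM'
    if hours > 12:
        setting = 'PM'
        hours -= 12
    elif hours == 0:
        hours = 12
    return '{}:{:02d} {}'.format(hours, minutes, setting)

print(to_12h('23:05'))  # 11:05 PM
print(to_12h('00:30'))  # 12:30 AM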
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/sensor/fitbit.py
"""Support for EDP re:dy sensors.""" import logging from homeassistant.helpers.entity import Entity from homeassistant.components.edp_redy import EdpRedyDevice, EDP_REDY _LOGGER = logging.getLogger(__name__) DEPENDENCIES = ['edp_redy'] # Load power in watts (W) ATTR_ACTIVE_POWER = 'active_power' async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Perform the setup for re:dy devices.""" from edp_redy.session import ACTIVE_POWER_ID session = hass.data[EDP_REDY] devices = [] # Create sensors for modules for device_json in session.modules_dict.values(): if 'HA_POWER_METER' not in device_json['Capabilities']: continue devices.append(EdpRedyModuleSensor(session, device_json)) # Create a sensor for global active power devices.append(EdpRedySensor(session, ACTIVE_POWER_ID, "Power Home", 'mdi:flash', 'W')) async_add_entities(devices, True) class EdpRedySensor(EdpRedyDevice, Entity): """Representation of a EDP re:dy generic sensor.""" def __init__(self, session, sensor_id, name, icon, unit): """Initialize the sensor.""" super().__init__(session, sensor_id, name) self._icon = icon self._unit = unit @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Return the icon to use in the frontend.""" return self._icon @property def unit_of_measurement(self): """Return the unit of measurement of this sensor.""" return self._unit async def async_update(self): """Parse the data for this sensor.""" if self._id in self._session.values_dict: self._state = self._session.values_dict[self._id] self._is_available = True else: self._is_available = False class EdpRedyModuleSensor(EdpRedyDevice, Entity): """Representation of a EDP re:dy module sensor.""" def __init__(self, session, device_json): """Initialize the sensor.""" super().__init__(session, device_json['PKID'], "Power {0}".format(device_json['Name'])) @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Return the icon to use in the frontend.""" return 'mdi:flash' @property def unit_of_measurement(self): """Return the unit of measurement of this sensor.""" return 'W' async def async_update(self): """Parse the data for this sensor.""" if self._id in self._session.modules_dict: device_json = self._session.modules_dict[self._id] self._parse_data(device_json) else: self._is_available = False def _parse_data(self, data): """Parse data received from the server.""" super()._parse_data(data) _LOGGER.debug("Sensor data: %s", str(data)) for state_var in data['StateVars']: if state_var['Name'] == 'ActivePower': try: self._state = float(state_var['Value']) * 1000 except ValueError: _LOGGER.error("Could not parse power for %s", self._id) self._state = 0 self._is_available = False
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/edp_redy/sensor.py
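The module sensor above exposes watts while the re:dy server reports kilowatts. A minimal, self-contained sketch of that conversion, run against a made-up payload (the real data arrives through the edp_redy cloud session):

data = {'StateVars': [{'Name': 'ActivePower', 'Value': '0.350'}]}  # example payload

state = None
for state_var in data['StateVars']:
    if state_var['Name'] == 'ActivePower':
        # The server reports kilowatts; the sensor exposes watts.
        state = float(state_var['Value']) * 1000

print(state)  # 350.0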
"""Config flow for ZHA.""" from collections import OrderedDict import os import voluptuous as vol from homeassistant import config_entries from .core.const import ( CONF_RADIO_TYPE, CONF_USB_PATH, DEFAULT_DATABASE_NAME, DOMAIN, RadioType) from .core.helpers import check_zigpy_connection @config_entries.HANDLERS.register(DOMAIN) class ZhaFlowHandler(config_entries.ConfigFlow): """Handle a config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH async def async_step_user(self, user_input=None): """Handle a zha config flow start.""" if self._async_current_entries(): return self.async_abort(reason='single_instance_allowed') errors = {} fields = OrderedDict() fields[vol.Required(CONF_USB_PATH)] = str fields[vol.Optional(CONF_RADIO_TYPE, default='ezsp')] = vol.In( RadioType.list() ) if user_input is not None: database = os.path.join(self.hass.config.config_dir, DEFAULT_DATABASE_NAME) test = await check_zigpy_connection(user_input[CONF_USB_PATH], user_input[CONF_RADIO_TYPE], database) if test: return self.async_create_entry( title=user_input[CONF_USB_PATH], data=user_input) errors['base'] = 'cannot_connect' return self.async_show_form( step_id='user', data_schema=vol.Schema(fields), errors=errors ) async def async_step_import(self, import_info): """Handle a zha config import.""" if self._async_current_entries(): return self.async_abort(reason='single_instance_allowed') return self.async_create_entry( title=import_info[CONF_USB_PATH], data=import_info )
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/zha/config_flow.py
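The flow above builds its form from an OrderedDict of voluptuous markers. A small sketch of that pattern with example keys and radio types (the real flow uses CONF_USB_PATH and RadioType.list()):

from collections import OrderedDict
import voluptuous as vol

fields = OrderedDict()
fields[vol.Required('usb_path')] = str
fields[vol.Optional('radio_type', default='ezsp')] = vol.In(['ezsp', 'xbee'])

schema = vol.Schema(fields)
# Defaults for optional keys are filled in during validation.
print(schema({'usb_path': '/dev/ttyUSB0'}))
# {'usb_path': '/dev/ttyUSB0', 'radio_type': 'ezsp'}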
""" Support for switching Arduino pins on and off. So far only digital pins are supported. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/switch.arduino/ """ import logging import voluptuous as vol from homeassistant.components import arduino from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA) from homeassistant.const import CONF_NAME import homeassistant.helpers.config_validation as cv DEPENDENCIES = ['arduino'] _LOGGER = logging.getLogger(__name__) CONF_PINS = 'pins' CONF_TYPE = 'digital' CONF_NEGATE = 'negate' CONF_INITIAL = 'initial' PIN_SCHEMA = vol.Schema({ vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_INITIAL, default=False): cv.boolean, vol.Optional(CONF_NEGATE, default=False): cv.boolean, }) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_PINS, default={}): vol.Schema({cv.positive_int: PIN_SCHEMA}), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Arduino platform.""" # Verify that Arduino board is present if arduino.BOARD is None: _LOGGER.error("A connection has not been made to the Arduino board") return False pins = config.get(CONF_PINS) switches = [] for pinnum, pin in pins.items(): switches.append(ArduinoSwitch(pinnum, pin)) add_entities(switches) class ArduinoSwitch(SwitchDevice): """Representation of an Arduino switch.""" def __init__(self, pin, options): """Initialize the Pin.""" self._pin = pin self._name = options.get(CONF_NAME) self.pin_type = CONF_TYPE self.direction = 'out' self._state = options.get(CONF_INITIAL) if options.get(CONF_NEGATE): self.turn_on_handler = arduino.BOARD.set_digital_out_low self.turn_off_handler = arduino.BOARD.set_digital_out_high else: self.turn_on_handler = arduino.BOARD.set_digital_out_high self.turn_off_handler = arduino.BOARD.set_digital_out_low arduino.BOARD.set_mode(self._pin, self.direction, self.pin_type) (self.turn_on_handler if self._state else self.turn_off_handler)(pin) @property def name(self): """Get the name of the pin.""" return self._name @property def is_on(self): """Return true if pin is high/on.""" return self._state def turn_on(self, **kwargs): """Turn the pin to high/on.""" self._state = True self.turn_on_handler(self._pin) def turn_off(self, **kwargs): """Turn the pin to low/off.""" self._state = False self.turn_off_handler(self._pin)
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/arduino/switch.py
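The switch platform validates each pin's options against PIN_SCHEMA before creating entities. A standalone sketch of that validation with made-up pin numbers and names:

import voluptuous as vol

PIN_SCHEMA = vol.Schema({
    vol.Required('name'): str,
    vol.Optional('initial', default=False): bool,
    vol.Optional('negate', default=False): bool,
})

pins = {11: {'name': 'Fan relay'}, 12: {'name': 'Pump', 'negate': True}}
validated = {num: PIN_SCHEMA(conf) for num, conf in pins.items()}
print(validated[11])  # {'name': 'Fan relay', 'initial': False, 'negate': False}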
""" Support for functionality to interact with FireTV devices. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/media_player.firetv/ """ import functools import logging import threading import voluptuous as vol from homeassistant.components.media_player import ( MediaPlayerDevice, PLATFORM_SCHEMA) from homeassistant.components.media_player.const import ( SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON) from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PORT, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['firetv==1.0.7'] _LOGGER = logging.getLogger(__name__) SUPPORT_FIRETV = SUPPORT_PAUSE | \ SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \ SUPPORT_NEXT_TRACK | SUPPORT_SELECT_SOURCE | SUPPORT_STOP | \ SUPPORT_PLAY CONF_ADBKEY = 'adbkey' CONF_GET_SOURCE = 'get_source' CONF_GET_SOURCES = 'get_sources' DEFAULT_NAME = 'Amazon Fire TV' DEFAULT_PORT = 5555 DEFAULT_GET_SOURCE = True DEFAULT_GET_SOURCES = True def has_adb_files(value): """Check that ADB key files exist.""" priv_key = value pub_key = '{}.pub'.format(value) cv.isfile(pub_key) return cv.isfile(priv_key) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_ADBKEY): has_adb_files, vol.Optional(CONF_GET_SOURCE, default=DEFAULT_GET_SOURCE): cv.boolean, vol.Optional(CONF_GET_SOURCES, default=DEFAULT_GET_SOURCES): cv.boolean }) PACKAGE_LAUNCHER = "com.amazon.tv.launcher" PACKAGE_SETTINGS = "com.amazon.tv.settings" def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the FireTV platform.""" from firetv import FireTV host = '{0}:{1}'.format(config[CONF_HOST], config[CONF_PORT]) if CONF_ADBKEY in config: ftv = FireTV(host, config[CONF_ADBKEY]) adb_log = " using adbkey='{0}'".format(config[CONF_ADBKEY]) else: ftv = FireTV(host) adb_log = "" if not ftv.available: _LOGGER.warning("Could not connect to Fire TV at %s%s", host, adb_log) return name = config[CONF_NAME] get_source = config[CONF_GET_SOURCE] get_sources = config[CONF_GET_SOURCES] device = FireTVDevice(ftv, name, get_source, get_sources) add_entities([device]) _LOGGER.debug("Setup Fire TV at %s%s", host, adb_log) def adb_decorator(override_available=False): """Send an ADB command if the device is available and not locked.""" def adb_wrapper(func): """Wait if previous ADB commands haven't finished.""" @functools.wraps(func) def _adb_wrapper(self, *args, **kwargs): # If the device is unavailable, don't do anything if not self.available and not override_available: return None # If an ADB command is already running, skip this command if not self.adb_lock.acquire(blocking=False): _LOGGER.info("Skipping an ADB command because a previous " "command is still running") return None # Additional ADB commands will be prevented while trying this one try: returns = func(self, *args, **kwargs) except self.exceptions as err: _LOGGER.error( "Failed to execute an ADB command. ADB connection re-" "establishing attempt in the next update. 
Error: %s", err) returns = None self._available = False # pylint: disable=protected-access finally: self.adb_lock.release() return returns return _adb_wrapper return adb_wrapper class FireTVDevice(MediaPlayerDevice): """Representation of an Amazon Fire TV device on the network.""" def __init__(self, ftv, name, get_source, get_sources): """Initialize the FireTV device.""" from adb.adb_protocol import ( InvalidChecksumError, InvalidCommandError, InvalidResponseError) self.firetv = ftv self._name = name self._get_source = get_source self._get_sources = get_sources # whether or not the ADB connection is currently in use self.adb_lock = threading.Lock() # ADB exceptions to catch self.exceptions = ( AttributeError, BrokenPipeError, TypeError, ValueError, InvalidChecksumError, InvalidCommandError, InvalidResponseError) self._state = None self._available = self.firetv.available self._current_app = None self._running_apps = None @property def name(self): """Return the device name.""" return self._name @property def should_poll(self): """Device should be polled.""" return True @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_FIRETV @property def state(self): """Return the state of the player.""" return self._state @property def available(self): """Return whether or not the ADB connection is valid.""" return self._available @property def app_id(self): """Return the current app.""" return self._current_app @property def source(self): """Return the current app.""" return self._current_app @property def source_list(self): """Return a list of running apps.""" return self._running_apps @adb_decorator(override_available=True) def update(self): """Get the latest date and update device state.""" # Check if device is disconnected. if not self._available: self._running_apps = None self._current_app = None # Try to connect self.firetv.connect() self._available = self.firetv.available # If the ADB connection is not intact, don't update. if not self._available: return # Check if device is off. if not self.firetv.screen_on: self._state = STATE_OFF self._running_apps = None self._current_app = None # Check if screen saver is on. elif not self.firetv.awake: self._state = STATE_IDLE self._running_apps = None self._current_app = None else: # Get the running apps. if self._get_sources: self._running_apps = self.firetv.running_apps # Get the current app. if self._get_source: current_app = self.firetv.current_app if isinstance(current_app, dict)\ and 'package' in current_app: self._current_app = current_app['package'] else: self._current_app = current_app # Show the current app as the only running app. if not self._get_sources: if self._current_app: self._running_apps = [self._current_app] else: self._running_apps = None # Check if the launcher is active. if self._current_app in [PACKAGE_LAUNCHER, PACKAGE_SETTINGS]: self._state = STATE_STANDBY # Check for a wake lock (device is playing). elif self.firetv.wake_lock: self._state = STATE_PLAYING # Otherwise, device is paused. else: self._state = STATE_PAUSED # Don't get the current app. elif self.firetv.wake_lock: # Check for a wake lock (device is playing). self._state = STATE_PLAYING else: # Assume the devices is on standby. 
self._state = STATE_STANDBY @adb_decorator() def turn_on(self): """Turn on the device.""" self.firetv.turn_on() @adb_decorator() def turn_off(self): """Turn off the device.""" self.firetv.turn_off() @adb_decorator() def media_play(self): """Send play command.""" self.firetv.media_play() @adb_decorator() def media_pause(self): """Send pause command.""" self.firetv.media_pause() @adb_decorator() def media_play_pause(self): """Send play/pause command.""" self.firetv.media_play_pause() @adb_decorator() def media_stop(self): """Send stop (back) command.""" self.firetv.back() @adb_decorator() def volume_up(self): """Send volume up command.""" self.firetv.volume_up() @adb_decorator() def volume_down(self): """Send volume down command.""" self.firetv.volume_down() @adb_decorator() def media_previous_track(self): """Send previous track command (results in rewind).""" self.firetv.media_previous() @adb_decorator() def media_next_track(self): """Send next track command (results in fast-forward).""" self.firetv.media_next() @adb_decorator() def select_source(self, source): """Select input source. If the source starts with a '!', then it will close the app instead of opening it. """ if isinstance(source, str): if not source.startswith('!'): self.firetv.launch_app(source) else: self.firetv.stop_app(source[1:].lstrip())
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/media_player/firetv.py
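adb_decorator's key trick is the non-blocking lock acquire: a command issued while another is in flight is skipped rather than queued. A standalone sketch of just that pattern (the names here are illustrative, not from the platform):

import functools
import threading

def skip_if_busy(func):
    """Skip the call when a previous one still holds the lock."""
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        if not self.lock.acquire(blocking=False):
            return None  # a previous command is still running
        try:
            return func(self, *args, **kwargs)
        finally:
            self.lock.release()
    return wrapper

class Device:
    def __init__(self):
        self.lock = threading.Lock()

    @skip_if_busy
    def command(self):
        return 'ok'

print(Device().command())  # 'ok'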
""" Read temperature information from Eddystone beacons. Your beacons must be configured to transmit UID (for identification) and TLM (for temperature) frames. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.eddystone_temperature/ """ import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_NAME, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, STATE_UNKNOWN, TEMP_CELSIUS) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity REQUIREMENTS = ['beacontools[scan]==1.2.3', 'construct==2.9.45'] _LOGGER = logging.getLogger(__name__) CONF_BEACONS = 'beacons' CONF_BT_DEVICE_ID = 'bt_device_id' CONF_INSTANCE = 'instance' CONF_NAMESPACE = 'namespace' BEACON_SCHEMA = vol.Schema({ vol.Required(CONF_NAMESPACE): cv.string, vol.Required(CONF_INSTANCE): cv.string, vol.Optional(CONF_NAME): cv.string }) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_BT_DEVICE_ID, default=0): cv.positive_int, vol.Required(CONF_BEACONS): vol.Schema({cv.string: BEACON_SCHEMA}), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Validate configuration, create devices and start monitoring thread.""" bt_device_id = config.get("bt_device_id") beacons = config.get(CONF_BEACONS) devices = [] for dev_name, properties in beacons.items(): namespace = get_from_conf(properties, CONF_NAMESPACE, 20) instance = get_from_conf(properties, CONF_INSTANCE, 12) name = properties.get(CONF_NAME, dev_name) if instance is None or namespace is None: _LOGGER.error("Skipping %s", dev_name) continue else: devices.append(EddystoneTemp(name, namespace, instance)) if devices: mon = Monitor(hass, devices, bt_device_id) def monitor_stop(_service_or_event): """Stop the monitor thread.""" _LOGGER.info("Stopping scanner for Eddystone beacons") mon.stop() def monitor_start(_service_or_event): """Start the monitor thread.""" _LOGGER.info("Starting scanner for Eddystone beacons") mon.start() add_entities(devices) mon.start() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop) hass.bus.listen_once(EVENT_HOMEASSISTANT_START, monitor_start) else: _LOGGER.warning("No devices were added") def get_from_conf(config, config_key, length): """Retrieve value from config and validate length.""" string = config.get(config_key) if len(string) != length: _LOGGER.error("Error in config parameter %s: Must be exactly %d " "bytes. 
Device will not be added", config_key, length/2) return None return string class EddystoneTemp(Entity): """Representation of a temperature sensor.""" def __init__(self, name, namespace, instance): """Initialize a sensor.""" self._name = name self.namespace = namespace self.instance = instance self.bt_addr = None self.temperature = STATE_UNKNOWN @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the device.""" return self.temperature @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return TEMP_CELSIUS @property def should_poll(self): """Return the polling state.""" return False class Monitor: """Continuously scan for BLE advertisements.""" def __init__(self, hass, devices, bt_device_id): """Construct interface object.""" self.hass = hass # List of beacons to monitor self.devices = devices # Number of the bt device (hciX) self.bt_device_id = bt_device_id def callback(bt_addr, _, packet, additional_info): """Handle new packets.""" self.process_packet( additional_info['namespace'], additional_info['instance'], packet.temperature) from beacontools import ( # pylint: disable=import-error BeaconScanner, EddystoneFilter, EddystoneTLMFrame) device_filters = [EddystoneFilter(d.namespace, d.instance) for d in devices] self.scanner = BeaconScanner( callback, bt_device_id, device_filters, EddystoneTLMFrame) self.scanning = False def start(self): """Continuously scan for BLE advertisements.""" if not self.scanning: self.scanner.start() self.scanning = True else: _LOGGER.debug( "start() called, but scanner is already running") def process_packet(self, namespace, instance, temperature): """Assign temperature to device.""" _LOGGER.debug("Received temperature for <%s,%s>: %d", namespace, instance, temperature) for dev in self.devices: if dev.namespace == namespace and dev.instance == instance: if dev.temperature != temperature: dev.temperature = temperature dev.schedule_update_ha_state() def stop(self): """Signal runner to stop and join thread.""" if self.scanning: _LOGGER.debug("Stopping...") self.scanner.stop() _LOGGER.debug("Stopped") self.scanning = False else: _LOGGER.debug( "stop() called but scanner was not running")
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/sensor/eddystone_temperature.py
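get_from_conf enforces that identifiers have the exact hex length Eddystone requires: 20 characters (10 bytes) for the namespace, 12 characters (6 bytes) for the instance. A sketch of that check with example values:

namespace = '112233445566778899aa'  # example value, 20 hex chars
instance = '000000000001'           # example value, 12 hex chars

for value, length in ((namespace, 20), (instance, 12)):
    if len(value) != length:
        raise ValueError('Must be exactly %d bytes' % (length // 2))
print('both identifiers valid')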
""" Tracks devices by sending a ICMP echo request (ping). For more details about this platform, please refer to the documentation at https://home-assistant.io/components/device_tracker.ping/ """ import logging import subprocess import sys from datetime import timedelta import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.device_tracker import ( PLATFORM_SCHEMA, CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL, SOURCE_TYPE_ROUTER) from homeassistant import util from homeassistant import const _LOGGER = logging.getLogger(__name__) CONF_PING_COUNT = 'count' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(const.CONF_HOSTS): {cv.string: cv.string}, vol.Optional(CONF_PING_COUNT, default=1): cv.positive_int, }) class Host: """Host object with ping detection.""" def __init__(self, ip_address, dev_id, hass, config): """Initialize the Host pinger.""" self.hass = hass self.ip_address = ip_address self.dev_id = dev_id self._count = config[CONF_PING_COUNT] if sys.platform == 'win32': self._ping_cmd = ['ping', '-n', '1', '-w', '1000', self.ip_address] else: self._ping_cmd = ['ping', '-n', '-q', '-c1', '-W1', self.ip_address] def ping(self): """Send an ICMP echo request and return True if success.""" pinger = subprocess.Popen(self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) try: pinger.communicate() return pinger.returncode == 0 except subprocess.CalledProcessError: return False def update(self, see): """Update device state by sending one or more ping messages.""" failed = 0 while failed < self._count: # check more times if host is unreachable if self.ping(): see(dev_id=self.dev_id, source_type=SOURCE_TYPE_ROUTER) return True failed += 1 _LOGGER.debug("No response from %s failed=%d", self.ip_address, failed) def setup_scanner(hass, config, see, discovery_info=None): """Set up the Host objects and return the update function.""" hosts = [Host(ip, dev_id, hass, config) for (dev_id, ip) in config[const.CONF_HOSTS].items()] interval = config.get(CONF_SCAN_INTERVAL, timedelta(seconds=len(hosts) * config[CONF_PING_COUNT]) + DEFAULT_SCAN_INTERVAL) _LOGGER.debug("Started ping tracker with interval=%s on hosts: %s", interval, ",".join([host.ip_address for host in hosts])) def update_interval(now): """Update all the hosts on every interval time.""" try: for host in hosts: host.update(see) finally: hass.helpers.event.track_point_in_utc_time( update_interval, util.dt.utcnow() + interval) update_interval(None) return True
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/device_tracker/ping.py
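When no scan_interval is configured, the tracker stretches the interval by the number of hosts times the ping count, on top of the device_tracker default. A sketch of that arithmetic, assuming the usual 12-second default:

from datetime import timedelta

DEFAULT_SCAN_INTERVAL = timedelta(seconds=12)  # assumed component default
hosts, ping_count = 5, 3

interval = timedelta(seconds=hosts * ping_count) + DEFAULT_SCAN_INTERVAL
print(interval)  # 0:00:27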
""" Support for LimitlessLED bulbs. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/light.limitlessled/ """ import logging import voluptuous as vol from homeassistant.const import ( CONF_NAME, CONF_HOST, CONF_PORT, CONF_TYPE, STATE_ON) from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, EFFECT_COLORLOOP, EFFECT_WHITE, FLASH_LONG, SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_COLOR, SUPPORT_TRANSITION, Light, PLATFORM_SCHEMA) import homeassistant.helpers.config_validation as cv from homeassistant.util.color import ( color_temperature_mired_to_kelvin, color_hs_to_RGB) from homeassistant.helpers.restore_state import RestoreEntity REQUIREMENTS = ['limitlessled==1.1.3'] _LOGGER = logging.getLogger(__name__) CONF_BRIDGES = 'bridges' CONF_GROUPS = 'groups' CONF_NUMBER = 'number' CONF_VERSION = 'version' CONF_FADE = 'fade' DEFAULT_LED_TYPE = 'rgbw' DEFAULT_PORT = 5987 DEFAULT_TRANSITION = 0 DEFAULT_VERSION = 6 DEFAULT_FADE = False LED_TYPE = ['rgbw', 'rgbww', 'white', 'bridge-led', 'dimmer'] EFFECT_NIGHT = 'night' MIN_SATURATION = 10 WHITE = [0, 0] SUPPORT_LIMITLESSLED_WHITE = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_TRANSITION) SUPPORT_LIMITLESSLED_DIMMER = (SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION) SUPPORT_LIMITLESSLED_RGB = (SUPPORT_BRIGHTNESS | SUPPORT_EFFECT | SUPPORT_FLASH | SUPPORT_COLOR | SUPPORT_TRANSITION) SUPPORT_LIMITLESSLED_RGBWW = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_FLASH | SUPPORT_COLOR | SUPPORT_TRANSITION) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_BRIDGES): vol.All(cv.ensure_list, [ { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): cv.positive_int, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Required(CONF_GROUPS): vol.All(cv.ensure_list, [ { vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_TYPE, default=DEFAULT_LED_TYPE): vol.In(LED_TYPE), vol.Required(CONF_NUMBER): cv.positive_int, vol.Optional(CONF_FADE, default=DEFAULT_FADE): cv.boolean, } ]), }, ]), }) def rewrite_legacy(config): """Rewrite legacy configuration to new format.""" bridges = config.get(CONF_BRIDGES, [config]) new_bridges = [] for bridge_conf in bridges: groups = [] if 'groups' in bridge_conf: groups = bridge_conf['groups'] else: _LOGGER.warning("Legacy configuration format detected") for i in range(1, 5): name_key = 'group_%d_name' % i if name_key in bridge_conf: groups.append({ 'number': i, 'type': bridge_conf.get('group_%d_type' % i, DEFAULT_LED_TYPE), 'name': bridge_conf.get(name_key) }) new_bridges.append({ 'host': bridge_conf.get(CONF_HOST), 'version': bridge_conf.get(CONF_VERSION), 'port': bridge_conf.get(CONF_PORT), 'groups': groups }) return {'bridges': new_bridges} def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the LimitlessLED lights.""" from limitlessled.bridge import Bridge # Two legacy configuration formats are supported to maintain backwards # compatibility. config = rewrite_legacy(config) # Use the expanded configuration format. 
lights = [] for bridge_conf in config.get(CONF_BRIDGES): bridge = Bridge(bridge_conf.get(CONF_HOST), port=bridge_conf.get(CONF_PORT, DEFAULT_PORT), version=bridge_conf.get(CONF_VERSION, DEFAULT_VERSION)) for group_conf in bridge_conf.get(CONF_GROUPS): group = bridge.add_group( group_conf.get(CONF_NUMBER), group_conf.get(CONF_NAME), group_conf.get(CONF_TYPE, DEFAULT_LED_TYPE)) lights.append(LimitlessLEDGroup(group, { 'fade': group_conf[CONF_FADE] })) add_entities(lights) def state(new_state): """State decorator. Specify True (turn on) or False (turn off). """ def decorator(function): """Set up the decorator function.""" # pylint: disable=protected-access def wrapper(self, **kwargs): """Wrap a group state change.""" from limitlessled.pipeline import Pipeline pipeline = Pipeline() transition_time = DEFAULT_TRANSITION if self._effect == EFFECT_COLORLOOP: self.group.stop() self._effect = None # Set transition time. if ATTR_TRANSITION in kwargs: transition_time = int(kwargs[ATTR_TRANSITION]) # Do group type-specific work. function(self, transition_time, pipeline, **kwargs) # Update state. self._is_on = new_state self.group.enqueue(pipeline) self.schedule_update_ha_state() return wrapper return decorator class LimitlessLEDGroup(Light, RestoreEntity): """Representation of a LimitlessLED group.""" def __init__(self, group, config): """Initialize a group.""" from limitlessled.group.rgbw import RgbwGroup from limitlessled.group.white import WhiteGroup from limitlessled.group.dimmer import DimmerGroup from limitlessled.group.rgbww import RgbwwGroup if isinstance(group, WhiteGroup): self._supported = SUPPORT_LIMITLESSLED_WHITE self._effect_list = [EFFECT_NIGHT] elif isinstance(group, DimmerGroup): self._supported = SUPPORT_LIMITLESSLED_DIMMER self._effect_list = [] elif isinstance(group, RgbwGroup): self._supported = SUPPORT_LIMITLESSLED_RGB self._effect_list = [EFFECT_COLORLOOP, EFFECT_NIGHT, EFFECT_WHITE] elif isinstance(group, RgbwwGroup): self._supported = SUPPORT_LIMITLESSLED_RGBWW self._effect_list = [EFFECT_COLORLOOP, EFFECT_NIGHT, EFFECT_WHITE] self.group = group self.config = config self._is_on = False self._brightness = None self._temperature = None self._color = None self._effect = None async def async_added_to_hass(self): """Handle entity about to be added to hass event.""" await super().async_added_to_hass() last_state = await self.async_get_last_state() if last_state: self._is_on = (last_state.state == STATE_ON) self._brightness = last_state.attributes.get('brightness') self._temperature = last_state.attributes.get('color_temp') self._color = last_state.attributes.get('hs_color') @property def should_poll(self): """No polling needed.""" return False @property def assumed_state(self): """Return True because unable to access real state of the entity.""" return True @property def name(self): """Return the name of the group.""" return self.group.name @property def is_on(self): """Return true if device is on.""" return self._is_on @property def brightness(self): """Return the brightness property.""" if self._effect == EFFECT_NIGHT: return 1 return self._brightness @property def min_mireds(self): """Return the coldest color_temp that this light supports.""" return 154 @property def max_mireds(self): """Return the warmest color_temp that this light supports.""" return 370 @property def color_temp(self): """Return the temperature property.""" if self.hs_color is not None: return None return self._temperature @property def hs_color(self): """Return the color property.""" if self._effect == EFFECT_NIGHT: return None if self._color is None or self._color[1] == 0: return None return self._color @property def supported_features(self): """Flag supported features.""" return self._supported @property def effect(self): """Return the current effect for this light.""" return self._effect @property def effect_list(self): """Return the list of supported effects for this light.""" return self._effect_list # pylint: disable=arguments-differ @state(False) def turn_off(self, transition_time, pipeline, **kwargs): """Turn off a group.""" if self.config[CONF_FADE]: pipeline.transition(transition_time, brightness=0.0) pipeline.off() # pylint: disable=arguments-differ @state(True) def turn_on(self, transition_time, pipeline, **kwargs): """Turn on (or adjust property of) a group.""" # The night effect does not need a turned on light if kwargs.get(ATTR_EFFECT) == EFFECT_NIGHT: if EFFECT_NIGHT in self._effect_list: pipeline.night_light() self._effect = EFFECT_NIGHT return pipeline.on() # Set up transition. args = {} if self.config[CONF_FADE] and not self.is_on and self._brightness: args['brightness'] = self.limitlessled_brightness() if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] args['brightness'] = self.limitlessled_brightness() if ATTR_HS_COLOR in kwargs and self._supported & SUPPORT_COLOR: self._color = kwargs[ATTR_HS_COLOR] # White is a special case. if self._color[1] < MIN_SATURATION: pipeline.white() self._color = WHITE else: args['color'] = self.limitlessled_color() if ATTR_COLOR_TEMP in kwargs: if self._supported & SUPPORT_COLOR: pipeline.white() self._color = WHITE if self._supported & SUPPORT_COLOR_TEMP: self._temperature = kwargs[ATTR_COLOR_TEMP] args['temperature'] = self.limitlessled_temperature() if args: pipeline.transition(transition_time, **args) # Flash. if ATTR_FLASH in kwargs and self._supported & SUPPORT_FLASH: duration = 0 if kwargs[ATTR_FLASH] == FLASH_LONG: duration = 1 pipeline.flash(duration=duration) # Add effects. if ATTR_EFFECT in kwargs and self._effect_list: if kwargs[ATTR_EFFECT] == EFFECT_COLORLOOP: from limitlessled.presets import COLORLOOP self._effect = EFFECT_COLORLOOP pipeline.append(COLORLOOP) if kwargs[ATTR_EFFECT] == EFFECT_WHITE: pipeline.white() self._color = WHITE def limitlessled_temperature(self): """Convert Home Assistant color temperature units to percentage.""" max_kelvin = color_temperature_mired_to_kelvin(self.min_mireds) min_kelvin = color_temperature_mired_to_kelvin(self.max_mireds) width = max_kelvin - min_kelvin kelvin = color_temperature_mired_to_kelvin(self._temperature) temperature = (kelvin - min_kelvin) / width return max(0, min(1, temperature)) def limitlessled_brightness(self): """Convert Home Assistant brightness units to percentage.""" return self._brightness / 255 def limitlessled_color(self): """Convert Home Assistant HS list to RGB Color tuple.""" from limitlessled import Color return Color(*color_hs_to_RGB(*tuple(self._color)))
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/light/limitlessled.py
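rewrite_legacy turns flat group_N_* keys into the nested groups list. A sketch of that transformation on a made-up legacy config:

legacy = {
    'host': '192.168.1.2',       # example bridge address
    'group_1_name': 'Kitchen',
    'group_1_type': 'rgbw',
}

groups = []
for i in range(1, 5):
    name_key = 'group_%d_name' % i
    if name_key in legacy:
        groups.append({
            'number': i,
            'type': legacy.get('group_%d_type' % i, 'rgbw'),
            'name': legacy[name_key],
        })

print(groups)  # [{'number': 1, 'type': 'rgbw', 'name': 'Kitchen'}]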
""" Support for Rflink components. For more details about this component, please refer to the documentation at https://home-assistant.io/components/rflink/ """ import asyncio from collections import defaultdict import logging import async_timeout import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, CONF_COMMAND, CONF_HOST, CONF_PORT, STATE_ON, EVENT_HOMEASSISTANT_STOP) from homeassistant.core import CoreState, callback from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.deprecation import get_deprecated from homeassistant.helpers.entity import Entity from homeassistant.helpers.dispatcher import ( async_dispatcher_send, async_dispatcher_connect) from homeassistant.helpers.restore_state import RestoreEntity REQUIREMENTS = ['rflink==0.0.37'] _LOGGER = logging.getLogger(__name__) ATTR_EVENT = 'event' ATTR_STATE = 'state' CONF_ALIASES = 'aliases' CONF_ALIASSES = 'aliasses' CONF_GROUP_ALIASES = 'group_aliases' CONF_GROUP_ALIASSES = 'group_aliasses' CONF_GROUP = 'group' CONF_NOGROUP_ALIASES = 'nogroup_aliases' CONF_NOGROUP_ALIASSES = 'nogroup_aliasses' CONF_DEVICE_DEFAULTS = 'device_defaults' CONF_DEVICE_ID = 'device_id' CONF_DEVICES = 'devices' CONF_AUTOMATIC_ADD = 'automatic_add' CONF_FIRE_EVENT = 'fire_event' CONF_IGNORE_DEVICES = 'ignore_devices' CONF_RECONNECT_INTERVAL = 'reconnect_interval' CONF_SIGNAL_REPETITIONS = 'signal_repetitions' CONF_WAIT_FOR_ACK = 'wait_for_ack' DATA_DEVICE_REGISTER = 'rflink_device_register' DATA_ENTITY_LOOKUP = 'rflink_entity_lookup' DATA_ENTITY_GROUP_LOOKUP = 'rflink_entity_group_only_lookup' DEFAULT_RECONNECT_INTERVAL = 10 DEFAULT_SIGNAL_REPETITIONS = 1 CONNECTION_TIMEOUT = 10 EVENT_BUTTON_PRESSED = 'button_pressed' EVENT_KEY_COMMAND = 'command' EVENT_KEY_ID = 'id' EVENT_KEY_SENSOR = 'sensor' EVENT_KEY_UNIT = 'unit' RFLINK_GROUP_COMMANDS = ['allon', 'alloff'] DOMAIN = 'rflink' SERVICE_SEND_COMMAND = 'send_command' SIGNAL_AVAILABILITY = 'rflink_device_available' SIGNAL_HANDLE_EVENT = 'rflink_handle_event_{}' TMP_ENTITY = 'tmp.{}' DEVICE_DEFAULTS_SCHEMA = vol.Schema({ vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean, vol.Optional(CONF_SIGNAL_REPETITIONS, default=DEFAULT_SIGNAL_REPETITIONS): vol.Coerce(int), }) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Required(CONF_PORT): vol.Any(cv.port, cv.string), vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_WAIT_FOR_ACK, default=True): cv.boolean, vol.Optional(CONF_RECONNECT_INTERVAL, default=DEFAULT_RECONNECT_INTERVAL): int, vol.Optional(CONF_IGNORE_DEVICES, default=[]): vol.All(cv.ensure_list, [cv.string]), }), }, extra=vol.ALLOW_EXTRA) SEND_COMMAND_SCHEMA = vol.Schema({ vol.Required(CONF_DEVICE_ID): cv.string, vol.Required(CONF_COMMAND): cv.string, }) def identify_event_type(event): """Look at event to determine type of device. Async friendly. 
""" if EVENT_KEY_COMMAND in event: return EVENT_KEY_COMMAND if EVENT_KEY_SENSOR in event: return EVENT_KEY_SENSOR return 'unknown' async def async_setup(hass, config): """Set up the Rflink component.""" from rflink.protocol import create_rflink_connection import serial # Allow entities to register themselves by device_id to be looked up when # new rflink events arrive to be handled hass.data[DATA_ENTITY_LOOKUP] = { EVENT_KEY_COMMAND: defaultdict(list), EVENT_KEY_SENSOR: defaultdict(list), } hass.data[DATA_ENTITY_GROUP_LOOKUP] = { EVENT_KEY_COMMAND: defaultdict(list), } # Allow platform to specify function to register new unknown devices hass.data[DATA_DEVICE_REGISTER] = {} async def async_send_command(call): """Send Rflink command.""" _LOGGER.debug('Rflink command for %s', str(call.data)) if not (await RflinkCommand.send_command( call.data.get(CONF_DEVICE_ID), call.data.get(CONF_COMMAND))): _LOGGER.error('Failed Rflink command for %s', str(call.data)) hass.services.async_register( DOMAIN, SERVICE_SEND_COMMAND, async_send_command, schema=SEND_COMMAND_SCHEMA) @callback def event_callback(event): """Handle incoming Rflink events. Rflink events arrive as dictionaries of varying content depending on their type. Identify the events and distribute accordingly. """ event_type = identify_event_type(event) _LOGGER.debug('event of type %s: %s', event_type, event) # Don't propagate non entity events (eg: version string, ack response) if event_type not in hass.data[DATA_ENTITY_LOOKUP]: _LOGGER.debug('unhandled event of type: %s', event_type) return # Lookup entities who registered this device id as device id or alias event_id = event.get(EVENT_KEY_ID, None) is_group_event = (event_type == EVENT_KEY_COMMAND and event[EVENT_KEY_COMMAND] in RFLINK_GROUP_COMMANDS) if is_group_event: entity_ids = hass.data[DATA_ENTITY_GROUP_LOOKUP][event_type].get( event_id, []) else: entity_ids = hass.data[DATA_ENTITY_LOOKUP][event_type][event_id] _LOGGER.debug('entity_ids: %s', entity_ids) if entity_ids: # Propagate event to every entity matching the device id for entity in entity_ids: _LOGGER.debug('passing event to %s', entity) async_dispatcher_send(hass, SIGNAL_HANDLE_EVENT.format(entity), event) elif not is_group_event: # If device is not yet known, register with platform (if loaded) if event_type in hass.data[DATA_DEVICE_REGISTER]: _LOGGER.debug('device_id not known, adding new device') # Add bogus event_id first to avoid race if we get another # event before the device is created # Any additional events received before the device has been # created will thus be ignored. 
hass.data[DATA_ENTITY_LOOKUP][event_type][ event_id].append(TMP_ENTITY.format(event_id)) hass.async_create_task( hass.data[DATA_DEVICE_REGISTER][event_type](event)) else: _LOGGER.debug('device_id not known and automatic add disabled') # When connecting to tcp host instead of serial port (optional) host = config[DOMAIN].get(CONF_HOST) # TCP port when host configured, otherwise serial port port = config[DOMAIN][CONF_PORT] @callback def reconnect(exc=None): """Schedule reconnect after connection has been unexpectedly lost.""" # Reset protocol binding before starting reconnect RflinkCommand.set_rflink_protocol(None) async_dispatcher_send(hass, SIGNAL_AVAILABILITY, False) # If HA is not stopping, initiate new connection if hass.state != CoreState.stopping: _LOGGER.warning('disconnected from Rflink, reconnecting') hass.async_create_task(connect()) async def connect(): """Set up connection and hook it into HA for reconnect/shutdown.""" _LOGGER.info('Initiating Rflink connection') # Rflink create_rflink_connection decides based on the value of host # (string or None) if serial or tcp mode should be used # Initiate serial/tcp connection to Rflink gateway connection = create_rflink_connection( port=port, host=host, event_callback=event_callback, disconnect_callback=reconnect, loop=hass.loop, ignore=config[DOMAIN][CONF_IGNORE_DEVICES] ) try: with async_timeout.timeout(CONNECTION_TIMEOUT, loop=hass.loop): transport, protocol = await connection except (serial.serialutil.SerialException, ConnectionRefusedError, TimeoutError, OSError, asyncio.TimeoutError) as exc: reconnect_interval = config[DOMAIN][CONF_RECONNECT_INTERVAL] _LOGGER.exception( "Error connecting to Rflink, reconnecting in %s", reconnect_interval) # Connection to Rflink device is lost, make entities unavailable async_dispatcher_send(hass, SIGNAL_AVAILABILITY, False) hass.loop.call_later(reconnect_interval, reconnect, exc) return # There is a valid connection to a Rflink device now so # mark entities as available async_dispatcher_send(hass, SIGNAL_AVAILABILITY, True) # Bind protocol to command class to allow entities to send commands RflinkCommand.set_rflink_protocol( protocol, config[DOMAIN][CONF_WAIT_FOR_ACK]) # handle shutdown of Rflink asyncio transport hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, lambda x: transport.close()) _LOGGER.info('Connected to Rflink') hass.async_create_task(connect()) return True class RflinkDevice(Entity): """Representation of a Rflink device. Contains the common logic for Rflink entities. 
""" platform = None _state = None _available = True def __init__(self, device_id, initial_event=None, name=None, aliases=None, group=True, group_aliases=None, nogroup_aliases=None, fire_event=False, signal_repetitions=DEFAULT_SIGNAL_REPETITIONS): """Initialize the device.""" # Rflink specific attributes for every component type self._initial_event = initial_event self._device_id = device_id if name: self._name = name else: self._name = device_id self._aliases = aliases self._group = group self._group_aliases = group_aliases self._nogroup_aliases = nogroup_aliases self._should_fire_event = fire_event self._signal_repetitions = signal_repetitions @callback def handle_event_callback(self, event): """Handle incoming event for device type.""" # Call platform specific event handler self._handle_event(event) # Propagate changes through ha self.async_schedule_update_ha_state() # Put command onto bus for user to subscribe to if self._should_fire_event and identify_event_type( event) == EVENT_KEY_COMMAND: self.hass.bus.async_fire(EVENT_BUTTON_PRESSED, { ATTR_ENTITY_ID: self.entity_id, ATTR_STATE: event[EVENT_KEY_COMMAND], }) _LOGGER.debug("Fired bus event for %s: %s", self.entity_id, event[EVENT_KEY_COMMAND]) def _handle_event(self, event): """Platform specific event handler.""" raise NotImplementedError() @property def should_poll(self): """No polling needed.""" return False @property def name(self): """Return a name for the device.""" return self._name @property def is_on(self): """Return true if device is on.""" if self.assumed_state: return False return self._state @property def assumed_state(self): """Assume device state until first device event sets state.""" return self._state is None @property def available(self): """Return True if entity is available.""" return self._available @callback def _availability_callback(self, availability): """Update availability state.""" self._available = availability self.async_schedule_update_ha_state() async def async_added_to_hass(self): """Register update callback.""" # Remove temporary bogus entity_id if added tmp_entity = TMP_ENTITY.format(self._device_id) if tmp_entity in self.hass.data[DATA_ENTITY_LOOKUP][ EVENT_KEY_COMMAND][self._device_id]: self.hass.data[DATA_ENTITY_LOOKUP][ EVENT_KEY_COMMAND][self._device_id].remove(tmp_entity) # Register id and aliases self.hass.data[DATA_ENTITY_LOOKUP][ EVENT_KEY_COMMAND][self._device_id].append(self.entity_id) if self._group: self.hass.data[DATA_ENTITY_GROUP_LOOKUP][ EVENT_KEY_COMMAND][self._device_id].append(self.entity_id) # aliases respond to both normal and group commands (allon/alloff) if self._aliases: for _id in self._aliases: self.hass.data[DATA_ENTITY_LOOKUP][ EVENT_KEY_COMMAND][_id].append(self.entity_id) self.hass.data[DATA_ENTITY_GROUP_LOOKUP][ EVENT_KEY_COMMAND][_id].append(self.entity_id) # group_aliases only respond to group commands (allon/alloff) if self._group_aliases: for _id in self._group_aliases: self.hass.data[DATA_ENTITY_GROUP_LOOKUP][ EVENT_KEY_COMMAND][_id].append(self.entity_id) # nogroup_aliases only respond to normal commands if self._nogroup_aliases: for _id in self._nogroup_aliases: self.hass.data[DATA_ENTITY_LOOKUP][ EVENT_KEY_COMMAND][_id].append(self.entity_id) async_dispatcher_connect(self.hass, SIGNAL_AVAILABILITY, self._availability_callback) async_dispatcher_connect(self.hass, SIGNAL_HANDLE_EVENT.format(self.entity_id), self.handle_event_callback) # Process the initial event now that the entity is created if self._initial_event: self.handle_event_callback(self._initial_event) 
class RflinkCommand(RflinkDevice):
    """Singleton class to make Rflink command interface available to entities.

    This class is to be inherited by every Entity class that is actionable
    (switches/lights). It exposes the Rflink command interface for these
    entities.

    The Rflink interface is managed at class level and set during setup
    (and reset on reconnect).
    """

    # Keep repetition tasks to cancel if state is changed before repetitions
    # are sent
    _repetition_task = None

    _protocol = None

    # Whether to wait for Rflink to acknowledge commands; set during setup
    _wait_ack = None

    @classmethod
    def set_rflink_protocol(cls, protocol, wait_ack=None):
        """Set the Rflink asyncio protocol as a class variable."""
        cls._protocol = protocol
        if wait_ack is not None:
            cls._wait_ack = wait_ack

    @classmethod
    def is_connected(cls):
        """Return connection status."""
        return bool(cls._protocol)

    @classmethod
    async def send_command(cls, device_id, action):
        """Send device command to Rflink and wait for acknowledgement."""
        return await cls._protocol.send_command_ack(device_id, action)

    async def _async_handle_command(self, command, *args):
        """Do bookkeeping for command, send it to Rflink and update state."""
        self.cancel_queued_send_commands()

        if command == 'turn_on':
            cmd = 'on'
            self._state = True

        elif command == 'turn_off':
            cmd = 'off'
            self._state = False

        elif command == 'dim':
            # convert brightness to rflink dim level
            cmd = str(int(args[0] / 17))
            self._state = True

        elif command == 'toggle':
            cmd = 'on'
            # if the state is unknown or false, it gets set as true
            # if the state is true, it gets set as false
            self._state = self._state in [None, False]

        # Cover options for Rflink
        elif command == 'close_cover':
            cmd = 'DOWN'
            self._state = False

        elif command == 'open_cover':
            cmd = 'UP'
            self._state = True

        elif command == 'stop_cover':
            cmd = 'STOP'
            self._state = True

        # Send initial command and queue repetitions.
        # This allows the entity state to be updated quickly without having
        # to wait for all repetitions to be sent
        await self._async_send_command(cmd, self._signal_repetitions)

        # Update state of entity
        await self.async_update_ha_state()

    def cancel_queued_send_commands(self):
        """Cancel queued signal repetition commands.

        For example when the user changes the state while repetitions are
        still queued for broadcast. Or when an incoming Rflink command
        (remote switch) changes the state.
        """
        # cancel any outstanding tasks from the previous state change
        if self._repetition_task:
            self._repetition_task.cancel()

    async def _async_send_command(self, cmd, repetitions):
        """Send a command for device to Rflink gateway."""
        _LOGGER.debug(
            "Sending command: %s to Rflink device: %s", cmd, self._device_id)

        if not self.is_connected():
            raise HomeAssistantError('Cannot send command, not connected!')

        if self._wait_ack:
            # Puts command on outgoing buffer then waits for Rflink to confirm
            # the command has been sent out into the ether.
            await self._protocol.send_command_ack(self._device_id, cmd)
        else:
            # Puts command on outgoing buffer and returns straight away.
            # Rflink protocol/transport handles asynchronous writing of buffer
            # to serial/tcp device. Does not wait for command send
            # confirmation.
self._protocol.send_command(self._device_id, cmd) if repetitions > 1: self._repetition_task = self.hass.async_create_task( self._async_send_command(cmd, repetitions - 1)) class SwitchableRflinkDevice(RflinkCommand, RestoreEntity): """Rflink entity which can switch on/off (eg: light, switch).""" async def async_added_to_hass(self): """Restore RFLink device state (ON/OFF).""" await super().async_added_to_hass() old_state = await self.async_get_last_state() if old_state is not None: self._state = old_state.state == STATE_ON def _handle_event(self, event): """Adjust state if Rflink picks up a remote command for this device.""" self.cancel_queued_send_commands() command = event['command'] if command in ['on', 'allon']: self._state = True elif command in ['off', 'alloff']: self._state = False def async_turn_on(self, **kwargs): """Turn the device on.""" return self._async_handle_command("turn_on") def async_turn_off(self, **kwargs): """Turn the device off.""" return self._async_handle_command("turn_off") DEPRECATED_CONFIG_OPTIONS = [ CONF_ALIASSES, CONF_GROUP_ALIASSES, CONF_NOGROUP_ALIASSES] REPLACEMENT_CONFIG_OPTIONS = [ CONF_ALIASES, CONF_GROUP_ALIASES, CONF_NOGROUP_ALIASES] def remove_deprecated(config): """Remove deprecated config options from device config.""" for index, deprecated_option in enumerate(DEPRECATED_CONFIG_OPTIONS): if deprecated_option in config: replacement_option = REPLACEMENT_CONFIG_OPTIONS[index] # generate deprecation warning get_deprecated(config, replacement_option, deprecated_option) # remove old config value replacing new one config[replacement_option] = config.pop(deprecated_option)
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/rflink/__init__.py
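A standalone sketch (not part of the component above) of the brightness conversion done in RflinkCommand._async_handle_command: Home Assistant brightness (0..255) is divided by 17 to land on Rflink's 16 dim steps (0..15). The helper name is illustrative.

def brightness_to_rflink_dim(brightness):
    """Map Home Assistant brightness (0-255) to an Rflink dim command."""
    # Mirrors `cmd = str(int(args[0] / 17))` in _async_handle_command.
    return str(int(brightness / 17))


if __name__ == '__main__':
    for value in (0, 17, 128, 255):
        print(value, '->', brightness_to_rflink_dim(value))
    # Prints: 0 -> 0, 17 -> 1, 128 -> 7, 255 -> 15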
""" Support for Concord232 alarm control panels. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/alarm_control_panel.concord232/ """ import datetime import logging import requests import voluptuous as vol import homeassistant.components.alarm_control_panel as alarm import homeassistant.helpers.config_validation as cv from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PORT, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED) REQUIREMENTS = ['concord232==0.15'] _LOGGER = logging.getLogger(__name__) DEFAULT_HOST = 'localhost' DEFAULT_NAME = 'CONCORD232' DEFAULT_PORT = 5007 SCAN_INTERVAL = datetime.timedelta(seconds=10) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Concord232 alarm control panel platform.""" name = config.get(CONF_NAME) host = config.get(CONF_HOST) port = config.get(CONF_PORT) url = 'http://{}:{}'.format(host, port) try: add_entities([Concord232Alarm(url, name)], True) except requests.exceptions.ConnectionError as ex: _LOGGER.error("Unable to connect to Concord232: %s", str(ex)) class Concord232Alarm(alarm.AlarmControlPanel): """Representation of the Concord232-based alarm panel.""" def __init__(self, url, name): """Initialize the Concord232 alarm panel.""" from concord232 import client as concord232_client self._state = None self._name = name self._url = url self._alarm = concord232_client.Client(self._url) self._alarm.partitions = self._alarm.list_partitions() self._alarm.last_partition_update = datetime.datetime.now() @property def name(self): """Return the name of the device.""" return self._name @property def code_format(self): """Return the characters if code is defined.""" return alarm.FORMAT_NUMBER @property def state(self): """Return the state of the device.""" return self._state def update(self): """Update values from API.""" try: part = self._alarm.list_partitions()[0] except requests.exceptions.ConnectionError as ex: _LOGGER.error("Unable to connect to %(host)s: %(reason)s", dict(host=self._url, reason=ex)) return except IndexError: _LOGGER.error("Concord232 reports no partitions") return if part['arming_level'] == 'Off': self._state = STATE_ALARM_DISARMED elif 'Home' in part['arming_level']: self._state = STATE_ALARM_ARMED_HOME else: self._state = STATE_ALARM_ARMED_AWAY def alarm_disarm(self, code=None): """Send disarm command.""" self._alarm.disarm(code) def alarm_arm_home(self, code=None): """Send arm home command.""" self._alarm.arm('stay') def alarm_arm_away(self, code=None): """Send arm away command.""" self._alarm.arm('away')
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/alarm_control_panel/concord232.py
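A minimal sketch of the arming_level-to-state mapping in Concord232Alarm.update() above; the plain strings stand in for Home Assistant's STATE_ALARM_* constants and the function name is illustrative.

def arming_level_to_state(arming_level):
    """Map the panel's arming_level field to an alarm state string."""
    if arming_level == 'Off':
        return 'disarmed'        # STATE_ALARM_DISARMED
    if 'Home' in arming_level:
        return 'armed_home'      # STATE_ALARM_ARMED_HOME
    return 'armed_away'          # STATE_ALARM_ARMED_AWAY


assert arming_level_to_state('Off') == 'disarmed'
assert arming_level_to_state('Stay/Home') == 'armed_home'
assert arming_level_to_state('Away') == 'armed_away'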
"""Constants for Google Hangouts Component.""" import logging import voluptuous as vol from homeassistant.components.notify \ import ATTR_MESSAGE, ATTR_TARGET, ATTR_DATA import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger('homeassistant.components.hangouts') DOMAIN = 'hangouts' CONF_2FA = '2fa' CONF_REFRESH_TOKEN = 'refresh_token' CONF_BOT = 'bot' CONF_CONVERSATIONS = 'conversations' CONF_DEFAULT_CONVERSATIONS = 'default_conversations' CONF_ERROR_SUPPRESSED_CONVERSATIONS = 'error_suppressed_conversations' CONF_INTENTS = 'intents' CONF_INTENT_TYPE = 'intent_type' CONF_SENTENCES = 'sentences' CONF_MATCHERS = 'matchers' INTENT_HELP = 'HangoutsHelp' EVENT_HANGOUTS_CONNECTED = 'hangouts_connected' EVENT_HANGOUTS_DISCONNECTED = 'hangouts_disconnected' EVENT_HANGOUTS_USERS_CHANGED = 'hangouts_users_changed' EVENT_HANGOUTS_CONVERSATIONS_CHANGED = 'hangouts_conversations_changed' EVENT_HANGOUTS_CONVERSATIONS_RESOLVED = 'hangouts_conversations_resolved' EVENT_HANGOUTS_MESSAGE_RECEIVED = 'hangouts_message_received' CONF_CONVERSATION_ID = 'id' CONF_CONVERSATION_NAME = 'name' SERVICE_SEND_MESSAGE = 'send_message' SERVICE_UPDATE = 'update' SERVICE_RECONNECT = 'reconnect' TARGETS_SCHEMA = vol.All( vol.Schema({ vol.Exclusive(CONF_CONVERSATION_ID, 'id or name'): cv.string, vol.Exclusive(CONF_CONVERSATION_NAME, 'id or name'): cv.string }), cv.has_at_least_one_key(CONF_CONVERSATION_ID, CONF_CONVERSATION_NAME) ) MESSAGE_SEGMENT_SCHEMA = vol.Schema({ vol.Required('text'): cv.string, vol.Optional('is_bold'): cv.boolean, vol.Optional('is_italic'): cv.boolean, vol.Optional('is_strikethrough'): cv.boolean, vol.Optional('is_underline'): cv.boolean, vol.Optional('parse_str'): cv.boolean, vol.Optional('link_target'): cv.string }) MESSAGE_DATA_SCHEMA = vol.Schema({ vol.Optional('image_file'): cv.string, vol.Optional('image_url'): cv.string }) MESSAGE_SCHEMA = vol.Schema({ vol.Required(ATTR_TARGET): [TARGETS_SCHEMA], vol.Required(ATTR_MESSAGE): [MESSAGE_SEGMENT_SCHEMA], vol.Optional(ATTR_DATA): MESSAGE_DATA_SCHEMA }) INTENT_SCHEMA = vol.All( # Basic Schema vol.Schema({ vol.Required(CONF_SENTENCES): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_CONVERSATIONS): [TARGETS_SCHEMA] }), )
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/hangouts/const.py
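A pared-down, runnable sketch of how TARGETS_SCHEMA above combines vol.Exclusive ("never both") with a has-at-least-one-key check ("never neither"); the local validator stands in for homeassistant.helpers.config_validation.has_at_least_one_key and is an assumption about its behavior.

import voluptuous as vol


def has_at_least_one_key(*keys):
    """Illustrative stand-in for cv.has_at_least_one_key."""
    def validate(obj):
        if any(key in obj for key in keys):
            return obj
        raise vol.Invalid('must contain at least one of {}'.format(
            ', '.join(keys)))
    return validate


TARGETS = vol.All(
    vol.Schema({
        vol.Exclusive('id', 'id or name'): str,
        vol.Exclusive('name', 'id or name'): str,
    }),
    has_at_least_one_key('id', 'name'))

TARGETS({'name': 'Kitchen'})           # accepted
# TARGETS({'id': '1', 'name': 'x'})    # rejected: both keys given
# TARGETS({})                          # rejected: neither key given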
""" Support for Nest thermostats. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/climate.nest/ """ import logging import voluptuous as vol from homeassistant.components.nest import ( DATA_NEST, SIGNAL_NEST_UPDATE, DOMAIN as NEST_DOMAIN) from homeassistant.components.climate import ( STATE_AUTO, STATE_COOL, STATE_HEAT, STATE_ECO, ClimateDevice, PLATFORM_SCHEMA, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, ATTR_TEMPERATURE, SUPPORT_TARGET_TEMPERATURE, SUPPORT_TARGET_TEMPERATURE_HIGH, SUPPORT_TARGET_TEMPERATURE_LOW, SUPPORT_OPERATION_MODE, SUPPORT_AWAY_MODE, SUPPORT_FAN_MODE) from homeassistant.const import ( TEMP_CELSIUS, TEMP_FAHRENHEIT, CONF_SCAN_INTERVAL, STATE_ON, STATE_OFF) from homeassistant.helpers.dispatcher import async_dispatcher_connect DEPENDENCIES = ['nest'] _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_SCAN_INTERVAL): vol.All(vol.Coerce(int), vol.Range(min=1)), }) NEST_MODE_HEAT_COOL = 'heat-cool' def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Nest thermostat. No longer in use. """ async def async_setup_entry(hass, entry, async_add_entities): """Set up the Nest climate device based on a config entry.""" temp_unit = hass.config.units.temperature_unit thermostats = await hass.async_add_job(hass.data[DATA_NEST].thermostats) all_devices = [NestThermostat(structure, device, temp_unit) for structure, device in thermostats] async_add_entities(all_devices, True) class NestThermostat(ClimateDevice): """Representation of a Nest thermostat.""" def __init__(self, structure, device, temp_unit): """Initialize the thermostat.""" self._unit = temp_unit self.structure = structure self.device = device self._fan_list = [STATE_ON, STATE_AUTO] # Set the default supported features self._support_flags = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE | SUPPORT_AWAY_MODE) # Not all nest devices support cooling and heating remove unused self._operation_list = [STATE_OFF] # Add supported nest thermostat features if self.device.can_heat: self._operation_list.append(STATE_HEAT) if self.device.can_cool: self._operation_list.append(STATE_COOL) if self.device.can_heat and self.device.can_cool: self._operation_list.append(STATE_AUTO) self._support_flags = (self._support_flags | SUPPORT_TARGET_TEMPERATURE_HIGH | SUPPORT_TARGET_TEMPERATURE_LOW) self._operation_list.append(STATE_ECO) # feature of device self._has_fan = self.device.has_fan if self._has_fan: self._support_flags = (self._support_flags | SUPPORT_FAN_MODE) # data attributes self._away = None self._location = None self._name = None self._humidity = None self._target_temperature = None self._temperature = None self._temperature_scale = None self._mode = None self._fan = None self._eco_temperature = None self._is_locked = None self._locked_temperature = None self._min_temperature = None self._max_temperature = None @property def should_poll(self): """Do not need poll thanks using Nest streaming API.""" return False async def async_added_to_hass(self): """Register update signal handler.""" async def async_update_state(): """Update device state.""" await self.async_update_ha_state(True) async_dispatcher_connect(self.hass, SIGNAL_NEST_UPDATE, async_update_state) @property def supported_features(self): """Return the list of supported features.""" return self._support_flags @property def unique_id(self): """Return unique ID for this device.""" return self.device.serial @property def device_info(self): 
"""Return information about the device.""" return { 'identifiers': { (NEST_DOMAIN, self.device.device_id), }, 'name': self.device.name_long, 'manufacturer': 'Nest Labs', 'model': "Thermostat", 'sw_version': self.device.software_version, } @property def name(self): """Return the name of the nest, if any.""" return self._name @property def temperature_unit(self): """Return the unit of measurement.""" return self._temperature_scale @property def current_temperature(self): """Return the current temperature.""" return self._temperature @property def current_operation(self): """Return current operation ie. heat, cool, idle.""" if self._mode in [STATE_HEAT, STATE_COOL, STATE_OFF, STATE_ECO]: return self._mode if self._mode == NEST_MODE_HEAT_COOL: return STATE_AUTO return None @property def target_temperature(self): """Return the temperature we try to reach.""" if self._mode not in (NEST_MODE_HEAT_COOL, STATE_ECO): return self._target_temperature return None @property def target_temperature_low(self): """Return the lower bound temperature we try to reach.""" if self._mode == STATE_ECO: return self._eco_temperature[0] if self._mode == NEST_MODE_HEAT_COOL: return self._target_temperature[0] return None @property def target_temperature_high(self): """Return the upper bound temperature we try to reach.""" if self._mode == STATE_ECO: return self._eco_temperature[1] if self._mode == NEST_MODE_HEAT_COOL: return self._target_temperature[1] return None @property def is_away_mode_on(self): """Return if away mode is on.""" return self._away def set_temperature(self, **kwargs): """Set new target temperature.""" import nest temp = None target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW) target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH) if self._mode == NEST_MODE_HEAT_COOL: if target_temp_low is not None and target_temp_high is not None: temp = (target_temp_low, target_temp_high) _LOGGER.debug("Nest set_temperature-output-value=%s", temp) else: temp = kwargs.get(ATTR_TEMPERATURE) _LOGGER.debug("Nest set_temperature-output-value=%s", temp) try: if temp is not None: self.device.target = temp except nest.nest.APIError as api_error: _LOGGER.error("An error occurred while setting temperature: %s", api_error) # restore target temperature self.schedule_update_ha_state(True) def set_operation_mode(self, operation_mode): """Set operation mode.""" if operation_mode in [STATE_HEAT, STATE_COOL, STATE_OFF, STATE_ECO]: device_mode = operation_mode elif operation_mode == STATE_AUTO: device_mode = NEST_MODE_HEAT_COOL else: device_mode = STATE_OFF _LOGGER.error( "An error occurred while setting device mode. 
" "Invalid operation mode: %s", operation_mode) self.device.mode = device_mode @property def operation_list(self): """List of available operation modes.""" return self._operation_list def turn_away_mode_on(self): """Turn away on.""" self.structure.away = True def turn_away_mode_off(self): """Turn away off.""" self.structure.away = False @property def current_fan_mode(self): """Return whether the fan is on.""" if self._has_fan: # Return whether the fan is on return STATE_ON if self._fan else STATE_AUTO # No Fan available so disable slider return None @property def fan_list(self): """List of available fan modes.""" if self._has_fan: return self._fan_list return None def set_fan_mode(self, fan_mode): """Turn fan on/off.""" if self._has_fan: self.device.fan = fan_mode.lower() @property def min_temp(self): """Identify min_temp in Nest API or defaults if not available.""" return self._min_temperature @property def max_temp(self): """Identify max_temp in Nest API or defaults if not available.""" return self._max_temperature def update(self): """Cache value from Python-nest.""" self._location = self.device.where self._name = self.device.name self._humidity = self.device.humidity self._temperature = self.device.temperature self._mode = self.device.mode self._target_temperature = self.device.target self._fan = self.device.fan self._away = self.structure.away == 'away' self._eco_temperature = self.device.eco_temperature self._locked_temperature = self.device.locked_temperature self._min_temperature = self.device.min_temperature self._max_temperature = self.device.max_temperature self._is_locked = self.device.is_locked if self.device.temperature_scale == 'C': self._temperature_scale = TEMP_CELSIUS else: self._temperature_scale = TEMP_FAHRENHEIT
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/nest/climate.py
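A standalone sketch of the branching in NestThermostat.set_temperature above: heat-cool mode writes a (low, high) tuple to device.target, every other mode a single value. Function and parameter names are illustrative.

NEST_MODE_HEAT_COOL = 'heat-cool'


def build_target(mode, temperature=None, target_low=None, target_high=None):
    """Return the value to write to device.target, or None if incomplete."""
    if mode == NEST_MODE_HEAT_COOL:
        if target_low is not None and target_high is not None:
            return (target_low, target_high)
        return None
    return temperature


assert build_target('heat', temperature=21.0) == 21.0
assert build_target('heat-cool', target_low=19.0, target_high=24.0) == \
    (19.0, 24.0)
assert build_target('heat-cool', target_low=19.0) is None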
""" Add support for the Xiaomi TVs. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/xiaomi_tv/ """ import logging import voluptuous as vol from homeassistant.components.media_player import ( MediaPlayerDevice, PLATFORM_SCHEMA) from homeassistant.components.media_player.const import ( SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_STEP) from homeassistant.const import CONF_HOST, CONF_NAME, STATE_OFF, STATE_ON import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['pymitv==1.4.3'] DEFAULT_NAME = "Xiaomi TV" _LOGGER = logging.getLogger(__name__) SUPPORT_XIAOMI_TV = SUPPORT_VOLUME_STEP | SUPPORT_TURN_ON | \ SUPPORT_TURN_OFF # No host is needed for configuration, however it can be set. PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Xiaomi TV platform.""" from pymitv import Discover # If a hostname is set. Discovery is skipped. host = config.get(CONF_HOST) name = config.get(CONF_NAME) if host is not None: # Check if there's a valid TV at the IP address. if not Discover().check_ip(host): _LOGGER.error( "Could not find Xiaomi TV with specified IP: %s", host) else: # Register TV with Home Assistant. add_entities([XiaomiTV(host, name)]) else: # Otherwise, discover TVs on network. add_entities(XiaomiTV(tv, DEFAULT_NAME) for tv in Discover().scan()) class XiaomiTV(MediaPlayerDevice): """Represent the Xiaomi TV for Home Assistant.""" def __init__(self, ip, name): """Receive IP address and name to construct class.""" # Import pymitv library. from pymitv import TV # Initialize the Xiaomi TV. self._tv = TV(ip) # Default name value, only to be overridden by user. self._name = name self._state = STATE_OFF @property def name(self): """Return the display name of this TV.""" return self._name @property def state(self): """Return _state variable, containing the appropriate constant.""" return self._state @property def assumed_state(self): """Indicate that state is assumed.""" return True @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_XIAOMI_TV def turn_off(self): """ Instruct the TV to turn sleep. This is done instead of turning off, because the TV won't accept any input when turned off. Thus, the user would be unable to turn the TV back on, unless it's done manually. """ if self._state is not STATE_OFF: self._tv.sleep() self._state = STATE_OFF def turn_on(self): """Wake the TV back up from sleep.""" if self._state is not STATE_ON: self._tv.wake() self._state = STATE_ON def volume_up(self): """Increase volume by one.""" self._tv.volume_up() def volume_down(self): """Decrease volume by one.""" self._tv.volume_down()
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/media_player/xiaomi_tv.py
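Because the TV is put to sleep rather than powered off, the component above tracks an assumed state and guards each command against redundant sends. A toy sketch of that bookkeeping, with a stub standing in for pymitv.TV; all names here are illustrative.

class StubTV:
    """Illustrative stand-in for pymitv.TV."""

    def sleep(self):
        print('sleep command sent')

    def wake(self):
        print('wake command sent')


class AssumedStateTV:
    """Track an assumed on/off state and skip redundant commands."""

    def __init__(self, tv):
        self._tv = tv
        self._state = 'off'

    def turn_off(self):
        if self._state != 'off':
            self._tv.sleep()
            self._state = 'off'

    def turn_on(self):
        if self._state != 'on':
            self._tv.wake()
            self._state = 'on'


tv = AssumedStateTV(StubTV())
tv.turn_on()    # sends wake
tv.turn_on()    # no-op, already assumed on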
""" Interfaces with Z-Wave sensors. For more details about this platform, please refer to the documentation https://home-assistant.io/components/binary_sensor.zwave/ """ import logging import datetime import homeassistant.util.dt as dt_util from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.event import track_point_in_time from homeassistant.components import zwave from homeassistant.components.zwave import workaround from homeassistant.components.binary_sensor import ( DOMAIN, BinarySensorDevice) _LOGGER = logging.getLogger(__name__) DEPENDENCIES = [] async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Old method of setting up Z-Wave binary sensors.""" pass async def async_setup_entry(hass, config_entry, async_add_entities): """Set up Z-Wave binary sensors from Config Entry.""" @callback def async_add_binary_sensor(binary_sensor): """Add Z-Wave binary sensor.""" async_add_entities([binary_sensor]) async_dispatcher_connect(hass, 'zwave_new_binary_sensor', async_add_binary_sensor) def get_device(values, **kwargs): """Create Z-Wave entity device.""" device_mapping = workaround.get_device_mapping(values.primary) if device_mapping == workaround.WORKAROUND_NO_OFF_EVENT: return ZWaveTriggerSensor(values, "motion") if workaround.get_device_component_mapping(values.primary) == DOMAIN: return ZWaveBinarySensor(values, None) if values.primary.command_class == zwave.const.COMMAND_CLASS_SENSOR_BINARY: return ZWaveBinarySensor(values, None) return None class ZWaveBinarySensor(BinarySensorDevice, zwave.ZWaveDeviceEntity): """Representation of a binary sensor within Z-Wave.""" def __init__(self, values, device_class): """Initialize the sensor.""" zwave.ZWaveDeviceEntity.__init__(self, values, DOMAIN) self._sensor_type = device_class self._state = self.values.primary.data def update_properties(self): """Handle data changes for node values.""" self._state = self.values.primary.data @property def is_on(self): """Return true if the binary sensor is on.""" return self._state @property def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" return self._sensor_type class ZWaveTriggerSensor(ZWaveBinarySensor): """Representation of a stateless sensor within Z-Wave.""" def __init__(self, values, device_class): """Initialize the sensor.""" super(ZWaveTriggerSensor, self).__init__(values, device_class) # Set default off delay to 60 sec self.re_arm_sec = 60 self.invalidate_after = None def update_properties(self): """Handle value changes for this entity's node.""" self._state = self.values.primary.data _LOGGER.debug('off_delay=%s', self.values.off_delay) # Set re_arm_sec if off_delay is provided from the sensor if self.values.off_delay: _LOGGER.debug('off_delay.data=%s', self.values.off_delay.data) self.re_arm_sec = self.values.off_delay.data * 8 # only allow this value to be true for re_arm secs if not self.hass: return self.invalidate_after = dt_util.utcnow() + datetime.timedelta( seconds=self.re_arm_sec) track_point_in_time( self.hass, self.async_update_ha_state, self.invalidate_after) @property def is_on(self): """Return true if movement has happened within the rearm time.""" return self._state and \ (self.invalidate_after is None or self.invalidate_after > dt_util.utcnow())
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/zwave/binary_sensor.py
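A runnable sketch of the re-arm window logic in ZWaveTriggerSensor above: the node's off_delay is reported in 8-second ticks (hence the `* 8` in update_properties), and is_on only reads true while the window computed from it is still open. Names here are illustrative.

import datetime


def trigger_is_on(state, invalidate_after, now):
    """Mirror of ZWaveTriggerSensor.is_on."""
    return state and (invalidate_after is None or invalidate_after > now)


off_delay_ticks = 2                      # as reported by the node
re_arm_sec = off_delay_ticks * 8         # 16 seconds
now = datetime.datetime.utcnow()
invalidate_after = now + datetime.timedelta(seconds=re_arm_sec)

assert trigger_is_on(True, invalidate_after, now)
assert not trigger_is_on(
    True, invalidate_after, now + datetime.timedelta(seconds=17))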
""" Support for Epson projector. For more details about this component, please refer to the documentation at https://home-assistant.io/components/media_player.epson/ """ import logging import voluptuous as vol from homeassistant.components.media_player import ( MediaPlayerDevice, MEDIA_PLAYER_SCHEMA, PLATFORM_SCHEMA) from homeassistant.components.media_player.const import ( DOMAIN, SUPPORT_NEXT_TRACK, SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP) from homeassistant.const import ( ATTR_ENTITY_ID, CONF_HOST, CONF_NAME, CONF_PORT, CONF_SSL, STATE_OFF, STATE_ON) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['epson-projector==0.1.3'] _LOGGER = logging.getLogger(__name__) ATTR_CMODE = 'cmode' DATA_EPSON = 'epson' DEFAULT_NAME = 'EPSON Projector' SERVICE_SELECT_CMODE = 'epson_select_cmode' SUPPORT_CMODE = 33001 SUPPORT_EPSON = SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE |\ SUPPORT_CMODE | SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | \ SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=80): cv.port, vol.Optional(CONF_SSL, default=False): cv.boolean, }) async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up the Epson media player platform.""" from epson_projector.const import (CMODE_LIST_SET) if DATA_EPSON not in hass.data: hass.data[DATA_EPSON] = [] name = config.get(CONF_NAME) host = config.get(CONF_HOST) port = config.get(CONF_PORT) ssl = config.get(CONF_SSL) epson = EpsonProjector(async_get_clientsession( hass, verify_ssl=False), name, host, port, ssl) hass.data[DATA_EPSON].append(epson) async_add_entities([epson], update_before_add=True) async def async_service_handler(service): """Handle for services.""" entity_ids = service.data.get(ATTR_ENTITY_ID) if entity_ids: devices = [device for device in hass.data[DATA_EPSON] if device.entity_id in entity_ids] else: devices = hass.data[DATA_EPSON] for device in devices: if service.service == SERVICE_SELECT_CMODE: cmode = service.data.get(ATTR_CMODE) await device.select_cmode(cmode) device.async_schedule_update_ha_state(True) epson_schema = MEDIA_PLAYER_SCHEMA.extend({ vol.Required(ATTR_CMODE): vol.All(cv.string, vol.Any(*CMODE_LIST_SET)) }) hass.services.async_register( DOMAIN, SERVICE_SELECT_CMODE, async_service_handler, schema=epson_schema) class EpsonProjector(MediaPlayerDevice): """Representation of Epson Projector Device.""" def __init__(self, websession, name, host, port, encryption): """Initialize entity to control Epson projector.""" import epson_projector as epson from epson_projector.const import DEFAULT_SOURCES self._name = name self._projector = epson.Projector( host, websession=websession, port=port) self._cmode = None self._source_list = list(DEFAULT_SOURCES.values()) self._source = None self._volume = None self._state = None async def async_update(self): """Update state of device.""" from epson_projector.const import ( EPSON_CODES, POWER, CMODE, CMODE_LIST, SOURCE, VOLUME, BUSY, SOURCE_LIST) is_turned_on = await self._projector.get_property(POWER) _LOGGER.debug("Project turn on/off status: %s", is_turned_on) if is_turned_on and is_turned_on == EPSON_CODES[POWER]: self._state = STATE_ON cmode = await self._projector.get_property(CMODE) self._cmode = 
CMODE_LIST.get(cmode, self._cmode) source = await self._projector.get_property(SOURCE) self._source = SOURCE_LIST.get(source, self._source) volume = await self._projector.get_property(VOLUME) if volume: self._volume = volume elif is_turned_on == BUSY: self._state = STATE_ON else: self._state = STATE_OFF @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the state of the device.""" return self._state @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_EPSON async def async_turn_on(self): """Turn on epson.""" from epson_projector.const import TURN_ON await self._projector.send_command(TURN_ON) async def async_turn_off(self): """Turn off epson.""" from epson_projector.const import TURN_OFF await self._projector.send_command(TURN_OFF) @property def source_list(self): """List of available input sources.""" return self._source_list @property def source(self): """Get current input sources.""" return self._source @property def volume_level(self): """Return the volume level of the media player (0..1).""" return self._volume async def select_cmode(self, cmode): """Set color mode in Epson.""" from epson_projector.const import (CMODE_LIST_SET) await self._projector.send_command(CMODE_LIST_SET[cmode]) async def async_select_source(self, source): """Select input source.""" from epson_projector.const import INV_SOURCES selected_source = INV_SOURCES[source] await self._projector.send_command(selected_source) async def async_mute_volume(self, mute): """Mute (true) or unmute (false) sound.""" from epson_projector.const import MUTE await self._projector.send_command(MUTE) async def async_volume_up(self): """Increase volume.""" from epson_projector.const import VOL_UP await self._projector.send_command(VOL_UP) async def async_volume_down(self): """Decrease volume.""" from epson_projector.const import VOL_DOWN await self._projector.send_command(VOL_DOWN) async def async_media_play(self): """Play media via Epson.""" from epson_projector.const import PLAY await self._projector.send_command(PLAY) async def async_media_pause(self): """Pause media via Epson.""" from epson_projector.const import PAUSE await self._projector.send_command(PAUSE) async def async_media_next_track(self): """Skip to next.""" from epson_projector.const import FAST await self._projector.send_command(FAST) async def async_media_previous_track(self): """Skip to previous.""" from epson_projector.const import BACK await self._projector.send_command(BACK) @property def device_state_attributes(self): """Return device specific state attributes.""" attributes = {} if self._cmode is not None: attributes[ATTR_CMODE] = self._cmode return attributes
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/media_player/epson.py
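A standalone sketch of the targeting logic in async_service_handler above: a service call carrying entity_ids only addresses matching projectors, while a call without addresses them all. The namedtuple and entity ids are illustrative stand-ins for the real entities.

from collections import namedtuple


def select_targets(devices, entity_ids=None):
    """Return the devices a service call should operate on."""
    if entity_ids:
        return [device for device in devices
                if device.entity_id in entity_ids]
    return list(devices)


Projector = namedtuple('Projector', 'entity_id')
projectors = [Projector('media_player.epson_office'),
              Projector('media_player.epson_cinema')]

assert len(select_targets(projectors)) == 2
assert select_targets(
    projectors, ['media_player.epson_cinema'])[0].entity_id == \
    'media_player.epson_cinema'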
""" Support for Mikrotik routers as device tracker. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/device_tracker.mikrotik/ """ import logging import ssl import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_PORT, CONF_SSL, CONF_METHOD) REQUIREMENTS = ['librouteros==2.2.0'] _LOGGER = logging.getLogger(__name__) MTK_DEFAULT_API_PORT = '8728' MTK_DEFAULT_API_SSL_PORT = '8729' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_METHOD): cv.string, vol.Optional(CONF_PORT): cv.port, vol.Optional(CONF_SSL, default=False): cv.boolean }) def get_scanner(hass, config): """Validate the configuration and return MTikScanner.""" scanner = MikrotikScanner(config[DOMAIN]) return scanner if scanner.success_init else None class MikrotikScanner(DeviceScanner): """This class queries a Mikrotik router.""" def __init__(self, config): """Initialize the scanner.""" self.last_results = {} self.host = config[CONF_HOST] self.ssl = config[CONF_SSL] try: self.port = config[CONF_PORT] except KeyError: if self.ssl: self.port = MTK_DEFAULT_API_SSL_PORT else: self.port = MTK_DEFAULT_API_PORT self.username = config[CONF_USERNAME] self.password = config[CONF_PASSWORD] self.method = config.get(CONF_METHOD) self.connected = False self.success_init = False self.client = None self.wireless_exist = None self.success_init = self.connect_to_device() if self.success_init: _LOGGER.info("Start polling Mikrotik (%s) router...", self.host) self._update_info() else: _LOGGER.error("Connection to Mikrotik (%s) failed", self.host) def connect_to_device(self): """Connect to Mikrotik method.""" import librouteros try: kwargs = { 'port': self.port, 'encoding': 'utf-8' } if self.ssl: ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE kwargs['ssl_wrapper'] = ssl_context.wrap_socket self.client = librouteros.connect( self.host, self.username, self.password, **kwargs ) try: routerboard_info = self.client( cmd='/system/routerboard/getall') except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError): routerboard_info = None raise if routerboard_info: _LOGGER.info( "Connected to Mikrotik %s with IP %s", routerboard_info[0].get('model', 'Router'), self.host) self.connected = True try: self.capsman_exist = self.client( cmd='/caps-man/interface/getall') except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError): self.capsman_exist = False if not self.capsman_exist: _LOGGER.info( "Mikrotik %s: Not a CAPSman controller. Trying " "local interfaces", self.host) try: self.wireless_exist = self.client( cmd='/interface/wireless/getall') except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError): self.wireless_exist = False if not self.wireless_exist and not self.capsman_exist \ or self.method == 'ip': _LOGGER.info( "Mikrotik %s: Wireless adapters not found. Try to " "use DHCP lease table as presence tracker source. 
" "Please decrease lease time as much as possible", self.host) if self.method: _LOGGER.info( "Mikrotik %s: Manually selected polling method %s", self.host, self.method) except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError) as api_error: _LOGGER.error("Connection error: %s", api_error) return self.connected def scan_devices(self): """Scan for new devices and return a list with found device MACs.""" import librouteros try: self._update_info() except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError) as api_error: _LOGGER.error("Connection error: %s", api_error) self.connect_to_device() return [device for device in self.last_results] def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" return self.last_results.get(device) def _update_info(self): """Retrieve latest information from the Mikrotik box.""" if self.method: devices_tracker = self.method else: if self.capsman_exist: devices_tracker = 'capsman' elif self.wireless_exist: devices_tracker = 'wireless' else: devices_tracker = 'ip' _LOGGER.debug( "Loading %s devices from Mikrotik (%s) ...", devices_tracker, self.host) device_names = self.client(cmd='/ip/dhcp-server/lease/getall') if devices_tracker == 'capsman': devices = self.client( cmd='/caps-man/registration-table/getall') elif devices_tracker == 'wireless': devices = self.client( cmd='/interface/wireless/registration-table/getall') else: devices = device_names if device_names is None and devices is None: return False mac_names = {device.get('mac-address'): device.get('host-name') for device in device_names if device.get('mac-address')} if devices_tracker in ('wireless', 'capsman'): self.last_results = { device.get('mac-address'): mac_names.get(device.get('mac-address')) for device in devices} else: self.last_results = { device.get('mac-address'): mac_names.get(device.get('mac-address')) for device in device_names if device.get('active-address')} return True
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/device_tracker/mikrotik.py
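For reference, a minimal, self-contained sketch of the lookup that _update_info builds above; the sample lease and registration entries are made up, since the real ones come from librouteros queries.

dhcp_leases = [
    {'mac-address': 'AA:BB:CC:DD:EE:01', 'host-name': 'phone',
     'active-address': '192.168.88.10'},
    {'mac-address': 'AA:BB:CC:DD:EE:02', 'host-name': 'laptop'},
]
wireless_table = [{'mac-address': 'AA:BB:CC:DD:EE:01'}]

# MAC -> host name, taken from the DHCP lease table.
mac_names = {lease['mac-address']: lease.get('host-name')
             for lease in dhcp_leases if lease.get('mac-address')}

# 'wireless'/'capsman' mode: only devices in the registration table count.
last_results = {entry['mac-address']: mac_names.get(entry['mac-address'])
                for entry in wireless_table}
assert last_results == {'AA:BB:CC:DD:EE:01': 'phone'}

# 'ip' mode: every lease with an active address counts.
last_results_ip = {lease['mac-address']: mac_names.get(lease['mac-address'])
                   for lease in dhcp_leases if lease.get('active-address')}
assert last_results_ip == {'AA:BB:CC:DD:EE:01': 'phone'}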
""" Interfaces with iAlarm control panels. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/alarm_control_panel.ialarm/ """ import logging import re import voluptuous as vol import homeassistant.components.alarm_control_panel as alarm from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['pyialarm==0.3'] _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'iAlarm' def no_application_protocol(value): """Validate that value is without the application protocol.""" protocol_separator = "://" if not value or protocol_separator in value: raise vol.Invalid( 'Invalid host, {} is not allowed'.format(protocol_separator)) return value PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): vol.All(cv.string, no_application_protocol), vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_CODE): cv.positive_int, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up an iAlarm control panel.""" name = config.get(CONF_NAME) code = config.get(CONF_CODE) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) host = config.get(CONF_HOST) url = 'http://{}'.format(host) ialarm = IAlarmPanel(name, code, username, password, url) add_entities([ialarm], True) class IAlarmPanel(alarm.AlarmControlPanel): """Representation of an iAlarm status.""" def __init__(self, name, code, username, password, url): """Initialize the iAlarm status.""" from pyialarm import IAlarm self._name = name self._code = str(code) if code else None self._username = username self._password = password self._url = url self._state = None self._client = IAlarm(username, password, url) @property def name(self): """Return the name of the device.""" return self._name @property def code_format(self): """Return one or more digits/characters.""" if self._code is None: return None if isinstance(self._code, str) and re.search('^\\d+$', self._code): return alarm.FORMAT_NUMBER return alarm.FORMAT_TEXT @property def state(self): """Return the state of the device.""" return self._state def update(self): """Return the state of the device.""" status = self._client.get_status() _LOGGER.debug('iAlarm status: %s', status) if status: status = int(status) if status == self._client.DISARMED: state = STATE_ALARM_DISARMED elif status == self._client.ARMED_AWAY: state = STATE_ALARM_ARMED_AWAY elif status == self._client.ARMED_STAY: state = STATE_ALARM_ARMED_HOME elif status == self._client.TRIGGERED: state = STATE_ALARM_TRIGGERED else: state = None self._state = state def alarm_disarm(self, code=None): """Send disarm command.""" if self._validate_code(code): self._client.disarm() def alarm_arm_away(self, code=None): """Send arm away command.""" if self._validate_code(code): self._client.arm_away() def alarm_arm_home(self, code=None): """Send arm home command.""" if self._validate_code(code): self._client.arm_stay() def _validate_code(self, code): """Validate given code.""" check = self._code is None or code == self._code if not check: _LOGGER.warning("Wrong code entered") return check
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/alarm_control_panel/ialarm.py
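The host validator and the numeric-code check from the iAlarm platform above, repeated in isolation so their behavior is easy to verify (the sample hosts and codes are hypothetical):

import re

import voluptuous as vol


def no_application_protocol(value):
    """Reject hosts that still carry an application protocol prefix."""
    if not value or '://' in value:
        raise vol.Invalid('Invalid host, :// is not allowed')
    return value


assert no_application_protocol('192.168.1.50') == '192.168.1.50'
try:
    no_application_protocol('http://192.168.1.50')
except vol.Invalid as err:
    print(err)  # Invalid host, :// is not allowed

# code_format: an all-digit code selects the number pad in the frontend.
assert re.search('^\\d+$', '1234')
assert not re.search('^\\d+$', 'abc1')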
""" This platform provides sensors for OpenUV data. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.openuv/ """ import logging from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.components.openuv import ( DATA_OPENUV_CLIENT, DATA_UV, DOMAIN, SENSORS, TOPIC_UPDATE, TYPE_CURRENT_OZONE_LEVEL, TYPE_CURRENT_UV_INDEX, TYPE_CURRENT_UV_LEVEL, TYPE_MAX_UV_INDEX, TYPE_SAFE_EXPOSURE_TIME_1, TYPE_SAFE_EXPOSURE_TIME_2, TYPE_SAFE_EXPOSURE_TIME_3, TYPE_SAFE_EXPOSURE_TIME_4, TYPE_SAFE_EXPOSURE_TIME_5, TYPE_SAFE_EXPOSURE_TIME_6, OpenUvEntity) from homeassistant.util.dt import as_local, parse_datetime DEPENDENCIES = ['openuv'] _LOGGER = logging.getLogger(__name__) ATTR_MAX_UV_TIME = 'time' EXPOSURE_TYPE_MAP = { TYPE_SAFE_EXPOSURE_TIME_1: 'st1', TYPE_SAFE_EXPOSURE_TIME_2: 'st2', TYPE_SAFE_EXPOSURE_TIME_3: 'st3', TYPE_SAFE_EXPOSURE_TIME_4: 'st4', TYPE_SAFE_EXPOSURE_TIME_5: 'st5', TYPE_SAFE_EXPOSURE_TIME_6: 'st6' } UV_LEVEL_EXTREME = "Extreme" UV_LEVEL_VHIGH = "Very High" UV_LEVEL_HIGH = "High" UV_LEVEL_MODERATE = "Moderate" UV_LEVEL_LOW = "Low" async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up an OpenUV sensor based on existing config.""" pass async def async_setup_entry(hass, entry, async_add_entities): """Set up a Nest sensor based on a config entry.""" openuv = hass.data[DOMAIN][DATA_OPENUV_CLIENT][entry.entry_id] sensors = [] for sensor_type in openuv.sensor_conditions: name, icon, unit = SENSORS[sensor_type] sensors.append( OpenUvSensor( openuv, sensor_type, name, icon, unit, entry.entry_id)) async_add_entities(sensors, True) class OpenUvSensor(OpenUvEntity): """Define a binary sensor for OpenUV.""" def __init__(self, openuv, sensor_type, name, icon, unit, entry_id): """Initialize the sensor.""" super().__init__(openuv) self._async_unsub_dispatcher_connect = None self._entry_id = entry_id self._icon = icon self._latitude = openuv.client.latitude self._longitude = openuv.client.longitude self._name = name self._sensor_type = sensor_type self._state = None self._unit = unit @property def icon(self): """Return the icon.""" return self._icon @property def should_poll(self): """Disable polling.""" return False @property def state(self): """Return the status of the sensor.""" return self._state @property def unique_id(self) -> str: """Return a unique, HASS-friendly identifier for this entity.""" return '{0}_{1}_{2}'.format( self._latitude, self._longitude, self._sensor_type) @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit async def async_added_to_hass(self): """Register callbacks.""" @callback def update(): """Update the state.""" self.async_schedule_update_ha_state(True) self._async_unsub_dispatcher_connect = async_dispatcher_connect( self.hass, TOPIC_UPDATE, update) async def async_will_remove_from_hass(self): """Disconnect dispatcher listener when removed.""" if self._async_unsub_dispatcher_connect: self._async_unsub_dispatcher_connect() async def async_update(self): """Update the state.""" data = self.openuv.data[DATA_UV]['result'] if self._sensor_type == TYPE_CURRENT_OZONE_LEVEL: self._state = data['ozone'] elif self._sensor_type == TYPE_CURRENT_UV_INDEX: self._state = data['uv'] elif self._sensor_type == TYPE_CURRENT_UV_LEVEL: if data['uv'] >= 11: self._state = UV_LEVEL_EXTREME elif data['uv'] >= 8: self._state = UV_LEVEL_VHIGH elif data['uv'] >= 6: 
self._state = UV_LEVEL_HIGH elif data['uv'] >= 3: self._state = UV_LEVEL_MODERATE else: self._state = UV_LEVEL_LOW elif self._sensor_type == TYPE_MAX_UV_INDEX: self._state = data['uv_max'] self._attrs.update({ ATTR_MAX_UV_TIME: as_local(parse_datetime(data['uv_max_time'])) }) elif self._sensor_type in (TYPE_SAFE_EXPOSURE_TIME_1, TYPE_SAFE_EXPOSURE_TIME_2, TYPE_SAFE_EXPOSURE_TIME_3, TYPE_SAFE_EXPOSURE_TIME_4, TYPE_SAFE_EXPOSURE_TIME_5, TYPE_SAFE_EXPOSURE_TIME_6): self._state = data['safe_exposure_time'][EXPOSURE_TYPE_MAP[ self._sensor_type]]
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/openuv/sensor.py
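The UV-level threshold chain in async_update condenses to a small lookup; a sketch using the same boundaries as the code above:

UV_LEVELS = [(11, 'Extreme'), (8, 'Very High'), (6, 'High'), (3, 'Moderate')]


def uv_level(uv_index):
    """Map a UV index onto the label TYPE_CURRENT_UV_LEVEL would report."""
    for threshold, label in UV_LEVELS:
        if uv_index >= threshold:
            return label
    return 'Low'


assert uv_level(11.2) == 'Extreme'
assert uv_level(7.0) == 'High'
assert uv_level(1.5) == 'Low'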
""" Support for the Daikin HVAC. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/climate.daikin/ """ import logging import re import voluptuous as vol from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, ATTR_OPERATION_MODE, ATTR_SWING_MODE, PLATFORM_SCHEMA, STATE_AUTO, STATE_COOL, STATE_DRY, STATE_FAN_ONLY, STATE_HEAT, STATE_OFF, SUPPORT_FAN_MODE, SUPPORT_OPERATION_MODE, SUPPORT_SWING_MODE, SUPPORT_TARGET_TEMPERATURE, ClimateDevice) from homeassistant.components.daikin import DOMAIN as DAIKIN_DOMAIN from homeassistant.components.daikin.const import ( ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, ATTR_TARGET_TEMPERATURE) from homeassistant.const import ( ATTR_TEMPERATURE, CONF_HOST, CONF_NAME, TEMP_CELSIUS) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string, }) HA_STATE_TO_DAIKIN = { STATE_FAN_ONLY: 'fan', STATE_DRY: 'dry', STATE_COOL: 'cool', STATE_HEAT: 'hot', STATE_AUTO: 'auto', STATE_OFF: 'off', } DAIKIN_TO_HA_STATE = { 'fan': STATE_FAN_ONLY, 'dry': STATE_DRY, 'cool': STATE_COOL, 'hot': STATE_HEAT, 'auto': STATE_AUTO, 'off': STATE_OFF, } HA_ATTR_TO_DAIKIN = { ATTR_OPERATION_MODE: 'mode', ATTR_FAN_MODE: 'f_rate', ATTR_SWING_MODE: 'f_dir', ATTR_INSIDE_TEMPERATURE: 'htemp', ATTR_OUTSIDE_TEMPERATURE: 'otemp', ATTR_TARGET_TEMPERATURE: 'stemp' } def setup_platform(hass, config, add_entities, discovery_info=None): """Old way of setting up the Daikin HVAC platform. Can only be called when a user accidentally mentions the platform in their config. But even in that case it would have been ignored. """ pass async def async_setup_entry(hass, entry, async_add_entities): """Set up Daikin climate based on config_entry.""" daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id) async_add_entities([DaikinClimate(daikin_api)]) class DaikinClimate(ClimateDevice): """Representation of a Daikin HVAC.""" def __init__(self, api): """Initialize the climate device.""" from pydaikin import appliance self._api = api self._list = { ATTR_OPERATION_MODE: list(HA_STATE_TO_DAIKIN), ATTR_FAN_MODE: list( map( str.title, appliance.daikin_values(HA_ATTR_TO_DAIKIN[ATTR_FAN_MODE]) ) ), ATTR_SWING_MODE: list( map( str.title, appliance.daikin_values(HA_ATTR_TO_DAIKIN[ATTR_SWING_MODE]) ) ), } self._supported_features = SUPPORT_TARGET_TEMPERATURE \ | SUPPORT_OPERATION_MODE if self._api.device.support_fan_mode: self._supported_features |= SUPPORT_FAN_MODE if self._api.device.support_swing_mode: self._supported_features |= SUPPORT_SWING_MODE def get(self, key): """Retrieve device settings from API library cache.""" value = None cast_to_float = False if key in [ATTR_TEMPERATURE, ATTR_INSIDE_TEMPERATURE, ATTR_CURRENT_TEMPERATURE]: key = ATTR_INSIDE_TEMPERATURE daikin_attr = HA_ATTR_TO_DAIKIN.get(key) if key == ATTR_INSIDE_TEMPERATURE: value = self._api.device.values.get(daikin_attr) cast_to_float = True elif key == ATTR_TARGET_TEMPERATURE: value = self._api.device.values.get(daikin_attr) cast_to_float = True elif key == ATTR_OUTSIDE_TEMPERATURE: value = self._api.device.values.get(daikin_attr) cast_to_float = True elif key == ATTR_FAN_MODE: value = self._api.device.represent(daikin_attr)[1].title() elif key == ATTR_SWING_MODE: value = self._api.device.represent(daikin_attr)[1].title() elif key == ATTR_OPERATION_MODE: # Daikin can return also internal states auto-1 or auto-7 # 
and we need to translate them as AUTO daikin_mode = re.sub( '[^a-z]', '', self._api.device.represent(daikin_attr)[1]) ha_mode = DAIKIN_TO_HA_STATE.get(daikin_mode) value = ha_mode if value is None: _LOGGER.error("Invalid value requested for key %s", key) else: if value in ("-", "--"): value = None elif cast_to_float: try: value = float(value) except ValueError: value = None return value def set(self, settings): """Set device settings using API.""" values = {} for attr in [ATTR_TEMPERATURE, ATTR_FAN_MODE, ATTR_SWING_MODE, ATTR_OPERATION_MODE]: value = settings.get(attr) if value is None: continue daikin_attr = HA_ATTR_TO_DAIKIN.get(attr) if daikin_attr is not None: if attr == ATTR_OPERATION_MODE: values[daikin_attr] = HA_STATE_TO_DAIKIN[value] elif value in self._list[attr]: values[daikin_attr] = value.lower() else: _LOGGER.error("Invalid value %s for %s", attr, value) # temperature elif attr == ATTR_TEMPERATURE: try: values['stemp'] = str(int(value)) except ValueError: _LOGGER.error("Invalid temperature %s", value) if values: self._api.device.set(values) @property def supported_features(self): """Return the list of supported features.""" return self._supported_features @property def name(self): """Return the name of the thermostat, if any.""" return self._api.name @property def unique_id(self): """Return a unique ID.""" return self._api.mac @property def temperature_unit(self): """Return the unit of measurement which this thermostat uses.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self.get(ATTR_CURRENT_TEMPERATURE) @property def target_temperature(self): """Return the temperature we try to reach.""" return self.get(ATTR_TARGET_TEMPERATURE) @property def target_temperature_step(self): """Return the supported step of target temperature.""" return 1 def set_temperature(self, **kwargs): """Set new target temperature.""" self.set(kwargs) @property def current_operation(self): """Return current operation ie. heat, cool, idle.""" return self.get(ATTR_OPERATION_MODE) @property def operation_list(self): """Return the list of available operation modes.""" return self._list.get(ATTR_OPERATION_MODE) def set_operation_mode(self, operation_mode): """Set HVAC mode.""" self.set({ATTR_OPERATION_MODE: operation_mode}) @property def current_fan_mode(self): """Return the fan setting.""" return self.get(ATTR_FAN_MODE) def set_fan_mode(self, fan_mode): """Set fan mode.""" self.set({ATTR_FAN_MODE: fan_mode}) @property def fan_list(self): """List of available fan modes.""" return self._list.get(ATTR_FAN_MODE) @property def current_swing_mode(self): """Return the fan setting.""" return self.get(ATTR_SWING_MODE) def set_swing_mode(self, swing_mode): """Set new target temperature.""" self.set({ATTR_SWING_MODE: swing_mode}) @property def swing_list(self): """List of available swing modes.""" return self._list.get(ATTR_SWING_MODE) def update(self): """Retrieve latest state.""" self._api.update() @property def device_info(self): """Return a device description for device registry.""" return self._api.device_info
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/daikin/climate.py
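The mode normalisation in get() relies on stripping everything but lowercase letters, which folds Daikin's internal 'auto-1'/'auto-7' states onto the plain 'auto' key:

import re

for raw in ('auto-1', 'auto-7', 'cool', 'hot'):
    print(raw, '->', re.sub('[^a-z]', '', raw))
# auto-1 -> auto
# auto-7 -> auto
# cool -> cool
# hot -> hot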
""" Binary sensors on Zigbee Home Automation networks. For more details on this platform, please refer to the documentation at https://home-assistant.io/components/binary_sensor.zha/ """ import logging from homeassistant.components.binary_sensor import DOMAIN, BinarySensorDevice from homeassistant.helpers.dispatcher import async_dispatcher_connect from .core.const import ( DATA_ZHA, DATA_ZHA_DISPATCHERS, ZHA_DISCOVERY_NEW, LISTENER_ON_OFF, LISTENER_LEVEL, LISTENER_ZONE, SIGNAL_ATTR_UPDATED, SIGNAL_MOVE_LEVEL, SIGNAL_SET_LEVEL, LISTENER_ATTRIBUTE, UNKNOWN, OPENING, ZONE, OCCUPANCY, ATTR_LEVEL, SENSOR_TYPE) from .entity import ZhaEntity _LOGGER = logging.getLogger(__name__) DEPENDENCIES = ['zha'] # Zigbee Cluster Library Zone Type to Home Assistant device class CLASS_MAPPING = { 0x000d: 'motion', 0x0015: 'opening', 0x0028: 'smoke', 0x002a: 'moisture', 0x002b: 'gas', 0x002d: 'vibration', } async def get_ias_device_class(listener): """Get the HA device class from the listener.""" zone_type = await listener.get_attribute_value('zone_type') return CLASS_MAPPING.get(zone_type) DEVICE_CLASS_REGISTRY = { UNKNOWN: None, OPENING: OPENING, ZONE: get_ias_device_class, OCCUPANCY: OCCUPANCY, } async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Old way of setting up Zigbee Home Automation binary sensors.""" pass async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Zigbee Home Automation binary sensor from config entry.""" async def async_discover(discovery_info): await _async_setup_entities(hass, config_entry, async_add_entities, [discovery_info]) unsub = async_dispatcher_connect( hass, ZHA_DISCOVERY_NEW.format(DOMAIN), async_discover) hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub) binary_sensors = hass.data.get(DATA_ZHA, {}).get(DOMAIN) if binary_sensors is not None: await _async_setup_entities(hass, config_entry, async_add_entities, binary_sensors.values()) del hass.data[DATA_ZHA][DOMAIN] async def _async_setup_entities(hass, config_entry, async_add_entities, discovery_infos): """Set up the ZHA binary sensors.""" entities = [] for discovery_info in discovery_infos: entities.append(BinarySensor(**discovery_info)) async_add_entities(entities, update_before_add=True) class BinarySensor(ZhaEntity, BinarySensorDevice): """ZHA BinarySensor.""" _domain = DOMAIN _device_class = None def __init__(self, **kwargs): """Initialize the ZHA binary sensor.""" super().__init__(**kwargs) self._device_state_attributes = {} self._zone_listener = self.cluster_listeners.get(LISTENER_ZONE) self._on_off_listener = self.cluster_listeners.get(LISTENER_ON_OFF) self._level_listener = self.cluster_listeners.get(LISTENER_LEVEL) self._attr_listener = self.cluster_listeners.get(LISTENER_ATTRIBUTE) self._zha_sensor_type = kwargs[SENSOR_TYPE] self._level = None async def _determine_device_class(self): """Determine the device class for this binary sensor.""" device_class_supplier = DEVICE_CLASS_REGISTRY.get( self._zha_sensor_type) if callable(device_class_supplier): listener = self.cluster_listeners.get(self._zha_sensor_type) if listener is None: return None return await device_class_supplier(listener) return device_class_supplier async def async_added_to_hass(self): """Run when about to be added to hass.""" self._device_class = await self._determine_device_class() await super().async_added_to_hass() if self._level_listener: await self.async_accept_signal( self._level_listener, SIGNAL_SET_LEVEL, self.set_level) await self.async_accept_signal( 
self._level_listener, SIGNAL_MOVE_LEVEL, self.move_level) if self._on_off_listener: await self.async_accept_signal( self._on_off_listener, SIGNAL_ATTR_UPDATED, self.async_set_state) if self._zone_listener: await self.async_accept_signal( self._zone_listener, SIGNAL_ATTR_UPDATED, self.async_set_state) if self._attr_listener: await self.async_accept_signal( self._attr_listener, SIGNAL_ATTR_UPDATED, self.async_set_state) @property def is_on(self) -> bool: """Return if the switch is on based on the statemachine.""" if self._state is None: return False return self._state @property def device_class(self) -> str: """Return device class from component DEVICE_CLASSES.""" return self._device_class def async_set_state(self, state): """Set the state.""" self._state = bool(state) self.async_schedule_update_ha_state() def move_level(self, change): """Increment the level, setting state if appropriate.""" level = self._level or 0 if not self._state and change > 0: level = 0 self._level = min(254, max(0, level + change)) self._state = bool(self._level) self.async_schedule_update_ha_state() def set_level(self, level): """Set the level, setting state if appropriate.""" self._level = level self._state = bool(level) self.async_schedule_update_ha_state() @property def device_state_attributes(self): """Return the device state attributes.""" if self._level_listener is not None: self._device_state_attributes.update({ ATTR_LEVEL: self._state and self._level or 0 }) return self._device_state_attributes
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/zha/binary_sensor.py
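DEVICE_CLASS_REGISTRY mixes plain values with async suppliers; a self-contained sketch of that resolution pattern (the registry entries and the stand-in supplier here are hypothetical):

import asyncio


async def lookup_device_class(sensor_type, registry, listener=None):
    """Resolve an entry that may be a constant or an async supplier."""
    supplier = registry.get(sensor_type)
    if callable(supplier):
        return await supplier(listener)
    return supplier


async def zone_supplier(listener):
    # Stand-in for get_ias_device_class; a real listener would be queried.
    return 'motion'


registry = {'occupancy': 'occupancy', 'zone': zone_supplier}
print(asyncio.run(lookup_device_class('zone', registry)))       # motion
print(asyncio.run(lookup_device_class('occupancy', registry)))  # occupancy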
"""Middleware to fetch real IP.""" from ipaddress import ip_address from aiohttp.web import middleware from aiohttp.hdrs import X_FORWARDED_FOR from homeassistant.core import callback from .const import KEY_REAL_IP @callback def setup_real_ip(app, use_x_forwarded_for, trusted_proxies): """Create IP Ban middleware for the app.""" @middleware async def real_ip_middleware(request, handler): """Real IP middleware.""" connected_ip = ip_address( request.transport.get_extra_info('peername')[0]) request[KEY_REAL_IP] = connected_ip # Only use the XFF header if enabled, present, and from a trusted proxy try: if (use_x_forwarded_for and X_FORWARDED_FOR in request.headers and any(connected_ip in trusted_proxy for trusted_proxy in trusted_proxies)): request[KEY_REAL_IP] = ip_address( request.headers.get(X_FORWARDED_FOR).split(', ')[-1]) except ValueError: pass return await handler(request) app.middlewares.append(real_ip_middleware)
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/http/real_ip.py
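How the middleware above resolves the client address, with hypothetical values; note it trusts only the last entry of X-Forwarded-For, and only when the direct peer is a trusted proxy:

from ipaddress import ip_address, ip_network

trusted_proxies = [ip_network('192.168.1.0/24')]
connected_ip = ip_address('192.168.1.10')  # the peer that connected to us
forwarded_header = '203.0.113.5'           # X-Forwarded-For set by the proxy

if any(connected_ip in proxy for proxy in trusted_proxies):
    real_ip = ip_address(forwarded_header.split(', ')[-1])
else:
    real_ip = connected_ip
print(real_ip)  # 203.0.113.5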
""" Support for IHC devices. For more details about this component, please refer to the documentation at https://home-assistant.io/components/ihc/ """ import logging import os.path import voluptuous as vol from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA from homeassistant.components.ihc.const import ( ATTR_IHC_ID, ATTR_VALUE, CONF_AUTOSETUP, CONF_BINARY_SENSOR, CONF_DIMMABLE, CONF_INFO, CONF_INVERTING, CONF_LIGHT, CONF_NODE, CONF_NOTE, CONF_POSITION, CONF_SENSOR, CONF_SWITCH, CONF_XPATH, SERVICE_SET_RUNTIME_VALUE_BOOL, SERVICE_SET_RUNTIME_VALUE_FLOAT, SERVICE_SET_RUNTIME_VALUE_INT) from homeassistant.config import load_yaml_config_file from homeassistant.const import ( CONF_ID, CONF_NAME, CONF_PASSWORD, CONF_TYPE, CONF_UNIT_OF_MEASUREMENT, CONF_URL, CONF_USERNAME, TEMP_CELSIUS) from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import HomeAssistantType REQUIREMENTS = ['ihcsdk==2.2.0', 'defusedxml==0.5.0'] _LOGGER = logging.getLogger(__name__) AUTO_SETUP_YAML = 'ihc_auto_setup.yaml' DOMAIN = 'ihc' IHC_CONTROLLER = 'controller' IHC_DATA = 'ihc{}' IHC_INFO = 'info' IHC_PLATFORMS = ('binary_sensor', 'light', 'sensor', 'switch') def validate_name(config): """Validate the device name.""" if CONF_NAME in config: return config ihcid = config[CONF_ID] name = 'ihc_{}'.format(ihcid) config[CONF_NAME] = name return config DEVICE_SCHEMA = vol.Schema({ vol.Required(CONF_ID): cv.positive_int, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_NOTE): cv.string, vol.Optional(CONF_POSITION): cv.string, }) SWITCH_SCHEMA = DEVICE_SCHEMA.extend({}) BINARY_SENSOR_SCHEMA = DEVICE_SCHEMA.extend({ vol.Optional(CONF_INVERTING, default=False): cv.boolean, vol.Optional(CONF_TYPE): DEVICE_CLASSES_SCHEMA, }) LIGHT_SCHEMA = DEVICE_SCHEMA.extend({ vol.Optional(CONF_DIMMABLE, default=False): cv.boolean, }) SENSOR_SCHEMA = DEVICE_SCHEMA.extend({ vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=TEMP_CELSIUS): cv.string, }) IHC_SCHEMA = vol.Schema({ vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_URL): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_AUTOSETUP, default=True): cv.boolean, vol.Optional(CONF_BINARY_SENSOR, default=[]): vol.All( cv.ensure_list, [vol.All(BINARY_SENSOR_SCHEMA, validate_name)]), vol.Optional(CONF_INFO, default=True): cv.boolean, vol.Optional(CONF_LIGHT, default=[]): vol.All(cv.ensure_list, [vol.All(LIGHT_SCHEMA, validate_name)]), vol.Optional(CONF_SENSOR, default=[]): vol.All(cv.ensure_list, [vol.All(SENSOR_SCHEMA, validate_name)]), vol.Optional(CONF_SWITCH, default=[]): vol.All(cv.ensure_list, [vol.All(SWITCH_SCHEMA, validate_name)]), }) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema(vol.All(cv.ensure_list, [IHC_SCHEMA])), }, extra=vol.ALLOW_EXTRA) AUTO_SETUP_SCHEMA = vol.Schema({ vol.Optional(CONF_BINARY_SENSOR, default=[]): vol.All(cv.ensure_list, [ vol.All({ vol.Required(CONF_NODE): cv.string, vol.Required(CONF_XPATH): cv.string, vol.Optional(CONF_INVERTING, default=False): cv.boolean, vol.Optional(CONF_TYPE): cv.string, }) ]), vol.Optional(CONF_LIGHT, default=[]): vol.All(cv.ensure_list, [ vol.All({ vol.Required(CONF_NODE): cv.string, vol.Required(CONF_XPATH): cv.string, vol.Optional(CONF_DIMMABLE, default=False): cv.boolean, }) ]), vol.Optional(CONF_SENSOR, default=[]): vol.All(cv.ensure_list, [ vol.All({ vol.Required(CONF_NODE): cv.string, vol.Required(CONF_XPATH): cv.string, vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=TEMP_CELSIUS): cv.string, }) ]), 
vol.Optional(CONF_SWITCH, default=[]): vol.All(cv.ensure_list, [ vol.All({ vol.Required(CONF_NODE): cv.string, vol.Required(CONF_XPATH): cv.string, }) ]), }) SET_RUNTIME_VALUE_BOOL_SCHEMA = vol.Schema({ vol.Required(ATTR_IHC_ID): cv.positive_int, vol.Required(ATTR_VALUE): cv.boolean, }) SET_RUNTIME_VALUE_INT_SCHEMA = vol.Schema({ vol.Required(ATTR_IHC_ID): cv.positive_int, vol.Required(ATTR_VALUE): int, }) SET_RUNTIME_VALUE_FLOAT_SCHEMA = vol.Schema({ vol.Required(ATTR_IHC_ID): cv.positive_int, vol.Required(ATTR_VALUE): vol.Coerce(float), }) def setup(hass, config): """Set up the IHC platform.""" conf = config.get(DOMAIN) for index, controller_conf in enumerate(conf): if not ihc_setup(hass, config, controller_conf, index): return False return True def ihc_setup(hass, config, conf, controller_id): """Set up the IHC component.""" from ihcsdk.ihccontroller import IHCController url = conf[CONF_URL] username = conf[CONF_USERNAME] password = conf[CONF_PASSWORD] ihc_controller = IHCController(url, username, password) if not ihc_controller.authenticate(): _LOGGER.error("Unable to authenticate on IHC controller") return False if (conf[CONF_AUTOSETUP] and not autosetup_ihc_products( hass, config, ihc_controller, controller_id)): return False # Manual configuration get_manual_configuration( hass, config, conf, ihc_controller, controller_id) # Store controller configuration ihc_key = IHC_DATA.format(controller_id) hass.data[ihc_key] = { IHC_CONTROLLER: ihc_controller, IHC_INFO: conf[CONF_INFO]} setup_service_functions(hass, ihc_controller) return True def get_manual_configuration( hass, config, conf, ihc_controller, controller_id): """Get manual configuration for IHC devices.""" for component in IHC_PLATFORMS: discovery_info = {} if component in conf: component_setup = conf.get(component) for sensor_cfg in component_setup: name = sensor_cfg[CONF_NAME] device = { 'ihc_id': sensor_cfg[CONF_ID], 'ctrl_id': controller_id, 'product': { 'name': name, 'note': sensor_cfg.get(CONF_NOTE) or '', 'position': sensor_cfg.get(CONF_POSITION) or ''}, 'product_cfg': { 'type': sensor_cfg.get(CONF_TYPE), 'inverting': sensor_cfg.get(CONF_INVERTING), 'dimmable': sensor_cfg.get(CONF_DIMMABLE), 'unit_of_measurement': sensor_cfg.get( CONF_UNIT_OF_MEASUREMENT) } } discovery_info[name] = device if discovery_info: discovery.load_platform( hass, component, DOMAIN, discovery_info, config) def autosetup_ihc_products(hass: HomeAssistantType, config, ihc_controller, controller_id): """Auto setup of IHC products from the IHC project file.""" from defusedxml import ElementTree project_xml = ihc_controller.get_project() if not project_xml: _LOGGER.error("Unable to read project from IHC controller") return False project = ElementTree.fromstring(project_xml) # if an auto setup file exist in the configuration it will override yaml_path = hass.config.path(AUTO_SETUP_YAML) if not os.path.isfile(yaml_path): yaml_path = os.path.join(os.path.dirname(__file__), AUTO_SETUP_YAML) yaml = load_yaml_config_file(yaml_path) try: auto_setup_conf = AUTO_SETUP_SCHEMA(yaml) except vol.Invalid as exception: _LOGGER.error("Invalid IHC auto setup data: %s", exception) return False groups = project.findall('.//group') for component in IHC_PLATFORMS: component_setup = auto_setup_conf[component] discovery_info = get_discovery_info( component_setup, groups, controller_id) if discovery_info: discovery.load_platform( hass, component, DOMAIN, discovery_info, config) return True def get_discovery_info(component_setup, groups, controller_id): """Get discovery info for 
specified IHC component.""" discovery_data = {} for group in groups: groupname = group.attrib['name'] for product_cfg in component_setup: products = group.findall(product_cfg[CONF_XPATH]) for product in products: nodes = product.findall(product_cfg[CONF_NODE]) for node in nodes: if ('setting' in node.attrib and node.attrib['setting'] == 'yes'): continue ihc_id = int(node.attrib['id'].strip('_'), 0) name = '{}_{}'.format(groupname, ihc_id) device = { 'ihc_id': ihc_id, 'ctrl_id': controller_id, 'product': { 'name': product.get('name') or '', 'note': product.get('note') or '', 'position': product.get('position') or ''}, 'product_cfg': product_cfg} discovery_data[name] = device return discovery_data def setup_service_functions(hass: HomeAssistantType, ihc_controller): """Set up the IHC service functions.""" def set_runtime_value_bool(call): """Set a IHC runtime bool value service function.""" ihc_id = call.data[ATTR_IHC_ID] value = call.data[ATTR_VALUE] ihc_controller.set_runtime_value_bool(ihc_id, value) def set_runtime_value_int(call): """Set a IHC runtime integer value service function.""" ihc_id = call.data[ATTR_IHC_ID] value = call.data[ATTR_VALUE] ihc_controller.set_runtime_value_int(ihc_id, value) def set_runtime_value_float(call): """Set a IHC runtime float value service function.""" ihc_id = call.data[ATTR_IHC_ID] value = call.data[ATTR_VALUE] ihc_controller.set_runtime_value_float(ihc_id, value) hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_BOOL, set_runtime_value_bool, schema=SET_RUNTIME_VALUE_BOOL_SCHEMA) hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_INT, set_runtime_value_int, schema=SET_RUNTIME_VALUE_INT_SCHEMA) hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_FLOAT, set_runtime_value_float, schema=SET_RUNTIME_VALUE_FLOAT_SCHEMA)
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/ihc/__init__.py
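The resource ids in the IHC project file look like '_0x3f5d12' (a hypothetical value); get_discovery_info strips the underscore and parses with base 0 so the 0x prefix is honoured:

node_id = '_0x3f5d12'
ihc_id = int(node_id.strip('_'), 0)
print(ihc_id)  # 4152594
print('{}_{}'.format('living_room', ihc_id))  # living_room_4152594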
""" Support for Geofency. For more details about this component, please refer to the documentation at https://home-assistant.io/components/geofency/ """ import logging import voluptuous as vol from aiohttp import web import homeassistant.helpers.config_validation as cv from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER from homeassistant.const import HTTP_UNPROCESSABLE_ENTITY, STATE_NOT_HOME, \ ATTR_LATITUDE, ATTR_LONGITUDE, CONF_WEBHOOK_ID, HTTP_OK, ATTR_NAME from homeassistant.helpers import config_entry_flow from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.util import slugify _LOGGER = logging.getLogger(__name__) DOMAIN = 'geofency' DEPENDENCIES = ['webhook'] CONF_MOBILE_BEACONS = 'mobile_beacons' CONFIG_SCHEMA = vol.Schema({ vol.Optional(DOMAIN): vol.Schema({ vol.Optional(CONF_MOBILE_BEACONS, default=[]): vol.All( cv.ensure_list, [cv.string] ), }), }, extra=vol.ALLOW_EXTRA) ATTR_ADDRESS = 'address' ATTR_BEACON_ID = 'beaconUUID' ATTR_CURRENT_LATITUDE = 'currentLatitude' ATTR_CURRENT_LONGITUDE = 'currentLongitude' ATTR_DEVICE = 'device' ATTR_ENTRY = 'entry' BEACON_DEV_PREFIX = 'beacon' LOCATION_ENTRY = '1' LOCATION_EXIT = '0' TRACKER_UPDATE = '{}_tracker_update'.format(DOMAIN) def _address(value: str) -> str: r"""Coerce address by replacing '\n' with ' '.""" return value.replace('\n', ' ') WEBHOOK_SCHEMA = vol.Schema({ vol.Required(ATTR_ADDRESS): vol.All(cv.string, _address), vol.Required(ATTR_DEVICE): vol.All(cv.string, slugify), vol.Required(ATTR_ENTRY): vol.Any(LOCATION_ENTRY, LOCATION_EXIT), vol.Required(ATTR_LATITUDE): cv.latitude, vol.Required(ATTR_LONGITUDE): cv.longitude, vol.Required(ATTR_NAME): vol.All(cv.string, slugify), vol.Optional(ATTR_CURRENT_LATITUDE): cv.latitude, vol.Optional(ATTR_CURRENT_LONGITUDE): cv.longitude, vol.Optional(ATTR_BEACON_ID): cv.string }, extra=vol.ALLOW_EXTRA) async def async_setup(hass, hass_config): """Set up the Geofency component.""" config = hass_config.get(DOMAIN, {}) mobile_beacons = config.get(CONF_MOBILE_BEACONS, []) hass.data[DOMAIN] = [slugify(beacon) for beacon in mobile_beacons] return True async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook from Geofency.""" try: data = WEBHOOK_SCHEMA(dict(await request.post())) except vol.MultipleInvalid as error: return web.Response( body=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY ) if _is_mobile_beacon(data, hass.data[DOMAIN]): return _set_location(hass, data, None) if data['entry'] == LOCATION_ENTRY: location_name = data['name'] else: location_name = STATE_NOT_HOME if ATTR_CURRENT_LATITUDE in data: data[ATTR_LATITUDE] = data[ATTR_CURRENT_LATITUDE] data[ATTR_LONGITUDE] = data[ATTR_CURRENT_LONGITUDE] return _set_location(hass, data, location_name) def _is_mobile_beacon(data, mobile_beacons): """Check if we have a mobile beacon.""" return ATTR_BEACON_ID in data and data['name'] in mobile_beacons def _device_name(data): """Return name of device tracker.""" if ATTR_BEACON_ID in data: return "{}_{}".format(BEACON_DEV_PREFIX, data['name']) return data['device'] def _set_location(hass, data, location_name): """Fire HA event to set location.""" device = _device_name(data) async_dispatcher_send( hass, TRACKER_UPDATE, device, (data[ATTR_LATITUDE], data[ATTR_LONGITUDE]), location_name, data ) return web.Response( text="Setting location for {}".format(device), status=HTTP_OK ) async def async_setup_entry(hass, entry): """Configure based on config entry.""" hass.components.webhook.async_register( DOMAIN, 
'Geofency', entry.data[CONF_WEBHOOK_ID], handle_webhook) hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER) ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID]) await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER) return True config_entry_flow.register_webhook_flow( DOMAIN, 'Geofency Webhook', { 'docs_url': 'https://www.home-assistant.io/components/geofency/' } )
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/geofency/__init__.py
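A trimmed stand-in for WEBHOOK_SCHEMA showing how handle_webhook turns a form payload into a location name (slugify and the HA constants are left out to keep the sketch self-contained; the payload is made up):

import voluptuous as vol

schema = vol.Schema({
    vol.Required('entry'): vol.Any('1', '0'),
    vol.Required('latitude'): vol.Coerce(float),
    vol.Required('longitude'): vol.Coerce(float),
    vol.Required('name'): str,
}, extra=vol.ALLOW_EXTRA)

payload = {'entry': '1', 'latitude': '52.1', 'longitude': '5.6',
           'name': 'home', 'device': 'phone'}
data = schema(payload)

# Entry event -> zone name; exit event -> 'not_home'.
location_name = data['name'] if data['entry'] == '1' else 'not_home'
print(location_name, data['latitude'])  # home 52.1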
""" Platform for the Aladdin Connect cover component. For more details about this platform, please refer to the documentation https://home-assistant.io/components/cover.aladdin_connect/ """ import logging import voluptuous as vol from homeassistant.components.cover import (CoverDevice, PLATFORM_SCHEMA, SUPPORT_OPEN, SUPPORT_CLOSE) from homeassistant.const import (CONF_USERNAME, CONF_PASSWORD, STATE_CLOSED, STATE_OPENING, STATE_CLOSING, STATE_OPEN) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['aladdin_connect==0.3'] _LOGGER = logging.getLogger(__name__) NOTIFICATION_ID = 'aladdin_notification' NOTIFICATION_TITLE = 'Aladdin Connect Cover Setup' STATES_MAP = { 'open': STATE_OPEN, 'opening': STATE_OPENING, 'closed': STATE_CLOSED, 'closing': STATE_CLOSING } SUPPORTED_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Aladdin Connect platform.""" from aladdin_connect import AladdinConnectClient username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) acc = AladdinConnectClient(username, password) try: if not acc.login(): raise ValueError("Username or Password is incorrect") add_entities(AladdinDevice(acc, door) for door in acc.get_doors()) except (TypeError, KeyError, NameError, ValueError) as ex: _LOGGER.error("%s", ex) hass.components.persistent_notification.create( 'Error: {}<br />' 'You will need to restart hass after fixing.' ''.format(ex), title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID) class AladdinDevice(CoverDevice): """Representation of Aladdin Connect cover.""" def __init__(self, acc, device): """Initialize the cover.""" self._acc = acc self._device_id = device['device_id'] self._number = device['door_number'] self._name = device['name'] self._status = STATES_MAP.get(device['status']) @property def device_class(self): """Define this cover as a garage door.""" return 'garage' @property def supported_features(self): """Flag supported features.""" return SUPPORTED_FEATURES @property def unique_id(self): """Return a unique ID.""" return '{}-{}'.format(self._device_id, self._number) @property def name(self): """Return the name of the garage door.""" return self._name @property def is_opening(self): """Return if the cover is opening or not.""" return self._status == STATE_OPENING @property def is_closing(self): """Return if the cover is closing or not.""" return self._status == STATE_CLOSING @property def is_closed(self): """Return None if status is unknown, True if closed, else False.""" if self._status is None: return None return self._status == STATE_CLOSED def close_cover(self, **kwargs): """Issue close command to cover.""" self._acc.close_door(self._device_id, self._number) def open_cover(self, **kwargs): """Issue open command to cover.""" self._acc.open_door(self._device_id, self._number) def update(self): """Update status of cover.""" acc_status = self._acc.get_door_status(self._device_id, self._number) self._status = STATES_MAP.get(acc_status)
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/cover/aladdin_connect.py
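The camera test above never touches the filesystem: it swaps the demo platform's open for unittest.mock's mock_open. A minimal standalone sketch of the same technique, assuming only the standard library (read_image is a hypothetical stand-in for the camera's file read):

from unittest.mock import mock_open, patch

def read_image(path):
    # Hypothetical stand-in for the demo camera's image read.
    with open(path, 'rb') as file:
        return file.read()

mocked = mock_open(read_data=b'ON')
with patch('builtins.open', mocked):
    assert read_image('demo_0.jpg') == b'ON'

# The mock records its calls, so the requested path can be checked
# afterwards, much like the call_args_list assertion in the test.
assert mocked.call_args_list[0][0][0] == 'demo_0.jpg'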
""" This component provides HA lock support for Abode Security System. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/lock.abode/ """ import logging from homeassistant.components.abode import AbodeDevice, DOMAIN as ABODE_DOMAIN from homeassistant.components.lock import LockDevice DEPENDENCIES = ['abode'] _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Abode lock devices.""" import abodepy.helpers.constants as CONST data = hass.data[ABODE_DOMAIN] devices = [] for device in data.abode.get_devices(generic_type=CONST.TYPE_LOCK): if data.is_excluded(device): continue devices.append(AbodeLock(data, device)) data.devices.extend(devices) add_entities(devices) class AbodeLock(AbodeDevice, LockDevice): """Representation of an Abode lock.""" def lock(self, **kwargs): """Lock the device.""" self._device.lock() def unlock(self, **kwargs): """Unlock the device.""" self._device.unlock() @property def is_locked(self): """Return true if device is on.""" return self._device.is_locked
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/abode/lock.py
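setup_platform above follows the common Abode pattern: fetch devices of one type from the shared hub data, skip anything the user excluded, and wrap the rest as entities. A dependency-free sketch of that filter (ExampleLock and the device dicts are hypothetical stand-ins):

class ExampleLock:
    """Hypothetical stand-in for AbodeLock; wraps one hub device."""
    def __init__(self, device):
        self.device = device

def build_entities(devices, excluded_ids):
    """Skip excluded devices and wrap the rest, as setup_platform does."""
    return [ExampleLock(dev) for dev in devices if dev['id'] not in excluded_ids]

devices = [{'id': 'lock-1'}, {'id': 'lock-2'}]
entities = build_entities(devices, excluded_ids={'lock-2'})
assert len(entities) == 1 and entities[0].device['id'] == 'lock-1'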
"""Component for interfacing to Lutron Homeworks Series 4 and 8 systems. For more details about this component, please refer to the documentation at https://home-assistant.io/components/homeworks/ """ import logging import voluptuous as vol from homeassistant.core import callback from homeassistant.const import ( CONF_HOST, CONF_ID, CONF_NAME, CONF_PORT, EVENT_HOMEASSISTANT_STOP) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import load_platform from homeassistant.helpers.dispatcher import ( dispatcher_send, async_dispatcher_connect) from homeassistant.util import slugify REQUIREMENTS = ['pyhomeworks==0.0.6'] _LOGGER = logging.getLogger(__name__) DOMAIN = 'homeworks' HOMEWORKS_CONTROLLER = 'homeworks' ENTITY_SIGNAL = 'homeworks_entity_{}' EVENT_BUTTON_PRESS = 'homeworks_button_press' EVENT_BUTTON_RELEASE = 'homeworks_button_release' CONF_DIMMERS = 'dimmers' CONF_KEYPADS = 'keypads' CONF_ADDR = 'addr' CONF_RATE = 'rate' FADE_RATE = 1. CV_FADE_RATE = vol.All(vol.Coerce(float), vol.Range(min=0, max=20)) DIMMER_SCHEMA = vol.Schema({ vol.Required(CONF_ADDR): cv.string, vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_RATE, default=FADE_RATE): CV_FADE_RATE }) KEYPAD_SCHEMA = vol.Schema({ vol.Required(CONF_ADDR): cv.string, vol.Required(CONF_NAME): cv.string, }) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.port, vol.Required(CONF_DIMMERS): vol.All(cv.ensure_list, [DIMMER_SCHEMA]), vol.Optional(CONF_KEYPADS, default=[]): vol.All(cv.ensure_list, [KEYPAD_SCHEMA]), }), }, extra=vol.ALLOW_EXTRA) def setup(hass, base_config): """Start Homeworks controller.""" from pyhomeworks.pyhomeworks import Homeworks def hw_callback(msg_type, values): """Dispatch state changes.""" _LOGGER.debug('callback: %s, %s', msg_type, values) addr = values[0] signal = ENTITY_SIGNAL.format(addr) dispatcher_send(hass, signal, msg_type, values) config = base_config.get(DOMAIN) controller = Homeworks(config[CONF_HOST], config[CONF_PORT], hw_callback) hass.data[HOMEWORKS_CONTROLLER] = controller def cleanup(event): controller.close() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup) dimmers = config[CONF_DIMMERS] load_platform(hass, 'light', DOMAIN, {CONF_DIMMERS: dimmers}, base_config) for key_config in config[CONF_KEYPADS]: addr = key_config[CONF_ADDR] name = key_config[CONF_NAME] HomeworksKeypadEvent(hass, addr, name) return True class HomeworksDevice(): """Base class of a Homeworks device.""" def __init__(self, controller, addr, name): """Controller, address, and name of the device.""" self._addr = addr self._name = name self._controller = controller @property def unique_id(self): """Return a unique identifier.""" return 'homeworks.{}'.format(self._addr) @property def name(self): """Device name.""" return self._name @property def should_poll(self): """No need to poll.""" return False class HomeworksKeypadEvent: """When you want signals instead of entities. Stateless sensors such as keypads are expected to generate an event instead of a sensor entity in hass. 
""" def __init__(self, hass, addr, name): """Register callback that will be used for signals.""" self._hass = hass self._addr = addr self._name = name self._id = slugify(self._name) signal = ENTITY_SIGNAL.format(self._addr) async_dispatcher_connect( self._hass, signal, self._update_callback) @callback def _update_callback(self, msg_type, values): """Fire events if button is pressed or released.""" from pyhomeworks.pyhomeworks import ( HW_BUTTON_PRESSED, HW_BUTTON_RELEASED) if msg_type == HW_BUTTON_PRESSED: event = EVENT_BUTTON_PRESS elif msg_type == HW_BUTTON_RELEASED: event = EVENT_BUTTON_RELEASE else: return data = {CONF_ID: self._id, CONF_NAME: self._name, 'button': values[1]} self._hass.bus.async_fire(event, data)
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/homeworks/__init__.py
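The Homeworks integration above routes every controller message through one callback that re-dispatches on a per-address signal built from ENTITY_SIGNAL, so entities and keypad event objects only see traffic for their own address. A simplified in-memory model of that fan-out (the registry and the sample address are hypothetical):

ENTITY_SIGNAL = 'homeworks_entity_{}'
subscribers = {}  # signal name -> list of callbacks (hypothetical registry)

def dispatcher_connect(signal, callback):
    subscribers.setdefault(signal, []).append(callback)

def dispatcher_send(signal, msg_type, values):
    for callback in subscribers.get(signal, []):
        callback(msg_type, values)

events = []
dispatcher_connect(ENTITY_SIGNAL.format('[1:4:1]'), lambda m, v: events.append((m, v)))

def hw_callback(msg_type, values):
    # Mirror the component: route on the address carried in values[0].
    dispatcher_send(ENTITY_SIGNAL.format(values[0]), msg_type, values)

hw_callback('button', ['[1:4:1]', 3])
assert events == [('button', ['[1:4:1]', 3])]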
""" Support for MAX! Window Shutter via MAX! Cube. For more details about this platform, please refer to the documentation https://home-assistant.io/components/maxcube/ """ import logging from homeassistant.components.binary_sensor import BinarySensorDevice from homeassistant.components.maxcube import DATA_KEY _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Iterate through all MAX! Devices and add window shutters.""" devices = [] for handler in hass.data[DATA_KEY].values(): cube = handler.cube for device in cube.devices: name = "{} {}".format( cube.room_by_id(device.room_id).name, device.name) # Only add Window Shutters if cube.is_windowshutter(device): devices.append( MaxCubeShutter(handler, name, device.rf_address)) if devices: add_entities(devices) class MaxCubeShutter(BinarySensorDevice): """Representation of a MAX! Cube Binary Sensor device.""" def __init__(self, handler, name, rf_address): """Initialize MAX! Cube BinarySensorDevice.""" self._name = name self._sensor_type = 'window' self._rf_address = rf_address self._cubehandle = handler self._state = None @property def should_poll(self): """Return the polling state.""" return True @property def name(self): """Return the name of the BinarySensorDevice.""" return self._name @property def device_class(self): """Return the class of this sensor.""" return self._sensor_type @property def is_on(self): """Return true if the binary sensor is on/open.""" return self._state def update(self): """Get latest data from MAX! Cube.""" self._cubehandle.update() device = self._cubehandle.cube.device_by_rf(self._rf_address) self._state = device.is_open
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/maxcube/binary_sensor.py
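The shutter entity above is a plain polled sensor: each update() call asks the shared cube handle to refresh and then re-reads its own device by RF address. A toy model of that flow (CubeHandle and the device table are hypothetical stand-ins):

class CubeHandle:
    """Hypothetical stand-in for the shared MAX! Cube handler."""
    def __init__(self, devices):
        self._devices = devices  # rf_address -> device record
    def update(self):
        pass  # would throttle and refresh from the cube over the network
    def device_by_rf(self, rf_address):
        return self._devices[rf_address]

handle = CubeHandle({'0a1b2c': {'is_open': True}})
handle.update()
assert handle.device_by_rf('0a1b2c')['is_open'] is True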
""" A sensor platform that give you information about next departures from Ruter. For more details about this platform, please refer to the documentation at https://www.home-assistant.io/components/sensor.ruter/ """ import logging import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME from homeassistant.helpers.entity import Entity from homeassistant.helpers.aiohttp_client import async_get_clientsession REQUIREMENTS = ['pyruter==1.1.0'] _LOGGER = logging.getLogger(__name__) CONF_STOP_ID = 'stop_id' CONF_DESTINATION = 'destination' CONF_OFFSET = 'offset' DEFAULT_NAME = 'Ruter' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_STOP_ID): cv.positive_int, vol.Optional(CONF_DESTINATION): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_OFFSET, default=0): cv.positive_int, }) async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Create the sensor.""" from pyruter.api import Departures _LOGGER.warning("The API used in this sensor is shutting down soon, " "you should consider starting to use the " "'entur_public_transport' sensor instead") stop_id = config[CONF_STOP_ID] destination = config.get(CONF_DESTINATION) name = config[CONF_NAME] offset = config[CONF_OFFSET] session = async_get_clientsession(hass) ruter = Departures(hass.loop, stop_id, destination, session) sensor = [RuterSensor(ruter, name, offset)] async_add_entities(sensor, True) class RuterSensor(Entity): """Representation of a Ruter sensor.""" def __init__(self, ruter, name, offset): """Initialize the sensor.""" self.ruter = ruter self._attributes = {} self._name = name self._offset = offset self._state = None async def async_update(self): """Get the latest data from the Ruter API.""" await self.ruter.get_departures() if self.ruter.departures is None: _LOGGER.error("No data recieved from Ruter.") return try: data = self.ruter.departures[self._offset] self._state = data['time'] self._attributes['line'] = data['line'] self._attributes['destination'] = data['destination'] except (KeyError, IndexError) as error: _LOGGER.debug("Error getting data from Ruter, %s", error) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Return the icon of the sensor.""" return 'mdi:bus' @property def device_state_attributes(self): """Return attributes for the sensor.""" return self._attributes
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/sensor/ruter.py
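The sensor above picks the departure at a configurable offset and tolerates short or malformed responses via the KeyError/IndexError guard. A small standalone sketch of that selection, with the departures list as hypothetical sample data:

def pick_departure(departures, offset):
    """Return (state, attrs) like RuterSensor.async_update, or None when out of range."""
    try:
        data = departures[offset]
        return data['time'], {'line': data['line'], 'destination': data['destination']}
    except (KeyError, IndexError):
        return None

departures = [{'time': '12:05', 'line': '54', 'destination': 'Aker brygge'}]
assert pick_departure(departures, 0) == ('12:05', {'line': '54', 'destination': 'Aker brygge'})
assert pick_departure(departures, 3) is None  # offset past the end is tolerated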
""" Support for MySensors lights. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/light.mysensors/ """ from homeassistant.components import mysensors from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, Light) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.util.color import rgb_hex_to_rgb_list import homeassistant.util.color as color_util SUPPORT_MYSENSORS_RGBW = SUPPORT_COLOR | SUPPORT_WHITE_VALUE async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up the mysensors platform for lights.""" device_class_map = { 'S_DIMMER': MySensorsLightDimmer, 'S_RGB_LIGHT': MySensorsLightRGB, 'S_RGBW_LIGHT': MySensorsLightRGBW, } mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, device_class_map, async_add_entities=async_add_entities) class MySensorsLight(mysensors.device.MySensorsEntity, Light): """Representation of a MySensors Light child node.""" def __init__(self, *args): """Initialize a MySensors Light.""" super().__init__(*args) self._state = None self._brightness = None self._hs = None self._white = None @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def hs_color(self): """Return the hs color value [int, int].""" return self._hs @property def white_value(self): """Return the white value of this light between 0..255.""" return self._white @property def assumed_state(self): """Return true if unable to access real state of entity.""" return self.gateway.optimistic @property def is_on(self): """Return true if device is on.""" return self._state def _turn_on_light(self): """Turn on light child device.""" set_req = self.gateway.const.SetReq if self._state: return self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_LIGHT, 1) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = True self._values[set_req.V_LIGHT] = STATE_ON def _turn_on_dimmer(self, **kwargs): """Turn on dimmer child device.""" set_req = self.gateway.const.SetReq brightness = self._brightness if ATTR_BRIGHTNESS not in kwargs or \ kwargs[ATTR_BRIGHTNESS] == self._brightness or \ set_req.V_DIMMER not in self._values: return brightness = kwargs[ATTR_BRIGHTNESS] percent = round(100 * brightness / 255) self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DIMMER, percent) if self.gateway.optimistic: # optimistically assume that light has changed state self._brightness = brightness self._values[set_req.V_DIMMER] = percent def _turn_on_rgb_and_w(self, hex_template, **kwargs): """Turn on RGB or RGBW child device.""" rgb = list(color_util.color_hs_to_RGB(*self._hs)) white = self._white hex_color = self._values.get(self.value_type) hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None: new_rgb = color_util.color_hs_to_RGB(*hs_color) else: new_rgb = None new_white = kwargs.get(ATTR_WHITE_VALUE) if new_rgb is None and new_white is None: return if new_rgb is not None: rgb = list(new_rgb) if hex_template == '%02x%02x%02x%02x': if new_white is not None: rgb.append(new_white) else: rgb.append(white) hex_color = hex_template % tuple(rgb) if len(rgb) > 3: white = rgb.pop() self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, hex_color) if self.gateway.optimistic: # optimistically assume that light has changed state self._hs = 
color_util.color_RGB_to_hs(*rgb) self._white = white self._values[self.value_type] = hex_color async def async_turn_off(self, **kwargs): """Turn the device off.""" value_type = self.gateway.const.SetReq.V_LIGHT self.gateway.set_child_value( self.node_id, self.child_id, value_type, 0) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = False self._values[value_type] = STATE_OFF self.async_schedule_update_ha_state() def _async_update_light(self): """Update the controller with values from light child.""" value_type = self.gateway.const.SetReq.V_LIGHT self._state = self._values[value_type] == STATE_ON def _async_update_dimmer(self): """Update the controller with values from dimmer child.""" value_type = self.gateway.const.SetReq.V_DIMMER if value_type in self._values: self._brightness = round(255 * int(self._values[value_type]) / 100) if self._brightness == 0: self._state = False def _async_update_rgb_or_w(self): """Update the controller with values from RGB or RGBW child.""" value = self._values[self.value_type] color_list = rgb_hex_to_rgb_list(value) if len(color_list) > 3: self._white = color_list.pop() self._hs = color_util.color_RGB_to_hs(*color_list) class MySensorsLightDimmer(MySensorsLight): """Dimmer child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) if self.gateway.optimistic: self.async_schedule_update_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() class MySensorsLightRGB(MySensorsLight): """RGB child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_COLOR return SUPPORT_COLOR async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w('%02x%02x%02x', **kwargs) if self.gateway.optimistic: self.async_schedule_update_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() self._async_update_rgb_or_w() class MySensorsLightRGBW(MySensorsLightRGB): """RGBW child class to MySensorsLightRGB.""" # pylint: disable=too-many-ancestors @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_MYSENSORS_RGBW return SUPPORT_MYSENSORS_RGBW async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w('%02x%02x%02x%02x', **kwargs) if self.gateway.optimistic: self.async_schedule_update_ha_state()
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/mysensors/light.py
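The RGB/RGBW logic above serializes channels into a fixed-width hex string with a template ('%02x%02x%02x' or '%02x%02x%02x%02x') and scales 0-255 brightness to the 0-100 dimmer range. A self-contained sketch of both conversions (the sample values are hypothetical):

def encode_rgbw(rgb, white=None):
    """Pack channels the way _turn_on_rgb_and_w builds hex_color."""
    channels = list(rgb) + ([white] if white is not None else [])
    template = '%02x' * len(channels)
    return template % tuple(channels)

def decode_hex(hex_color):
    """Unpack two-digit hex pairs, as rgb_hex_to_rgb_list does."""
    return [int(hex_color[i:i + 2], 16) for i in range(0, len(hex_color), 2)]

assert encode_rgbw((255, 128, 0), white=64) == 'ff800040'
assert decode_hex('ff800040') == [255, 128, 0, 64]

# Dimmer percent as used by _turn_on_dimmer: 0..255 brightness -> 0..100 percent.
assert round(100 * 255 / 255) == 100
assert round(100 * 128 / 255) == 50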
""" Component for monitoring activity on a folder. For more details about this platform, refer to the documentation at https://home-assistant.io/components/folder_watcher/ """ import os import logging import voluptuous as vol from homeassistant.const import ( EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['watchdog==0.8.3'] _LOGGER = logging.getLogger(__name__) CONF_FOLDER = 'folder' CONF_PATTERNS = 'patterns' DEFAULT_PATTERN = '*' DOMAIN = "folder_watcher" CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.All(cv.ensure_list, [vol.Schema({ vol.Required(CONF_FOLDER): cv.isdir, vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): vol.All(cv.ensure_list, [cv.string]), })]) }, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Set up the folder watcher.""" conf = config[DOMAIN] for watcher in conf: path = watcher[CONF_FOLDER] patterns = watcher[CONF_PATTERNS] if not hass.config.is_allowed_path(path): _LOGGER.error("folder %s is not valid or allowed", path) return False Watcher(path, patterns, hass) return True def create_event_handler(patterns, hass): """Return the Watchdog EventHandler object.""" from watchdog.events import PatternMatchingEventHandler class EventHandler(PatternMatchingEventHandler): """Class for handling Watcher events.""" def __init__(self, patterns, hass): """Initialise the EventHandler.""" super().__init__(patterns) self.hass = hass def process(self, event): """On Watcher event, fire HA event.""" _LOGGER.debug("process(%s)", event) if not event.is_directory: folder, file_name = os.path.split(event.src_path) self.hass.bus.fire( DOMAIN, { "event_type": event.event_type, 'path': event.src_path, 'file': file_name, 'folder': folder, }) def on_modified(self, event): """File modified.""" self.process(event) def on_moved(self, event): """File moved.""" self.process(event) def on_created(self, event): """File created.""" self.process(event) def on_deleted(self, event): """File deleted.""" self.process(event) return EventHandler(patterns, hass) class Watcher(): """Class for starting Watchdog.""" def __init__(self, path, patterns, hass): """Initialise the watchdog observer.""" from watchdog.observers import Observer self._observer = Observer() self._observer.schedule( create_event_handler(patterns, hass), path, recursive=True) hass.bus.listen_once(EVENT_HOMEASSISTANT_START, self.startup) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self.shutdown) def startup(self, event): """Start the watcher.""" self._observer.start() def shutdown(self, event): """Shutdown the watcher.""" self._observer.stop() self._observer.join()
"""The tests for local file camera component.""" from unittest.mock import mock_open, patch import pytest from homeassistant.components import camera from homeassistant.components.camera import STATE_STREAMING, STATE_IDLE from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.components.camera import common @pytest.fixture def demo_camera(hass): """Initialize a demo camera platform.""" hass.loop.run_until_complete(async_setup_component(hass, 'camera', { camera.DOMAIN: { 'platform': 'demo' } })) return hass.data['camera'].get_entity('camera.demo_camera') async def test_init_state_is_streaming(hass, demo_camera): """Demo camera initialize as streaming.""" assert demo_camera.state == STATE_STREAMING mock_on_img = mock_open(read_data=b'ON') with patch('homeassistant.components.camera.demo.open', mock_on_img, create=True): image = await camera.async_get_image(hass, demo_camera.entity_id) assert mock_on_img.called assert mock_on_img.call_args_list[0][0][0][-6:] \ in ['_0.jpg', '_1.jpg', '_2.jpg', '_3.jpg'] assert image.content == b'ON' async def test_turn_on_state_back_to_streaming(hass, demo_camera): """After turn on state back to streaming.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_IDLE await common.async_turn_on(hass, demo_camera.entity_id) await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_turn_off_image(hass, demo_camera): """After turn off, Demo camera raise error.""" await common.async_turn_off(hass, demo_camera.entity_id) await hass.async_block_till_done() with pytest.raises(HomeAssistantError) as error: await camera.async_get_image(hass, demo_camera.entity_id) assert error.args[0] == 'Camera is off' async def test_turn_off_invalid_camera(hass, demo_camera): """Turn off non-exist camera should quietly fail.""" assert demo_camera.state == STATE_STREAMING await common.async_turn_off(hass, 'camera.invalid_camera') await hass.async_block_till_done() assert demo_camera.state == STATE_STREAMING async def test_motion_detection(hass): """Test motion detection services.""" # Setup platform await async_setup_component(hass, 'camera', { 'camera': { 'platform': 'demo' } }) # Fetch state and check motion detection attribute state = hass.states.get('camera.demo_camera') assert not state.attributes.get('motion_detection') # Call service to turn on motion detection common.enable_motion_detection(hass, 'camera.demo_camera') await hass.async_block_till_done() # Check if state has been updated. state = hass.states.get('camera.demo_camera') assert state.attributes.get('motion_detection')
PetePriority/home-assistant
tests/components/camera/test_demo.py
homeassistant/components/folder_watcher/__init__.py
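The watcher above can be exercised outside Home Assistant; the component is essentially the standard watchdog observer pattern plus HA event plumbing. A minimal standalone sketch (the watched path, pattern, and five-second lifetime are placeholders):

import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler

class PrintHandler(PatternMatchingEventHandler):
    """Log every matching filesystem event, mirroring EventHandler.process."""
    def on_any_event(self, event):
        if not event.is_directory:
            print(event.event_type, event.src_path)

observer = Observer()
observer.schedule(PrintHandler(patterns=['*']), '/tmp', recursive=True)
observer.start()
try:
    time.sleep(5)  # watch briefly; HA instead ties start/stop to bus events
finally:
    observer.stop()
    observer.join()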
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Define the Enhanced Character-Separated-Values (ECSV) which allows for reading and writing all the meta data associated with an astropy Table object. """ import re from collections import OrderedDict import warnings import json import numpy as np from . import core, basic from astropy.table import meta, serialize from astropy.utils.data_info import serialize_context_as from astropy.utils.exceptions import AstropyUserWarning, AstropyWarning from astropy.io.ascii.core import convert_numpy __doctest_requires__ = {'Ecsv': ['yaml']} ECSV_VERSION = '1.0' DELIMITERS = (' ', ',') ECSV_DATATYPES = ( 'bool', 'int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float16', 'float32', 'float64', 'float128', 'string') class EcsvHeader(basic.BasicHeader): """Header class for which the column definition line starts with the comment character. See the :class:`CommentedHeader` class for an example. """ def process_lines(self, lines): """Return only non-blank lines that start with the comment regexp. For these lines strip out the matching characters and leading/trailing whitespace.""" re_comment = re.compile(self.comment) for line in lines: line = line.strip() if not line: continue match = re_comment.match(line) if match: out = line[match.end():] if out: yield out else: # Stop iterating on first failed match for a non-blank line return def write(self, lines): """ Write header information in the ECSV ASCII format. This function is called at the point when preprocessing has been done to convert the input table columns to `self.cols` which is a list of `astropy.io.ascii.core.Column` objects. In particular `col.str_vals` is available for each column with the string representation of each column item for output. This format starts with a delimiter separated list of the column names in order to make this format readable by humans and simple csv-type readers. It then encodes the full table meta and column attributes and meta as YAML and pretty-prints this in the header. Finally the delimited column names are repeated again, for humans and readers that look for the *last* comment line as defining the column names. """ if self.splitter.delimiter not in DELIMITERS: raise ValueError('only space and comma are allowed for delimiter in ECSV format') # Now assemble the header dict that will be serialized by the YAML dumper header = {'cols': self.cols, 'schema': 'astropy-2.0'} if self.table_meta: header['meta'] = self.table_meta # Set the delimiter only for the non-default option(s) if self.splitter.delimiter != ' ': header['delimiter'] = self.splitter.delimiter header_yaml_lines = ([f'%ECSV {ECSV_VERSION}', '---'] + meta.get_yaml_from_header(header)) lines.extend([self.write_comment + line for line in header_yaml_lines]) lines.append(self.splitter.join([x.info.name for x in self.cols])) def write_comments(self, lines, meta): """ WRITE: Override the default write_comments to do nothing since this is handled in the custom write method. """ pass def update_meta(self, lines, meta): """ READ: Override the default update_meta to do nothing. This process is done in get_cols() for this reader. """ pass def get_cols(self, lines): """ READ: Initialize the header Column objects from the table ``lines``. 
Parameters ---------- lines : list List of table lines """ # Cache a copy of the original input lines before processing below raw_lines = lines # Extract non-blank comment (header) lines with comment character stripped lines = list(self.process_lines(lines)) # Validate that this is an ECSV file ecsv_header_re = r"""%ECSV [ ] (?P<major> \d+) \. (?P<minor> \d+) \.? (?P<bugfix> \d+)? $""" no_header_msg = ('ECSV header line like "# %ECSV <version>" not found as first line.' ' This is required for an ECSV file.') if not lines: raise core.InconsistentTableError(no_header_msg) match = re.match(ecsv_header_re, lines[0].strip(), re.VERBOSE) if not match: raise core.InconsistentTableError(no_header_msg) # ecsv_version could be constructed here, but it is not currently used. try: header = meta.get_header_from_yaml(lines) except ImportError as exc: if 'PyYAML package is required' in str(exc): warnings.warn("file looks like ECSV format but PyYAML is not installed " "so it cannot be parsed as ECSV", AstropyWarning) raise core.InconsistentTableError('unable to parse yaml in meta header' ' (PyYAML package is required)') except meta.YamlParseError: raise core.InconsistentTableError('unable to parse yaml in meta header') if 'meta' in header: self.table_meta = header['meta'] if 'delimiter' in header: delimiter = header['delimiter'] if delimiter not in DELIMITERS: raise ValueError('only space and comma are allowed for delimiter in ECSV format') self.splitter.delimiter = delimiter self.data.splitter.delimiter = delimiter # Create the list of io.ascii column objects from `header` header_cols = OrderedDict((x['name'], x) for x in header['datatype']) self.names = [x['name'] for x in header['datatype']] # Read the first non-commented line of table and split to get the CSV # header column names. This is essentially what the Basic reader does. header_line = next(super().process_lines(raw_lines)) header_names = next(self.splitter([header_line])) # Check for consistency of the ECSV vs. CSV header column names if header_names != self.names: raise core.InconsistentTableError('column names from ECSV header {} do not ' 'match names from header line of CSV data {}' .format(self.names, header_names)) # BaseHeader method to create self.cols, which is a list of # io.ascii.core.Column objects (*not* Table Column objects). self._set_cols_from_names() # Transfer attributes from the column descriptor stored in the input # header YAML metadata to the new columns to create this table. for col in self.cols: for attr in ('description', 'format', 'unit', 'meta', 'subtype'): if attr in header_cols[col.name]: setattr(col, attr, header_cols[col.name][attr]) col.dtype = header_cols[col.name]['datatype'] if col.dtype not in ECSV_DATATYPES: raise ValueError(f'datatype {col.dtype!r} of column {col.name!r} ' f'is not in allowed values {ECSV_DATATYPES}') # Subtype is written like "int64[2,null]" and we want to split this # out to "int64" and [2, None].
subtype = col.subtype if subtype and '[' in subtype: idx = subtype.index('[') col.subtype = subtype[:idx] col.shape = json.loads(subtype[idx:]) # Convert ECSV "string" to numpy "str" for attr in ('dtype', 'subtype'): if getattr(col, attr) == 'string': setattr(col, attr, 'str') # ECSV subtype of 'json' maps to numpy 'object' dtype if col.subtype == 'json': col.subtype = 'object' def _check_dtype_is_str(col): if col.dtype != 'str': raise ValueError(f'datatype of column {col.name!r} must be "string"') class EcsvOutputter(core.TableOutputter): """ After reading the input lines and processing, convert the Reader columns and metadata to an astropy.table.Table object. This overrides the default converters to be an empty list because there is no "guessing" of the conversion function. """ default_converters = [] def __call__(self, cols, meta): # Convert to a Table with all plain Column subclass columns out = super().__call__(cols, meta) # If mixin columns exist (based on the special '__mixin_columns__' # key in the table ``meta``), then use that information to construct # appropriate mixin columns and remove the original data columns. # If no __mixin_columns__ exists then this function just passes back # the input table. out = serialize._construct_mixins_from_columns(out) return out def _convert_vals(self, cols): """READ: Convert str_vals in `cols` to final arrays with correct dtypes. This is adapted from ``BaseOutputter._convert_vals``. In the case of ECSV there is no guessing and all types are known in advance. A big change is handling the possibility of JSON-encoded values, both unstructured object data and structured values that may contain masked data. """ for col in cols: try: # 1-d or N-d object columns are serialized as JSON. if col.subtype == 'object': _check_dtype_is_str(col) col_vals = [json.loads(val) for val in col.str_vals] col.data = np.empty([len(col_vals)] + col.shape, dtype=object) col.data[...] = col_vals # Variable length arrays with shape (n, m, ..., *) for fixed # n, m, .. and variable in last axis. Masked values here are # not currently supported. elif col.shape and col.shape[-1] is None: _check_dtype_is_str(col) # Empty (blank) values in original ECSV are changed to "0" # in str_vals with corresponding col.mask being created and # set accordingly. Instead use an empty list here. if hasattr(col, 'mask'): for idx in np.nonzero(col.mask)[0]: col.str_vals[idx] = '[]' # Remake as a 1-d object column of numpy ndarrays or # MaskedArray using the datatype specified in the ECSV file. col_vals = [] for str_val in col.str_vals: obj_val = json.loads(str_val) # list or nested lists try: arr_val = np.array(obj_val, dtype=col.subtype) except TypeError: # obj_val has entries that are inconsistent with # dtype. For a valid ECSV file the only possibility # is None values (indicating missing values). data = np.array(obj_val, dtype=object) # Replace all the None with an appropriate fill value mask = (data == None) # noqa: E711 kind = np.dtype(col.subtype).kind data[mask] = {'U': '', 'S': b''}.get(kind, 0) arr_val = np.ma.array(data.astype(col.subtype), mask=mask) col_vals.append(arr_val) col.shape = () col.dtype = np.dtype(object) # np.array(col_vals_arr, dtype=object) fails ?? so this workaround: col.data = np.empty(len(col_vals), dtype=object) col.data[:] = col_vals # Multidim columns with consistent shape (n, m, ...). These # might be masked. 
elif col.shape: _check_dtype_is_str(col) # Change empty (blank) values in original ECSV to something # like "[[null, null],[null,null]]" so subsequent JSON # decoding works. Delete `col.mask` so that later code in # core TableOutputter.__call__() that deals with col.mask # does not run (since handling is done here already). if hasattr(col, 'mask'): all_none_arr = np.full(shape=col.shape, fill_value=None, dtype=object) all_none_json = json.dumps(all_none_arr.tolist()) for idx in np.nonzero(col.mask)[0]: col.str_vals[idx] = all_none_json del col.mask col_vals = [json.loads(val) for val in col.str_vals] # Make a numpy object array of col_vals to look for None # (masked values) data = np.array(col_vals, dtype=object) mask = (data == None) # noqa: E711 if not np.any(mask): # No None's, just convert to required dtype col.data = data.astype(col.subtype) else: # Replace all the None with an appropriate fill value kind = np.dtype(col.subtype).kind data[mask] = {'U': '', 'S': b''}.get(kind, 0) # Finally make a MaskedArray with the filled data + mask col.data = np.ma.array(data.astype(col.subtype), mask=mask) # Regular scalar value column else: if col.subtype: warnings.warn(f'unexpected subtype {col.subtype!r} set for column ' f'{col.name!r}, using dtype={col.dtype!r} instead.', category=AstropyUserWarning) converter_func, _ = convert_numpy(col.dtype) col.data = converter_func(col.str_vals) if col.data.shape[1:] != tuple(col.shape): raise ValueError('shape mismatch between value and column specifier') except json.JSONDecodeError: raise ValueError(f'column {col.name!r} failed to convert: ' 'column value is not valid JSON') except Exception as exc: raise ValueError(f'column {col.name!r} failed to convert: {exc}') class EcsvData(basic.BasicData): def _set_fill_values(self, cols): """READ: Set the fill values of the individual cols based on fill_values of BaseData For ECSV handle the corner case of data that has been serialized using the serialize_method='data_mask' option, which writes the full data and mask directly, AND where that table includes a string column with zero-length string entries ("") which are valid data. Normally the super() method will set col.fill_value=('', '0') to replace blanks with a '0'. But for that corner case subset, instead do not do any filling. """ super()._set_fill_values(cols) # Get the serialized columns spec. It might not exist and there might # not even be any table meta, so punt in those cases. try: scs = self.header.table_meta['__serialized_columns__'] except (AttributeError, KeyError): return # Got some serialized columns, so check for string type and serialized # as a MaskedColumn. Without 'data_mask', MaskedColumn objects are # stored to ECSV as normal columns. for col in cols: if (col.dtype == 'str' and col.name in scs and scs[col.name]['__class__'] == 'astropy.table.column.MaskedColumn'): col.fill_values = {} # No data value replacement def str_vals(self): """WRITE: convert all values in table to a list of lists of strings This version considerably simplifies the base method: - No need to set fill values and column formats - No per-item formatting, just use repr() - Use JSON for object-type or multidim values - Only Column or MaskedColumn can end up as cols here. 
- Only replace masked values with "", not the generalized filling """ for col in self.cols: if len(col.shape) > 1 or col.info.dtype.kind == 'O': def format_col_item(idx): obj = col[idx] try: obj = obj.tolist() except AttributeError: pass return json.dumps(obj, separators=(',', ':')) else: def format_col_item(idx): return str(col[idx]) try: col.str_vals = [format_col_item(idx) for idx in range(len(col))] except TypeError as exc: raise TypeError(f'could not convert column {col.info.name!r}' f' to string: {exc}') from exc # Replace every masked value in a 1-d column with an empty string. # For multi-dim columns this gets done by JSON via "null". if hasattr(col, 'mask') and col.ndim == 1: for idx in col.mask.nonzero()[0]: col.str_vals[idx] = "" out = [col.str_vals for col in self.cols] return out class Ecsv(basic.Basic): """ECSV (Enhanced Character Separated Values) format table. The ECSV format allows for specification of key table and column meta-data, in particular the data type and unit. See: https://github.com/astropy/astropy-APEs/blob/main/APE6.rst Examples -------- >>> from astropy.table import Table >>> ecsv_content = '''# %ECSV 0.9 ... # --- ... # datatype: ... # - {name: a, unit: m / s, datatype: int64, format: '%03d'} ... # - {name: b, unit: km, datatype: int64, description: This is column b} ... a b ... 001 2 ... 004 3 ... ''' >>> Table.read(ecsv_content, format='ascii.ecsv') <Table length=2> a b m / s km int64 int64 ----- ----- 001 2 004 3 """ _format_name = 'ecsv' _description = 'Enhanced CSV' _io_registry_suffix = '.ecsv' header_class = EcsvHeader data_class = EcsvData outputter_class = EcsvOutputter max_ndim = None # No limit on column dimensionality def update_table_data(self, table): """ Update table columns in place if mixin columns are present. This is a hook to allow updating the table columns after name filtering but before setting up to write the data. This is currently only used by ECSV and is otherwise just a pass-through. Parameters ---------- table : `astropy.table.Table` Input table for writing Returns ------- table : `astropy.table.Table` Output table for writing """ with serialize_context_as('ecsv'): out = serialize.represent_mixins_as_columns(table) return out
# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import pytest asdf = pytest.importorskip('asdf') import io from astropy import units as u from asdf.tests import helpers # TODO: Implement defunit def test_unit(): yaml = """ unit: !unit/unit-1.0.0 "2.1798721 10-18kg m2 s-2" """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff) as ff: assert ff.tree['unit'].is_equivalent(u.Ry) buff2 = io.BytesIO() ff.write_to(buff2) buff2.seek(0) with asdf.open(buff2) as ff: assert ff.tree['unit'].is_equivalent(u.Ry)
lpsinger/astropy
astropy/io/misc/asdf/tags/unit/tests/test_unit.py
astropy/io/ascii/ecsv.py
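One detail worth pulling out of get_cols above: an ECSV subtype such as 'int64[2,null]' is split at the first '[' and the bracketed part is decoded as JSON, which is how null becomes Python None in the shape. A standalone sketch of exactly that parsing:

import json

def split_subtype(subtype):
    """Split 'int64[2,null]' into ('int64', [2, None]) as EcsvHeader.get_cols does."""
    if subtype and '[' in subtype:
        idx = subtype.index('[')
        return subtype[:idx], json.loads(subtype[idx:])
    return subtype, ()

assert split_subtype('int64[2,null]') == ('int64', [2, None])
assert split_subtype('float64') == ('float64', ())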
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The astropy.utils.iers package provides access to the tables provided by the International Earth Rotation and Reference Systems Service, in particular allowing interpolation of published UT1-UTC values for given times. These are used in `astropy.time` to provide UT1 values. The polar motions are also used for determining earth orientation for celestial-to-terrestrial coordinate transformations (in `astropy.coordinates`). """ import re from datetime import datetime from warnings import warn from urllib.parse import urlparse import numpy as np import erfa from astropy.time import Time, TimeDelta from astropy import config as _config from astropy import units as u from astropy.table import QTable, MaskedColumn from astropy.utils.data import (get_pkg_data_filename, clear_download_cache, is_url_in_cache, get_readable_fileobj) from astropy.utils.state import ScienceState from astropy import utils from astropy.utils.exceptions import AstropyWarning __all__ = ['Conf', 'conf', 'earth_orientation_table', 'IERS', 'IERS_B', 'IERS_A', 'IERS_Auto', 'FROM_IERS_B', 'FROM_IERS_A', 'FROM_IERS_A_PREDICTION', 'TIME_BEFORE_IERS_RANGE', 'TIME_BEYOND_IERS_RANGE', 'IERS_A_FILE', 'IERS_A_URL', 'IERS_A_URL_MIRROR', 'IERS_A_README', 'IERS_B_FILE', 'IERS_B_URL', 'IERS_B_README', 'IERSRangeError', 'IERSStaleWarning', 'LeapSeconds', 'IERS_LEAP_SECOND_FILE', 'IERS_LEAP_SECOND_URL', 'IETF_LEAP_SECOND_URL'] # IERS-A default file name, URL, and ReadMe with content description IERS_A_FILE = 'finals2000A.all' IERS_A_URL = 'ftp://anonymous:mail%40astropy.org@gdc.cddis.eosdis.nasa.gov/pub/products/iers/finals2000A.all' # noqa: E501 IERS_A_URL_MIRROR = 'https://datacenter.iers.org/data/9/finals2000A.all' IERS_A_README = get_pkg_data_filename('data/ReadMe.finals2000A') # IERS-B default file name, URL, and ReadMe with content description IERS_B_FILE = get_pkg_data_filename('data/eopc04_IAU2000.62-now') IERS_B_URL = 'http://hpiers.obspm.fr/iers/eop/eopc04/eopc04_IAU2000.62-now' IERS_B_README = get_pkg_data_filename('data/ReadMe.eopc04_IAU2000') # LEAP SECONDS default file name, URL, and alternative format/URL IERS_LEAP_SECOND_FILE = get_pkg_data_filename('data/Leap_Second.dat') IERS_LEAP_SECOND_URL = 'https://hpiers.obspm.fr/iers/bul/bulc/Leap_Second.dat' IETF_LEAP_SECOND_URL = 'https://www.ietf.org/timezones/data/leap-seconds.list' # Status/source values returned by IERS.ut1_utc FROM_IERS_B = 0 FROM_IERS_A = 1 FROM_IERS_A_PREDICTION = 2 TIME_BEFORE_IERS_RANGE = -1 TIME_BEYOND_IERS_RANGE = -2 MJD_ZERO = 2400000.5 INTERPOLATE_ERROR = """\ interpolating from IERS_Auto using predictive values that are more than {0} days old. Normally you should not see this error because this class automatically downloads the latest IERS-A table. Perhaps you are offline? If you understand what you are doing then this error can be suppressed by setting the auto_max_age configuration variable to ``None``: from astropy.utils.iers import conf conf.auto_max_age = None """ MONTH_ABBR = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] def download_file(*args, **kwargs): """ Overload astropy.utils.data.download_file within iers module to use a custom (longer) wait time. This just passes through ``*args`` and ``**kwargs`` after temporarily setting the download_file remote timeout to the local ``iers.conf.remote_timeout`` value. 
""" kwargs.setdefault('http_headers', {'User-Agent': 'astropy/iers', 'Accept': '*/*'}) with utils.data.conf.set_temp('remote_timeout', conf.remote_timeout): return utils.data.download_file(*args, **kwargs) def _none_to_float(value): """ Convert None to a valid floating point value. Especially for auto_max_age = None. """ return (value if value is not None else np.finfo(float).max) class IERSStaleWarning(AstropyWarning): pass class Conf(_config.ConfigNamespace): """ Configuration parameters for `astropy.utils.iers`. """ auto_download = _config.ConfigItem( True, 'Enable auto-downloading of the latest IERS data. If set to False ' 'then the local IERS-B file will be used by default (even if the ' 'full IERS file with predictions was already downloaded and cached). ' 'This parameter also controls whether internet resources will be ' 'queried to update the leap second table if the installed version is ' 'out of date. Default is True.') auto_max_age = _config.ConfigItem( 30.0, 'Maximum age (days) of predictive data before auto-downloading. ' 'See "Auto refresh behavior" in astropy.utils.iers documentation for details.' 'Default is 30.') iers_auto_url = _config.ConfigItem( IERS_A_URL, 'URL for auto-downloading IERS file data.') iers_auto_url_mirror = _config.ConfigItem( IERS_A_URL_MIRROR, 'Mirror URL for auto-downloading IERS file data.') remote_timeout = _config.ConfigItem( 10.0, 'Remote timeout downloading IERS file data (seconds).') system_leap_second_file = _config.ConfigItem( '', 'System file with leap seconds.') iers_leap_second_auto_url = _config.ConfigItem( IERS_LEAP_SECOND_URL, 'URL for auto-downloading leap seconds.') ietf_leap_second_auto_url = _config.ConfigItem( IETF_LEAP_SECOND_URL, 'Alternate URL for auto-downloading leap seconds.') conf = Conf() class IERSRangeError(IndexError): """ Any error for when dates are outside of the valid range for IERS """ class IERS(QTable): """Generic IERS table class, defining interpolation functions. Sub-classed from `astropy.table.QTable`. The table should hold columns 'MJD', 'UT1_UTC', 'dX_2000A'/'dY_2000A', and 'PM_x'/'PM_y'. """ iers_table = None """Cached table, returned if ``open`` is called without arguments.""" @classmethod def open(cls, file=None, cache=False, **kwargs): """Open an IERS table, reading it from a file if not loaded before. Parameters ---------- file : str or None full local or network path to the ascii file holding IERS data, for passing on to the ``read`` class methods (further optional arguments that are available for some IERS subclasses can be added). If None, use the default location from the ``read`` class method. cache : bool Whether to use cache. Defaults to False, since IERS files are regularly updated. Returns ------- IERS An IERS table class instance Notes ----- On the first call in a session, the table will be memoized (in the ``iers_table`` class attribute), and further calls to ``open`` will return this stored table if ``file=None`` (the default). If a table needs to be re-read from disk, pass on an explicit file location or use the (sub-class) close method and re-open. If the location is a network location it is first downloaded via download_file. For the IERS class itself, an IERS_B sub-class instance is opened. """ if file is not None or cls.iers_table is None: if file is not None: if urlparse(file).netloc: kwargs.update(file=download_file(file, cache=cache)) else: kwargs.update(file=file) # TODO: the below is really ugly and probably a bad idea. 
Instead, # there should probably be an IERSBase class, which provides # useful methods but cannot really be used on its own, and then # *perhaps* an IERS class which provides best defaults. But for # backwards compatibility, we use the IERS_B reader for IERS here. if cls is IERS: cls.iers_table = IERS_B.read(**kwargs) else: cls.iers_table = cls.read(**kwargs) return cls.iers_table @classmethod def close(cls): """Remove the IERS table from the class. This allows the table to be re-read from disk during one's session (e.g., if one finds it is out of date and has updated the file). """ cls.iers_table = None def mjd_utc(self, jd1, jd2=0.): """Turn a time to MJD, returning integer and fractional parts. Parameters ---------- jd1 : float, array, or `~astropy.time.Time` first part of two-part JD, or Time object jd2 : float or array, optional second part of two-part JD. Default is 0., ignored if jd1 is `~astropy.time.Time`. Returns ------- mjd : float or array integer part of MJD utc : float or array fractional part of MJD """ try: # see if this is a Time object jd1, jd2 = jd1.utc.jd1, jd1.utc.jd2 except Exception: pass mjd = np.floor(jd1 - MJD_ZERO + jd2) utc = jd1 - (MJD_ZERO+mjd) + jd2 return mjd, utc def ut1_utc(self, jd1, jd2=0., return_status=False): """Interpolate UT1-UTC corrections in IERS Table for given dates. Parameters ---------- jd1 : float, array of float, or `~astropy.time.Time` object first part of two-part JD, or Time object jd2 : float or float array, optional second part of two-part JD. Default is 0., ignored if jd1 is `~astropy.time.Time`. return_status : bool Whether to return status values. If False (default), raise ``IERSRangeError`` if any time is out of the range covered by the IERS table. Returns ------- ut1_utc : float or float array UT1-UTC, interpolated in IERS Table status : int or int array Status values (if ``return_status``=``True``):: ``iers.FROM_IERS_B`` ``iers.FROM_IERS_A`` ``iers.FROM_IERS_A_PREDICTION`` ``iers.TIME_BEFORE_IERS_RANGE`` ``iers.TIME_BEYOND_IERS_RANGE`` """ return self._interpolate(jd1, jd2, ['UT1_UTC'], self.ut1_utc_source if return_status else None) def dcip_xy(self, jd1, jd2=0., return_status=False): """Interpolate CIP corrections in IERS Table for given dates. Parameters ---------- jd1 : float, array of float, or `~astropy.time.Time` object first part of two-part JD, or Time object jd2 : float or float array, optional second part of two-part JD (default 0., ignored if jd1 is Time) return_status : bool Whether to return status values. If False (default), raise ``IERSRangeError`` if any time is out of the range covered by the IERS table. Returns ------- D_x : `~astropy.units.Quantity` ['angle'] x component of CIP correction for the requested times. D_y : `~astropy.units.Quantity` ['angle'] y component of CIP correction for the requested times status : int or int array Status values (if ``return_status``=``True``):: ``iers.FROM_IERS_B`` ``iers.FROM_IERS_A`` ``iers.FROM_IERS_A_PREDICTION`` ``iers.TIME_BEFORE_IERS_RANGE`` ``iers.TIME_BEYOND_IERS_RANGE`` """ return self._interpolate(jd1, jd2, ['dX_2000A', 'dY_2000A'], self.dcip_source if return_status else None) def pm_xy(self, jd1, jd2=0., return_status=False): """Interpolate polar motions from IERS Table for given dates. Parameters ---------- jd1 : float, array of float, or `~astropy.time.Time` object first part of two-part JD, or Time object jd2 : float or float array, optional second part of two-part JD. Default is 0., ignored if jd1 is `~astropy.time.Time`. 
return_status : bool Whether to return status values. If False (default), raise ``IERSRangeError`` if any time is out of the range covered by the IERS table. Returns ------- PM_x : `~astropy.units.Quantity` ['angle'] x component of polar motion for the requested times. PM_y : `~astropy.units.Quantity` ['angle'] y component of polar motion for the requested times. status : int or int array Status values (if ``return_status``=``True``):: ``iers.FROM_IERS_B`` ``iers.FROM_IERS_A`` ``iers.FROM_IERS_A_PREDICTION`` ``iers.TIME_BEFORE_IERS_RANGE`` ``iers.TIME_BEYOND_IERS_RANGE`` """ return self._interpolate(jd1, jd2, ['PM_x', 'PM_y'], self.pm_source if return_status else None) def _check_interpolate_indices(self, indices_orig, indices_clipped, max_input_mjd): """ Check that the indices from interpolation match those after clipping to the valid table range. This method gets overridden in the IERS_Auto class because it has different requirements. """ if np.any(indices_orig != indices_clipped): raise IERSRangeError('(some) times are outside of range covered ' 'by IERS table.') def _interpolate(self, jd1, jd2, columns, source=None): mjd, utc = self.mjd_utc(jd1, jd2) # enforce array is_scalar = not hasattr(mjd, '__array__') or mjd.ndim == 0 if is_scalar: mjd = np.array([mjd]) utc = np.array([utc]) elif mjd.size == 0: # Short-cut empty input. return np.array([]) self._refresh_table_as_needed(mjd) # For typical format, will always find a match (since MJD are integer) # hence, important to define which side we will be; this ensures # self['MJD'][i-1]<=mjd<self['MJD'][i] i = np.searchsorted(self['MJD'].value, mjd, side='right') # Get index to MJD at or just below given mjd, clipping to ensure we # stay in range of table (status will be set below for those outside) i1 = np.clip(i, 1, len(self) - 1) i0 = i1 - 1 mjd_0, mjd_1 = self['MJD'][i0].value, self['MJD'][i1].value results = [] for column in columns: val_0, val_1 = self[column][i0], self[column][i1] d_val = val_1 - val_0 if column == 'UT1_UTC': # Check & correct for possible leap second (correcting diff., # not 1st point, since jump can only happen right at 2nd point) d_val -= d_val.round() # Linearly interpolate (which is what TEMPO does for UT1-UTC, but # may want to follow IERS gazette #13 for more precise # interpolation and correction for tidal effects; # https://maia.usno.navy.mil/iers-gaz13) val = val_0 + (mjd - mjd_0 + utc) / (mjd_1 - mjd_0) * d_val # Do not extrapolate outside range, instead just propagate last values. val[i == 0] = self[column][0] val[i == len(self)] = self[column][-1] if is_scalar: val = val[0] results.append(val) if source: # Set status to source, using the routine passed in. status = source(i1) # Check for out of range status[i == 0] = TIME_BEFORE_IERS_RANGE status[i == len(self)] = TIME_BEYOND_IERS_RANGE if is_scalar: status = status[0] results.append(status) return results else: self._check_interpolate_indices(i1, i, np.max(mjd)) return results[0] if len(results) == 1 else results def _refresh_table_as_needed(self, mjd): """ Potentially update the IERS table in place depending on the requested time values in ``mdj`` and the time span of the table. The base behavior is not to update the table. ``IERS_Auto`` overrides this method. """ pass def ut1_utc_source(self, i): """Source for UT1-UTC. To be overridden by subclass.""" return np.zeros_like(i) def dcip_source(self, i): """Source for CIP correction. To be overridden by subclass.""" return np.zeros_like(i) def pm_source(self, i): """Source for polar motion. 
To be overridden by subclass.""" return np.zeros_like(i) @property def time_now(self): """ Property to provide the current time, but also allow for explicitly setting the _time_now attribute for testing purposes. """ try: return self._time_now except Exception: return Time.now() def _convert_col_for_table(self, col): # Fill masked columns with units to avoid dropped-mask warnings # when converting to Quantity. # TODO: Once we support masked quantities, we can drop this and # in the code below replace b_bad with table['UT1_UTC_B'].mask, etc. if (getattr(col, 'unit', None) is not None and isinstance(col, MaskedColumn)): col = col.filled(np.nan) return super()._convert_col_for_table(col) class IERS_A(IERS): """IERS Table class targeted to IERS A, provided by USNO. These include rapid turnaround and predicted times. See https://datacenter.iers.org/eop.php Notes ----- The IERS A file is not part of astropy. It can be downloaded from ``iers.IERS_A_URL`` or ``iers.IERS_A_URL_MIRROR``. See ``iers.__doc__`` for instructions on use in ``Time``, etc. """ iers_table = None @classmethod def _combine_a_b_columns(cls, iers_a): """ Return a new table with appropriate combination of IERS_A and B columns. """ # IERS A has some rows at the end that hold nothing but dates & MJD # presumably to be filled later. Exclude those a priori -- there # should at least be a predicted UT1-UTC and PM! table = iers_a[np.isfinite(iers_a['UT1_UTC_A']) & (iers_a['PolPMFlag_A'] != '')] # This does nothing for IERS_A, but allows IERS_Auto to ensure the # IERS B values in the table are consistent with the true ones. table = cls._substitute_iers_b(table) # Combine A and B columns, using B where possible. b_bad = np.isnan(table['UT1_UTC_B']) table['UT1_UTC'] = np.where(b_bad, table['UT1_UTC_A'], table['UT1_UTC_B']) table['UT1Flag'] = np.where(b_bad, table['UT1Flag_A'], 'B') # Repeat for polar motions. b_bad = np.isnan(table['PM_X_B']) | np.isnan(table['PM_Y_B']) table['PM_x'] = np.where(b_bad, table['PM_x_A'], table['PM_X_B']) table['PM_y'] = np.where(b_bad, table['PM_y_A'], table['PM_Y_B']) table['PolPMFlag'] = np.where(b_bad, table['PolPMFlag_A'], 'B') b_bad = np.isnan(table['dX_2000A_B']) | np.isnan(table['dY_2000A_B']) table['dX_2000A'] = np.where(b_bad, table['dX_2000A_A'], table['dX_2000A_B']) table['dY_2000A'] = np.where(b_bad, table['dY_2000A_A'], table['dY_2000A_B']) table['NutFlag'] = np.where(b_bad, table['NutFlag_A'], 'B') # Get the table index for the first row that has predictive values # PolPMFlag_A IERS (I) or Prediction (P) flag for # Bull. A polar motion values # UT1Flag_A IERS (I) or Prediction (P) flag for # Bull. A UT1-UTC values # Since only 'P' and 'I' are possible and 'P' is guaranteed to come # after 'I', we can use searchsorted for 100 times speed up over # finding the first index where the flag equals 'P'. p_index = min(np.searchsorted(table['UT1Flag_A'], 'P'), np.searchsorted(table['PolPMFlag_A'], 'P')) table.meta['predictive_index'] = p_index table.meta['predictive_mjd'] = table['MJD'][p_index].value return table @classmethod def _substitute_iers_b(cls, table): # See documentation in IERS_Auto. return table @classmethod def read(cls, file=None, readme=None): """Read IERS-A table from a finals2000a.* file provided by USNO. Parameters ---------- file : str full path to ascii file holding IERS-A data. Defaults to ``iers.IERS_A_FILE``. readme : str full path to ascii file holding CDS-style readme. Defaults to package version, ``iers.IERS_A_README``. 
Returns ------- ``IERS_A`` class instance """ if file is None: file = IERS_A_FILE if readme is None: readme = IERS_A_README iers_a = super().read(file, format='cds', readme=readme) # Combine the A and B data for UT1-UTC and PM columns table = cls._combine_a_b_columns(iers_a) table.meta['data_path'] = file table.meta['readme_path'] = readme return table def ut1_utc_source(self, i): """Set UT1-UTC source flag for entries in IERS table""" ut1flag = self['UT1Flag'][i] source = np.ones_like(i) * FROM_IERS_B source[ut1flag == 'I'] = FROM_IERS_A source[ut1flag == 'P'] = FROM_IERS_A_PREDICTION return source def dcip_source(self, i): """Set CIP correction source flag for entries in IERS table""" nutflag = self['NutFlag'][i] source = np.ones_like(i) * FROM_IERS_B source[nutflag == 'I'] = FROM_IERS_A source[nutflag == 'P'] = FROM_IERS_A_PREDICTION return source def pm_source(self, i): """Set polar motion source flag for entries in IERS table""" pmflag = self['PolPMFlag'][i] source = np.ones_like(i) * FROM_IERS_B source[pmflag == 'I'] = FROM_IERS_A source[pmflag == 'P'] = FROM_IERS_A_PREDICTION return source class IERS_B(IERS): """IERS Table class targeted to IERS B, provided by IERS itself. These are final values; see https://www.iers.org/IERS/EN/Home/home_node.html Notes ----- If the package IERS B file (```iers.IERS_B_FILE``) is out of date, a new version can be downloaded from ``iers.IERS_B_URL``. """ iers_table = None @classmethod def read(cls, file=None, readme=None, data_start=14): """Read IERS-B table from a eopc04_iau2000.* file provided by IERS. Parameters ---------- file : str full path to ascii file holding IERS-B data. Defaults to package version, ``iers.IERS_B_FILE``. readme : str full path to ascii file holding CDS-style readme. Defaults to package version, ``iers.IERS_B_README``. data_start : int starting row. Default is 14, appropriate for standard IERS files. Returns ------- ``IERS_B`` class instance """ if file is None: file = IERS_B_FILE if readme is None: readme = IERS_B_README table = super().read(file, format='cds', readme=readme, data_start=data_start) table.meta['data_path'] = file table.meta['readme_path'] = readme return table def ut1_utc_source(self, i): """Set UT1-UTC source flag for entries in IERS table""" return np.ones_like(i) * FROM_IERS_B def dcip_source(self, i): """Set CIP correction source flag for entries in IERS table""" return np.ones_like(i) * FROM_IERS_B def pm_source(self, i): """Set PM source flag for entries in IERS table""" return np.ones_like(i) * FROM_IERS_B class IERS_Auto(IERS_A): """ Provide most-recent IERS data and automatically handle downloading of updated values as necessary. """ iers_table = None @classmethod def open(cls): """If the configuration setting ``astropy.utils.iers.conf.auto_download`` is set to True (default), then open a recent version of the IERS-A table with predictions for UT1-UTC and polar motion out to approximately one year from now. If the available version of this file is older than ``astropy.utils.iers.conf.auto_max_age`` days old (or non-existent) then it will be downloaded over the network and cached. If the configuration setting ``astropy.utils.iers.conf.auto_download`` is set to False then ``astropy.utils.iers.IERS()`` is returned. This is normally the IERS-B table that is supplied with astropy. On the first call in a session, the table will be memoized (in the ``iers_table`` class attribute), and further calls to ``open`` will return this stored table. 
Returns ------- `~astropy.table.QTable` instance With IERS (Earth rotation) data columns """ if not conf.auto_download: cls.iers_table = IERS_B.open() return cls.iers_table all_urls = (conf.iers_auto_url, conf.iers_auto_url_mirror) if cls.iers_table is not None: # If the URL has changed, we need to redownload the file, so we # should ignore the internally cached version. if cls.iers_table.meta.get('data_url') in all_urls: return cls.iers_table try: filename = download_file(all_urls[0], sources=all_urls, cache=True) except Exception as err: # Issue a warning here, perhaps user is offline. An exception # will be raised downstream when actually trying to interpolate # predictive values. warn(AstropyWarning( f'failed to download {" and ".join(all_urls)}, ' f'using local IERS-B: {err}')) cls.iers_table = IERS_B.open() return cls.iers_table cls.iers_table = cls.read(file=filename) cls.iers_table.meta['data_url'] = all_urls[0] return cls.iers_table def _check_interpolate_indices(self, indices_orig, indices_clipped, max_input_mjd): """Check that the indices from interpolation match those after clipping to the valid table range. The IERS_Auto class is exempted as long as it has sufficiently recent available data so the clipped interpolation is always within the confidence bounds of current Earth rotation knowledge. """ predictive_mjd = self.meta['predictive_mjd'] # See explanation in _refresh_table_as_needed for these conditions auto_max_age = _none_to_float(conf.auto_max_age) if (max_input_mjd > predictive_mjd and self.time_now.mjd - predictive_mjd > auto_max_age): raise ValueError(INTERPOLATE_ERROR.format(auto_max_age)) def _refresh_table_as_needed(self, mjd): """Potentially update the IERS table in place depending on the requested time values in ``mjd`` and the time span of the table. For IERS_Auto the behavior is that the table is refreshed from the IERS server if both the following apply: - Any of the requested IERS values are predictive. The IERS-A table contains predictive data out for a year after the available definitive values. - The first predictive values are at least ``conf.auto_max_age days`` old. In other words the IERS-A table was created by IERS long enough ago that it can be considered stale for predictions. """ max_input_mjd = np.max(mjd) now_mjd = self.time_now.mjd # IERS-A table contains predictive data out for a year after # the available definitive values. fpi = self.meta['predictive_index'] predictive_mjd = self.meta['predictive_mjd'] # Update table in place if necessary auto_max_age = _none_to_float(conf.auto_max_age) # If auto_max_age is smaller than IERS update time then repeated downloads may # occur without getting updated values (giving a IERSStaleWarning). if auto_max_age < 10: raise ValueError('IERS auto_max_age configuration value must be larger than 10 days') if (max_input_mjd > predictive_mjd and (now_mjd - predictive_mjd) > auto_max_age): all_urls = (conf.iers_auto_url, conf.iers_auto_url_mirror) # Get the latest version try: filename = download_file( all_urls[0], sources=all_urls, cache="update") except Exception as err: # Issue a warning here, perhaps user is offline. An exception # will be raised downstream when actually trying to interpolate # predictive values. warn(AstropyWarning( f'failed to download {" and ".join(all_urls)}: {err}.\n' 'A coordinate or time-related ' 'calculation might be compromised or fail because the dates are ' 'not covered by the available IERS file. 
See the ' '"IERS data access" section of the astropy documentation ' 'for additional information on working offline.')) return new_table = self.__class__.read(file=filename) new_table.meta['data_url'] = str(all_urls[0]) # New table has new values? if new_table['MJD'][-1] > self['MJD'][-1]: # Replace *replace* current values from the first predictive index through # the end of the current table. This replacement is much faster than just # deleting all rows and then using add_row for the whole duration. new_fpi = np.searchsorted(new_table['MJD'].value, predictive_mjd, side='right') n_replace = len(self) - fpi self[fpi:] = new_table[new_fpi:new_fpi + n_replace] # Sanity check for continuity if new_table['MJD'][new_fpi + n_replace] - self['MJD'][-1] != 1.0 * u.d: raise ValueError('unexpected gap in MJD when refreshing IERS table') # Now add new rows in place for row in new_table[new_fpi + n_replace:]: self.add_row(row) self.meta.update(new_table.meta) else: warn(IERSStaleWarning( 'IERS_Auto predictive values are older than {} days but downloading ' 'the latest table did not find newer values'.format(conf.auto_max_age))) @classmethod def _substitute_iers_b(cls, table): """Substitute IERS B values with those from a real IERS B table. IERS-A has IERS-B values included, but for reasons unknown these do not match the latest IERS-B values (see comments in #4436). Here, we use the bundled astropy IERS-B table to overwrite the values in the downloaded IERS-A table. """ iers_b = IERS_B.open() # Substitute IERS-B values for existing B values in IERS-A table mjd_b = table['MJD'][np.isfinite(table['UT1_UTC_B'])] i0 = np.searchsorted(iers_b['MJD'], mjd_b[0], side='left') i1 = np.searchsorted(iers_b['MJD'], mjd_b[-1], side='right') iers_b = iers_b[i0:i1] n_iers_b = len(iers_b) # If there is overlap then replace IERS-A values from available IERS-B if n_iers_b > 0: # Sanity check that we are overwriting the correct values if not u.allclose(table['MJD'][:n_iers_b], iers_b['MJD']): raise ValueError('unexpected mismatch when copying ' 'IERS-B values into IERS-A table.') # Finally do the overwrite table['UT1_UTC_B'][:n_iers_b] = iers_b['UT1_UTC'] table['PM_X_B'][:n_iers_b] = iers_b['PM_x'] table['PM_Y_B'][:n_iers_b] = iers_b['PM_y'] table['dX_2000A_B'][:n_iers_b] = iers_b['dX_2000A'] table['dY_2000A_B'][:n_iers_b] = iers_b['dY_2000A'] return table class earth_orientation_table(ScienceState): """Default IERS table for Earth rotation and reference systems service. These tables are used to calculate the offsets between ``UT1`` and ``UTC`` and for conversion to Earth-based coordinate systems. The state itself is an IERS table, as an instance of one of the `~astropy.utils.iers.IERS` classes. The default, the auto-updating `~astropy.utils.iers.IERS_Auto` class, should suffice for most purposes. Examples -------- To temporarily use the IERS-B file packaged with astropy:: >>> from astropy.utils import iers >>> from astropy.time import Time >>> iers_b = iers.IERS_B.open(iers.IERS_B_FILE) >>> with iers.earth_orientation_table.set(iers_b): ... 
print(Time('2000-01-01').ut1.isot) 2000-01-01T00:00:00.355 To use the most recent IERS-A file for the whole session:: >>> iers_a = iers.IERS_A.open(iers.IERS_A_URL) # doctest: +SKIP >>> iers.earth_orientation_table.set(iers_a) # doctest: +SKIP <ScienceState earth_orientation_table: <IERS_A length=17463>...> To go back to the default (of `~astropy.utils.iers.IERS_Auto`):: >>> iers.earth_orientation_table.set(None) # doctest: +SKIP <ScienceState earth_orientation_table: <IERS_Auto length=17428>...> """ _value = None @classmethod def validate(cls, value): if value is None: value = IERS_Auto.open() if not isinstance(value, IERS): raise ValueError("earth_orientation_table requires an IERS Table.") return value class LeapSeconds(QTable): """Leap seconds class, holding TAI-UTC differences. The table should hold columns 'year', 'month', 'tai_utc'. Methods are provided to initialize the table from IERS ``Leap_Second.dat``, IETF/ntp ``leap-seconds.list``, or built-in ERFA/SOFA, and to update the list used by ERFA. Notes ----- Astropy has a built-in ``iers.IERS_LEAP_SECONDS_FILE``. Up to date versions can be downloaded from ``iers.IERS_LEAP_SECONDS_URL`` or ``iers.LEAP_SECONDS_LIST_URL``. Many systems also store a version of ``leap-seconds.list`` for use with ``ntp`` (e.g., on Debian/Ubuntu systems, ``/usr/share/zoneinfo/leap-seconds.list``). To prevent querying internet resources if the available local leap second file(s) are out of date, set ``iers.conf.auto_download = False``. This must be done prior to performing any ``Time`` scale transformations related to UTC (e.g. converting from UTC to TAI). """ # Note: Time instances in this class should use scale='tai' to avoid # needing leap seconds in their creation or interpretation. _re_expires = re.compile(r'^#.*File expires on[:\s]+(\d+\s\w+\s\d+)\s*$') _expires = None _auto_open_files = ['erfa', IERS_LEAP_SECOND_FILE, 'system_leap_second_file', 'iers_leap_second_auto_url', 'ietf_leap_second_auto_url'] """Files or conf attributes to try in auto_open.""" @classmethod def open(cls, file=None, cache=False): """Open a leap-second list. Parameters ---------- file : path-like or None Full local or network path to the file holding leap-second data, for passing on to the various ``from_`` class methods. If 'erfa', return the data used by the ERFA library. If `None`, use default locations from file and configuration to find a table that is not expired. cache : bool Whether to use cache. Defaults to False, since leap-second files are regularly updated. Returns ------- leap_seconds : `~astropy.utils.iers.LeapSeconds` Table with 'year', 'month', and 'tai_utc' columns, plus possibly others. Notes ----- Bulletin C is released about 10 days after a possible leap second is introduced, i.e., mid-January or mid-July. Expiration days are thus generally at least 150 days after the present. For the auto-loading, a list comprised of the table shipped with astropy, and files and URLs in `~astropy.utils.iers.Conf` are tried, returning the first that is sufficiently new, or the newest among them all. """ if file is None: return cls.auto_open() if file.lower() == 'erfa': return cls.from_erfa() if urlparse(file).netloc: file = download_file(file, cache=cache) # Just try both reading methods. try: return cls.from_iers_leap_seconds(file) except Exception: return cls.from_leap_seconds_list(file) @staticmethod def _today(): # Get current day in scale='tai' without going through a scale change # (so we do not need leap seconds). 
s = '{0.year:04d}-{0.month:02d}-{0.day:02d}'.format(datetime.utcnow()) return Time(s, scale='tai', format='iso', out_subfmt='date') @classmethod def auto_open(cls, files=None): """Attempt to get an up-to-date leap-second list. The routine will try the files in sequence until it finds one whose expiration date is "good enough" (see below). If none are good enough, it returns the one with the most recent expiration date, warning if that file is expired. For remote files that are cached already, the cached file is tried first before attempting to retrieve it again. Parameters ---------- files : list of path-like, optional List of files/URLs to attempt to open. By default, uses ``cls._auto_open_files``. Returns ------- leap_seconds : `~astropy.utils.iers.LeapSeconds` Up to date leap-second table Notes ----- Bulletin C is released about 10 days after a possible leap second is introduced, i.e., mid-January or mid-July. Expiration days are thus generally at least 150 days after the present. We look for a file that expires more than 180 - `~astropy.utils.iers.Conf.auto_max_age` after the present. """ good_enough = cls._today() + TimeDelta(180-_none_to_float(conf.auto_max_age), format='jd') if files is None: # Basic files to go over (entries in _auto_open_files can be # configuration items, which we want to be sure are up to date). files = [getattr(conf, f, f) for f in cls._auto_open_files] # Remove empty entries. files = [f for f in files if f] # Our trials start with normal files and remote ones that are # already in cache. The bools here indicate that the cache # should be used. trials = [(f, True) for f in files if not urlparse(f).netloc or is_url_in_cache(f)] # If we are allowed to download, we try downloading new versions # if none of the above worked. if conf.auto_download: trials += [(f, False) for f in files if urlparse(f).netloc] self = None err_list = [] # Go through all entries, and return the first one that # is not expired, or the most up to date one. for f, allow_cache in trials: if not allow_cache: clear_download_cache(f) try: trial = cls.open(f, cache=True) except Exception as exc: err_list.append(exc) continue if self is None or trial.expires > self.expires: self = trial self.meta['data_url'] = str(f) if self.expires > good_enough: break if self is None: raise ValueError('none of the files could be read. The ' 'following errors were raised:\n' + str(err_list)) if self.expires < self._today(): warn('leap-second file is expired.', IERSStaleWarning) return self @property def expires(self): """The limit of validity of the table.""" return self._expires @classmethod def _read_leap_seconds(cls, file, **kwargs): """Read a file, identifying expiration by matching 'File expires'""" expires = None # Find expiration date. with get_readable_fileobj(file) as fh: lines = fh.readlines() for line in lines: match = cls._re_expires.match(line) if match: day, month, year = match.groups()[0].split() month_nb = MONTH_ABBR.index(month[:3]) + 1 expires = Time(f'{year}-{month_nb:02d}-{day}', scale='tai', out_subfmt='date') break else: raise ValueError(f'did not find expiration date in {file}') self = cls.read(lines, format='ascii.no_header', **kwargs) self._expires = expires return self @classmethod def from_iers_leap_seconds(cls, file=IERS_LEAP_SECOND_FILE): """Create a table from a file like the IERS ``Leap_Second.dat``. Parameters ---------- file : path-like, optional Full local or network path to the file holding leap-second data in a format consistent with that used by IERS. 
By default, uses ``iers.IERS_LEAP_SECOND_FILE``.

        Notes
        -----
        The file *must* contain the expiration date in a comment line, like
        '#  File expires on 28 June 2020'
        """
        return cls._read_leap_seconds(
            file, names=['mjd', 'day', 'month', 'year', 'tai_utc'])

    @classmethod
    def from_leap_seconds_list(cls, file):
        """Create a table from a file like the IETF ``leap-seconds.list``.

        Parameters
        ----------
        file : path-like, optional
            Full local or network path to the file holding leap-second data
            in a format consistent with that used by IETF.  Up to date versions
            can be retrieved from ``iers.IETF_LEAP_SECOND_URL``.

        Notes
        -----
        The file *must* contain the expiration date in a comment line, like
        '# File expires on:  28 June 2020'
        """
        from astropy.io.ascii import convert_numpy  # Here to avoid circular import

        names = ['ntp_seconds', 'tai_utc', 'comment', 'day', 'month', 'year']
        # Note: ntp_seconds does not fit in 32 bit, so causes problems on
        # 32-bit systems without the np.int64 converter.
        self = cls._read_leap_seconds(
            file, names=names, include_names=names[:2],
            converters={'ntp_seconds': [convert_numpy(np.int64)]})
        self['mjd'] = (self['ntp_seconds']/86400 + 15020).round()
        # Note: cannot use Time.ymdhms, since that might require leap seconds.
        isot = Time(self['mjd'], format='mjd', scale='tai').isot
        ymd = np.array([[int(part) for part in t.partition('T')[0].split('-')]
                        for t in isot])
        self['year'], self['month'], self['day'] = ymd.T
        return self

    @classmethod
    def from_erfa(cls, built_in=False):
        """Create table from the leap-second list in ERFA.

        Parameters
        ----------
        built_in : bool
            If `False` (default), retrieve the list currently used by ERFA,
            which may have been updated.  If `True`, retrieve the list shipped
            with erfa.
        """
        current = cls(erfa.leap_seconds.get())
        current._expires = Time('{0.year:04d}-{0.month:02d}-{0.day:02d}'
                                .format(erfa.leap_seconds.expires), scale='tai')
        if not built_in:
            return current

        try:
            erfa.leap_seconds.set(None)  # reset to defaults
            return cls.from_erfa(built_in=False)
        finally:
            erfa.leap_seconds.set(current)

    def update_erfa_leap_seconds(self, initialize_erfa=False):
        """Add any leap seconds not already present to the ERFA table.

        This method matches leap seconds with those present in the ERFA table,
        and extends the latter as necessary.

        Parameters
        ----------
        initialize_erfa : bool, or 'only', or 'empty'
            Initialize the ERFA leap second table to its built-in value before
            trying to expand it.  This is generally not needed but can help
            in case it somehow got corrupted.  If equal to 'only', the ERFA
            table is reinitialized and no attempt is made to update it.
            If 'empty', the leap second table is emptied before updating, i.e.,
            it is overwritten altogether (note that this may break things in
            surprising ways, as most leap second tables do not include
            pre-1970 pseudo leap-seconds; you were warned).

        Returns
        -------
        n_update : int
            Number of items updated.

        Raises
        ------
        ValueError
            If the leap seconds in the table are not on 1st of January or
            July, or if the matches are inconsistent.  This would normally
            suggest a corrupted leap second table, but might also indicate
            that the ERFA table was corrupted.  If needed, the ERFA table can
            be reset by calling this method with an appropriate value for
            ``initialize_erfa``.
        """
        if initialize_erfa == 'empty':
            # Initialize to empty and update is the same as overwrite.
            erfa.leap_seconds.set(self)
            return len(self)

        if initialize_erfa:
            erfa.leap_seconds.set()
            if initialize_erfa == 'only':
                return 0

        return erfa.leap_seconds.update(self)
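To make the interpolation API above concrete, here is a minimal usage sketch (not part of the source; it assumes only the IERS-B table bundled with astropy and the constants defined above):

from astropy.time import Time
from astropy.utils import iers

t = Time('2010-01-01T00:00:00', scale='utc')
iers_b = iers.IERS_B.open()   # memoized in IERS_B.iers_table after the first call
ut1_utc, status = iers_b.ut1_utc(t, return_status=True)
print(ut1_utc)                      # UT1-UTC as a Quantity in seconds
print(status == iers.FROM_IERS_B)   # True: the value came from IERS-B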
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

import pytest

asdf = pytest.importorskip('asdf')

import io

from astropy import units as u

from asdf.tests import helpers

# TODO: Implement defunit

def test_unit():
    yaml = """
unit: !unit/unit-1.0.0 "2.1798721 10-18kg m2 s-2"
"""

    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff) as ff:
        assert ff.tree['unit'].is_equivalent(u.Ry)

        buff2 = io.BytesIO()
        ff.write_to(buff2)
        buff2.seek(0)
        with asdf.open(buff2) as ff:
            assert ff.tree['unit'].is_equivalent(u.Ry)
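The same round trip can be sketched without the yaml_to_asdf helper, under the assumption that astropy's ASDF unit tag is registered with asdf (as the test above requires):

import io

import asdf
from astropy import units as u

buff = io.BytesIO()
asdf.AsdfFile({'unit': u.Ry}).write_to(buff)    # serialize a unit-bearing tree
buff.seek(0)
with asdf.open(buff) as ff:
    assert ff.tree['unit'].is_equivalent(u.Ry)  # read back and compare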
lpsinger/astropy
astropy/io/misc/asdf/tags/unit/tests/test_unit.py
astropy/utils/iers/iers.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module contains simple input/output related functionality that is not
part of a larger framework or standard.
"""

import pickle

__all__ = ['fnpickle', 'fnunpickle']


def fnunpickle(fileorname, number=0):
    """ Unpickle pickled objects from a specified file and return the contents.

    Parameters
    ----------
    fileorname : str or file-like
        The file name or file from which to unpickle objects. If a file object,
        it should have been opened in binary mode.
    number : int
        If 0, a single object will be returned (the first in the file). If >0,
        this specifies the number of objects to be unpickled, and a list will
        be returned with exactly that many objects. If <0, all objects in the
        file will be unpickled and returned as a list.

    Raises
    ------
    EOFError
        If ``number`` is >0 and there are fewer than ``number`` objects in the
        pickled file.

    Returns
    -------
    contents : obj or list
        If ``number`` is 0, this is an individual object - the first one
        unpickled from the file. Otherwise, it is a list of objects unpickled
        from the file.
    """
    if isinstance(fileorname, str):
        f = open(fileorname, 'rb')
        close = True
    else:
        f = fileorname
        close = False

    try:
        if number > 0:  # get that number
            res = []
            for i in range(number):
                res.append(pickle.load(f))
        elif number < 0:  # get all objects
            res = []
            eof = False
            while not eof:
                try:
                    res.append(pickle.load(f))
                except EOFError:
                    eof = True
        else:  # number==0
            res = pickle.load(f)
    finally:
        if close:
            f.close()

    return res


def fnpickle(object, fileorname, protocol=None, append=False):
    """Pickle an object to a specified file.

    Parameters
    ----------
    object
        The python object to pickle.
    fileorname : str or file-like
        The filename or file into which the `object` should be pickled. If a
        file object, it should have been opened in binary mode.
    protocol : int or None
        Pickle protocol to use - see the :mod:`pickle` module for details on
        these options. If None, the most recent protocol will be used.
    append : bool
        If True, the object is appended to the end of the file, otherwise the
        file will be overwritten (if a file object is given instead of a file
        name, this has no effect).
    """
    if protocol is None:
        protocol = pickle.HIGHEST_PROTOCOL

    if isinstance(fileorname, str):
        f = open(fileorname, 'ab' if append else 'wb')
        close = True
    else:
        f = fileorname
        close = False

    try:
        pickle.dump(object, f, protocol=protocol)
    finally:
        if close:
            f.close()
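A short round-trip sketch of the two helpers above (the file name and pickled values are arbitrary examples):

import os
import tempfile

from astropy.io.misc.pickle_helpers import fnpickle, fnunpickle

fname = os.path.join(tempfile.mkdtemp(), 'objs.pkl')
fnpickle({'a': 1}, fname)                    # write the first object
fnpickle([2, 3], fname, append=True)         # append a second one
assert fnunpickle(fname) == {'a': 1}         # number=0: first object only
assert fnunpickle(fname, number=-1) == [{'a': 1}, [2, 3]]   # all objects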
# Licensed under a 3-clause BSD style license - see LICENSE.rst import itertools import pytest import numpy as np from numpy.testing import assert_almost_equal, assert_allclose from astropy import units as u from astropy.convolution.convolve import convolve, convolve_fft from astropy.convolution.kernels import Gaussian2DKernel, Box2DKernel, Tophat2DKernel from astropy.convolution.kernels import Moffat2DKernel from astropy.utils.exceptions import AstropyDeprecationWarning SHAPES_ODD = [[15, 15], [31, 31]] SHAPES_EVEN = [[8, 8], [16, 16], [32, 32]] # FIXME: not used ?! NOSHAPE = [[None, None]] WIDTHS = [2, 3, 4, 5] KERNELS = [] for shape in SHAPES_ODD + NOSHAPE: for width in WIDTHS: KERNELS.append(Gaussian2DKernel(width, x_size=shape[0], y_size=shape[1], mode='oversample', factor=10)) KERNELS.append(Box2DKernel(width, x_size=shape[0], y_size=shape[1], mode='oversample', factor=10)) KERNELS.append(Tophat2DKernel(width, x_size=shape[0], y_size=shape[1], mode='oversample', factor=10)) KERNELS.append(Moffat2DKernel(width, 2, x_size=shape[0], y_size=shape[1], mode='oversample', factor=10)) class Test2DConvolutions: @pytest.mark.parametrize('kernel', KERNELS) def test_centered_makekernel(self, kernel): """ Test smoothing of an image with a single positive pixel """ shape = kernel.array.shape x = np.zeros(shape) xslice = tuple([slice(sh // 2, sh // 2 + 1) for sh in shape]) x[xslice] = 1.0 c2 = convolve_fft(x, kernel, boundary='fill') c1 = convolve(x, kernel, boundary='fill') assert_almost_equal(c1, c2, decimal=12) @pytest.mark.parametrize('kernel', KERNELS) def test_random_makekernel(self, kernel): """ Test smoothing of an image made of random noise """ shape = kernel.array.shape x = np.random.randn(*shape) c2 = convolve_fft(x, kernel, boundary='fill') c1 = convolve(x, kernel, boundary='fill') # not clear why, but these differ by a couple ulps... 
assert_almost_equal(c1, c2, decimal=12) @pytest.mark.parametrize(('shape', 'width'), list(itertools.product(SHAPES_ODD, WIDTHS))) def test_uniform_smallkernel(self, shape, width): """ Test smoothing of an image with a single positive pixel Uses a simple, small kernel """ if width % 2 == 0: # convolve does not accept odd-shape kernels return kernel = np.ones([width, width]) x = np.zeros(shape) xslice = tuple([slice(sh // 2, sh // 2 + 1) for sh in shape]) x[xslice] = 1.0 c2 = convolve_fft(x, kernel, boundary='fill') c1 = convolve(x, kernel, boundary='fill') assert_almost_equal(c1, c2, decimal=12) @pytest.mark.parametrize(('shape', 'width'), list(itertools.product(SHAPES_ODD, [1, 3, 5]))) def test_smallkernel_Box2DKernel(self, shape, width): """ Test smoothing of an image with a single positive pixel Compares a small uniform kernel to the Box2DKernel """ kernel1 = np.ones([width, width]) / float(width) ** 2 kernel2 = Box2DKernel(width, mode='oversample', factor=10) x = np.zeros(shape) xslice = tuple([slice(sh // 2, sh // 2 + 1) for sh in shape]) x[xslice] = 1.0 c2 = convolve_fft(x, kernel2, boundary='fill') c1 = convolve_fft(x, kernel1, boundary='fill') assert_almost_equal(c1, c2, decimal=12) c2 = convolve(x, kernel2, boundary='fill') c1 = convolve(x, kernel1, boundary='fill') assert_almost_equal(c1, c2, decimal=12) def test_gaussian_2d_kernel_quantity(): # Make sure that the angle can be a quantity kernel1 = Gaussian2DKernel(x_stddev=2, y_stddev=4, theta=45 * u.deg) kernel2 = Gaussian2DKernel(x_stddev=2, y_stddev=4, theta=np.pi / 4) assert_allclose(kernel1.array, kernel2.array) def test_deprecated_hat(): # 'MexicanHat' was deprecated as a name for the kernels which are now # 'RickerWavelet'. This test ensures that the kernels are correctly # deprecated, and can be imported from the top-level package. from astropy.convolution import MexicanHat1DKernel, MexicanHat2DKernel with pytest.warns(AstropyDeprecationWarning): MexicanHat1DKernel(2) with pytest.warns(AstropyDeprecationWarning): MexicanHat2DKernel(2)
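The equivalence these tests exercise can be reproduced stand-alone; this sketch uses one kernel/shape combination from the parametrization above:

import numpy as np
from astropy.convolution import Gaussian2DKernel, convolve, convolve_fft

x = np.zeros((15, 15))
x[7, 7] = 1.0                                # single positive pixel
kernel = Gaussian2DKernel(2, x_size=15, y_size=15)
c1 = convolve(x, kernel, boundary='fill')
c2 = convolve_fft(x, kernel, boundary='fill')
np.testing.assert_almost_equal(c1, c2, decimal=12)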
dhomeier/astropy
astropy/convolution/tests/test_convolve_kernels.py
astropy/io/misc/pickle_helpers.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst import io import os from os.path import join import os.path import shutil import sys from collections import defaultdict from setuptools import Extension from setuptools.dep_util import newer_group import numpy from extension_helpers import import_file, write_if_different, get_compiler, pkg_config WCSROOT = os.path.relpath(os.path.dirname(__file__)) WCSVERSION = "7.6" def b(s): return s.encode('ascii') def string_escape(s): s = s.decode('ascii').encode('ascii', 'backslashreplace') s = s.replace(b'\n', b'\\n') s = s.replace(b'\0', b'\\0') return s.decode('ascii') def determine_64_bit_int(): """ The only configuration parameter needed at compile-time is how to specify a 64-bit signed integer. Python's ctypes module can get us that information. If we can't be absolutely certain, we default to "long long int", which is correct on most platforms (x86, x86_64). If we find platforms where this heuristic doesn't work, we may need to hardcode for them. """ try: try: import ctypes except ImportError: raise ValueError() if ctypes.sizeof(ctypes.c_longlong) == 8: return "long long int" elif ctypes.sizeof(ctypes.c_long) == 8: return "long int" elif ctypes.sizeof(ctypes.c_int) == 8: return "int" else: raise ValueError() except ValueError: return "long long int" def write_wcsconfig_h(paths): """ Writes out the wcsconfig.h header with local configuration. """ h_file = io.StringIO() h_file.write(""" /* The bundled version has WCSLIB_VERSION */ #define HAVE_WCSLIB_VERSION 1 /* WCSLIB library version number. */ #define WCSLIB_VERSION {} /* 64-bit integer data type. */ #define WCSLIB_INT64 {} /* Windows needs some other defines to prevent inclusion of wcsset() which conflicts with wcslib's wcsset(). These need to be set on code that *uses* astropy.wcs, in addition to astropy.wcs itself. */ #if defined(_WIN32) || defined(_MSC_VER) || defined(__MINGW32__) || defined (__MINGW64__) #ifndef YY_NO_UNISTD_H #define YY_NO_UNISTD_H #endif #ifndef _CRT_SECURE_NO_WARNINGS #define _CRT_SECURE_NO_WARNINGS #endif #ifndef _NO_OLDNAMES #define _NO_OLDNAMES #endif #ifndef NO_OLDNAMES #define NO_OLDNAMES #endif #ifndef __STDC__ #define __STDC__ 1 #endif #endif """.format(WCSVERSION, determine_64_bit_int())) content = h_file.getvalue().encode('ascii') for path in paths: write_if_different(path, content) ###################################################################### # GENERATE DOCSTRINGS IN C def generate_c_docstrings(): docstrings = import_file(os.path.join(WCSROOT, 'docstrings.py')) docstrings = docstrings.__dict__ keys = [ key for key, val in docstrings.items() if not key.startswith('__') and isinstance(val, str)] keys.sort() docs = {} for key in keys: docs[key] = docstrings[key].encode('utf8').lstrip() + b'\0' h_file = io.StringIO() h_file.write("""/* DO NOT EDIT! This file is autogenerated by astropy/wcs/setup_package.py. To edit its contents, edit astropy/wcs/docstrings.py */ #ifndef __DOCSTRINGS_H__ #define __DOCSTRINGS_H__ """) for key in keys: val = docs[key] h_file.write(f'extern char doc_{key}[{len(val)}];\n') h_file.write("\n#endif\n\n") write_if_different( join(WCSROOT, 'include', 'astropy_wcs', 'docstrings.h'), h_file.getvalue().encode('utf-8')) c_file = io.StringIO() c_file.write("""/* DO NOT EDIT! This file is autogenerated by astropy/wcs/setup_package.py. To edit its contents, edit astropy/wcs/docstrings.py The weirdness here with strncpy is because some C compilers, notably MSVC, do not support string literals greater than 256 characters. 
*/ #include <string.h> #include "astropy_wcs/docstrings.h" """) for key in keys: val = docs[key] c_file.write(f'char doc_{key}[{len(val)}] = {{\n') for i in range(0, len(val), 12): section = val[i:i+12] c_file.write(' ') c_file.write(''.join(f'0x{x:02x}, ' for x in section)) c_file.write('\n') c_file.write(" };\n\n") write_if_different( join(WCSROOT, 'src', 'docstrings.c'), c_file.getvalue().encode('utf-8')) def get_wcslib_cfg(cfg, wcslib_files, include_paths): debug = '--debug' in sys.argv cfg['include_dirs'].append(numpy.get_include()) cfg['define_macros'].extend([ ('ECHO', None), ('WCSTRIG_MACRO', None), ('ASTROPY_WCS_BUILD', None), ('_GNU_SOURCE', None)]) if ((int(os.environ.get('ASTROPY_USE_SYSTEM_WCSLIB', 0)) or int(os.environ.get('ASTROPY_USE_SYSTEM_ALL', 0))) and not sys.platform == 'win32'): wcsconfig_h_path = join(WCSROOT, 'include', 'wcsconfig.h') if os.path.exists(wcsconfig_h_path): os.unlink(wcsconfig_h_path) for k, v in pkg_config(['wcslib'], ['wcs']).items(): cfg[k].extend(v) else: write_wcsconfig_h(include_paths) wcslib_path = join("cextern", "wcslib") # Path to wcslib wcslib_cpath = join(wcslib_path, "C") # Path to wcslib source files cfg['sources'].extend(join(wcslib_cpath, x) for x in wcslib_files) cfg['include_dirs'].append(wcslib_cpath) if debug: cfg['define_macros'].append(('DEBUG', None)) cfg['undef_macros'].append('NDEBUG') if (not sys.platform.startswith('sun') and not sys.platform == 'win32'): cfg['extra_compile_args'].extend(["-fno-inline", "-O0", "-g"]) else: # Define ECHO as nothing to prevent spurious newlines from # printing within the libwcs parser cfg['define_macros'].append(('NDEBUG', None)) cfg['undef_macros'].append('DEBUG') if sys.platform == 'win32': # These are written into wcsconfig.h, but that file is not # used by all parts of wcslib. 
cfg['define_macros'].extend([ ('YY_NO_UNISTD_H', None), ('_CRT_SECURE_NO_WARNINGS', None), ('_NO_OLDNAMES', None), # for mingw32 ('NO_OLDNAMES', None), # for mingw64 ('__STDC__', None) # for MSVC ]) if sys.platform.startswith('linux'): cfg['define_macros'].append(('HAVE_SINCOS', None)) # For 4.7+ enable C99 syntax in older compilers (need 'gnu99' std for gcc) if determine_64_bit_int() != 'int' and get_compiler() == 'unix': cfg['extra_compile_args'].extend(['-std=gnu99']) else: cfg['extra_compile_args'].extend(['-std=c99']) # Squelch a few compilation warnings in WCSLIB if get_compiler() in ('unix', 'mingw32'): if not debug: cfg['extra_compile_args'].extend([ '-Wno-strict-prototypes', '-Wno-unused-function', '-Wno-unused-value', '-Wno-uninitialized']) def get_extensions(): generate_c_docstrings() ###################################################################### # DISTUTILS SETUP cfg = defaultdict(list) wcslib_files = [ # List of wcslib files to compile 'flexed/wcsbth.c', 'flexed/wcspih.c', 'flexed/wcsulex.c', 'flexed/wcsutrn.c', 'cel.c', 'dis.c', 'lin.c', 'log.c', 'prj.c', 'spc.c', 'sph.c', 'spx.c', 'tab.c', 'wcs.c', 'wcserr.c', 'wcsfix.c', 'wcshdr.c', 'wcsprintf.c', 'wcsunits.c', 'wcsutil.c' ] wcslib_config_paths = [ join(WCSROOT, 'include', 'astropy_wcs', 'wcsconfig.h'), join(WCSROOT, 'include', 'wcsconfig.h') ] get_wcslib_cfg(cfg, wcslib_files, wcslib_config_paths) cfg['include_dirs'].append(join(WCSROOT, "include")) astropy_wcs_files = [ # List of astropy.wcs files to compile 'distortion.c', 'distortion_wrap.c', 'docstrings.c', 'pipeline.c', 'pyutil.c', 'astropy_wcs.c', 'astropy_wcs_api.c', 'sip.c', 'sip_wrap.c', 'str_list_proxy.c', 'unit_list_proxy.c', 'util.c', 'wcslib_wrap.c', 'wcslib_auxprm_wrap.c', 'wcslib_tabprm_wrap.c', 'wcslib_wtbarr_wrap.c' ] cfg['sources'].extend(join(WCSROOT, 'src', x) for x in astropy_wcs_files) cfg['sources'] = [str(x) for x in cfg['sources']] cfg = dict((str(key), val) for key, val in cfg.items()) # Copy over header files from WCSLIB into the installed version of Astropy # so that other Python packages can write extensions that link to it. We # do the copying here then include the data in [options.package_data] in # the setup.cfg file wcslib_headers = [ 'cel.h', 'lin.h', 'prj.h', 'spc.h', 'spx.h', 'tab.h', 'wcs.h', 'wcserr.h', 'wcsmath.h', 'wcsprintf.h', ] if not (int(os.environ.get('ASTROPY_USE_SYSTEM_WCSLIB', 0)) or int(os.environ.get('ASTROPY_USE_SYSTEM_ALL', 0))): for header in wcslib_headers: source = join('cextern', 'wcslib', 'C', header) dest = join('astropy', 'wcs', 'include', 'wcslib', header) if newer_group([source], dest, 'newer'): shutil.copy(source, dest) return [Extension('astropy.wcs._wcs', **cfg)]
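To make the generated layout concrete, here is a tiny stand-alone sketch of the byte-array emission done in generate_c_docstrings above (the docstring text is an arbitrary example):

doc = 'Example docstring.'.encode('utf8') + b'\0'   # NUL-terminated, as above
print(f'char doc_example[{len(doc)}] = {{')
for i in range(0, len(doc), 12):                    # twelve bytes per source row
    section = doc[i:i + 12]
    print('    ' + ''.join(f'0x{x:02x}, ' for x in section))
print('  };')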
dhomeier/astropy
astropy/convolution/tests/test_convolve_kernels.py
astropy/wcs/setup_package.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""URL unescaper functions."""

# STDLIB
from xml.sax import saxutils

__all__ = ['unescape_all']

# This is DIY
_bytes_entities = {b'&amp;': b'&', b'&lt;': b'<', b'&gt;': b'>',
                   b'&amp;&amp;': b'&', b'&&': b'&', b'%2F': b'/'}
_bytes_keys = [b'&amp;&amp;', b'&&', b'&amp;', b'&lt;', b'&gt;', b'%2F']

# This is used by saxutils
_str_entities = {'&amp;&amp;': '&', '&&': '&', '%2F': '/'}
_str_keys = ['&amp;&amp;', '&&', '&amp;', '&lt;', '&gt;', '%2F']


def unescape_all(url):
    """Recursively unescape a given URL.

    .. note:: '&amp;&amp;' becomes a single '&'.

    Parameters
    ----------
    url : str or bytes
        URL to unescape.

    Returns
    -------
    clean_url : str or bytes
        Unescaped URL.

    """
    if isinstance(url, bytes):
        func2use = _unescape_bytes
        keys2use = _bytes_keys
    else:
        func2use = _unescape_str
        keys2use = _str_keys
    clean_url = func2use(url)
    not_done = [clean_url.count(key) > 0 for key in keys2use]
    if True in not_done:
        return unescape_all(clean_url)
    else:
        return clean_url


def _unescape_str(url):
    return saxutils.unescape(url, _str_entities)


def _unescape_bytes(url):
    clean_url = url
    for key in _bytes_keys:
        clean_url = clean_url.replace(key, _bytes_entities[key])
    return clean_url
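A brief demonstration of the recursion in unescape_all (the URLs are arbitrary examples): a doubly escaped ampersand collapses over two passes, and bytes input goes through the DIY table:

from astropy.utils.xml.unescaper import unescape_all

print(unescape_all('https://example.com/q?a=1&amp;amp;b=2'))
# https://example.com/q?a=1&b=2   (&amp;amp; -> &amp; -> &)
print(unescape_all(b'%2Fpath&amp;&amp;x'))
# b'/path&x'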
dhomeier/astropy
astropy/convolution/tests/test_convolve_kernels.py
astropy/utils/xml/unescaper.py
import pytest
import warnings


# autouse makes this an all-coordinates-tests fixture
# this can be eliminated if/when warnings in pytest are all turned to errors (gh issue #7928)
@pytest.fixture(autouse=True)
def representation_deprecation_to_error():
    warnings.filterwarnings('error', 'The `representation` keyword/property name is '
                                     'deprecated in favor of `representation_type`')
    filt = warnings.filters[0]
    yield
    try:
        warnings.filters.remove(filt)
    except ValueError:
        pass  # already removed
dhomeier/astropy
astropy/convolution/tests/test_convolve_kernels.py
astropy/coordinates/tests/conftest.py
# -*- coding: utf-8 -*-
from utils.appliance import get_or_create_current_appliance
from utils.appliance.implementations.ui import navigate_to


def simulate(
        instance=None, message=None, request=None, target_type=None,
        target_object=None, execute_methods=None, attributes_values=None,
        pre_clear=True, appliance=None):
    """Runs the simulation of specified Automate object."""
    if not appliance:
        appliance = get_or_create_current_appliance()

    view = navigate_to(appliance.server, 'AutomateSimulation')
    if pre_clear:
        view.avp.clear()
        view.fill({
            'instance': 'Request',
            'message': 'create',
            'request': '',
            'target_type': '<None>',
            'execute_methods': True,
        })
    view.fill({
        'instance': instance,
        'message': message,
        'request': request,
        'target_type': target_type,
        'target_object': target_object,
        'execute_methods': execute_methods,
        'avp': attributes_values,
    })
    view.submit_button.click()
    view.flash.assert_no_error()
    view.flash.assert_message('Automation Simulation has been run')
    # TODO: After fixing the tree
    # return view.result_tree.read_contents()
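A sketch of a typical call to the helper above (the argument values are illustrative, not taken from the repository): the function navigates to the Automate simulation page, optionally clears the form, fills it, submits, and asserts the success flash message.

from cfme.automate.simulation import simulate

# Hypothetical invocation -- instance/request/target values are examples only.
simulate(
    instance='Request',
    message='create',
    request='InspectMe',
    target_type='VM and Instance',
    target_object='test_vm',
    execute_methods=True,
    attributes_values={'key1': 'value1'},
)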
# -*- coding: utf-8 -*-
"""This module contains REST API specific tests."""
import random

import pytest
import fauxfactory

from cfme import test_requirements
from cfme.infrastructure.provider.rhevm import RHEVMProvider
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.rest.gen_data import arbitration_rules as _arbitration_rules
from cfme.rest.gen_data import arbitration_settings as _arbitration_settings
from cfme.rest.gen_data import automation_requests_data
from cfme.rest.gen_data import vm as _vm
from fixtures.provider import setup_one_or_skip
from utils import error
from utils.blockers import BZ
from utils.providers import ProviderFilter
from utils.rest import assert_response
from utils.version import current_version
from utils.wait import wait_for, wait_for_decorator


pytestmark = [test_requirements.rest]


@pytest.fixture(scope="module")
def a_provider(request):
    pf = ProviderFilter(classes=[VMwareProvider, RHEVMProvider])
    return setup_one_or_skip(request, filters=[pf])


@pytest.fixture(scope="function")
def vm(request, a_provider, appliance):
    return _vm(request, a_provider, appliance.rest_api)


def wait_for_requests(requests):
    def _finished():
        for request in requests:
            request.reload()
            if request.request_state != 'finished':
                return False
        return True

    wait_for(_finished, num_sec=45, delay=5, message="requests finished")


@pytest.mark.tier(2)
@pytest.mark.parametrize(
    "from_detail", [True, False],
    ids=["from_detail", "from_collection"])
def test_vm_scan(appliance, vm, from_detail):
    rest_vm = appliance.rest_api.collections.vms.get(name=vm)
    if from_detail:
        response = rest_vm.action.scan()
    else:
        response, = appliance.rest_api.collections.vms.action.scan(rest_vm)
    assert_response(appliance)

    @wait_for_decorator(timeout="5m", delay=5, message="REST running scanning vm finishes")
    def _finished():
        response.task.reload()
        if response.task.status.lower() in {"error"}:
            pytest.fail("Error when running scan vm method: `{}`".format(response.task.message))
        return response.task.state.lower() == 'finished'


COLLECTIONS_ADDED_IN_58 = {
    "actions", "alert_definitions", "alerts", "authentications",
    "configuration_script_payloads", "configuration_script_sources",
    "load_balancers",
}


COLLECTIONS_REMOVED_IN_59 = {
    "arbitration_settings", "arbitration_profiles", "virtual_templates",
    "arbitration_rules",
}


COLLECTIONS_ALL = {
    "actions", "alert_definitions", "alerts", "arbitration_profiles",
    "arbitration_rules", "arbitration_settings", "authentications", "automate",
    "automate_domains", "automation_requests", "availability_zones",
    "blueprints", "categories", "chargebacks", "cloud_networks", "clusters",
    "conditions", "configuration_script_payloads",
    "configuration_script_sources", "container_deployments", "currencies",
    "data_stores", "events", "features", "flavors", "groups", "hosts",
    "instances", "load_balancers", "measures", "notifications",
    "orchestration_templates", "pictures", "policies", "policy_actions",
    "policy_profiles", "providers", "provision_dialogs", "provision_requests",
    "rates", "reports", "request_tasks", "requests", "resource_pools",
    "results", "roles", "security_groups", "servers", "service_catalogs",
    "service_dialogs", "service_orders", "service_requests",
    "service_templates", "services", "settings", "tags", "tasks", "templates",
    "tenants", "users", "virtual_templates", "vms", "zones"
}


# non-typical collections without "id" and "resources"
COLLECTIONS_OMMITED = {"settings"}


@pytest.mark.tier(3)
@pytest.mark.parametrize("collection_name", COLLECTIONS_ALL)
@pytest.mark.uncollectif(
    lambda collection_name:
        (collection_name in COLLECTIONS_OMMITED) or
        (collection_name in COLLECTIONS_ADDED_IN_58 and current_version() < "5.8") or
        (collection_name in COLLECTIONS_REMOVED_IN_59 and current_version() >= "5.9")
)
def test_query_simple_collections(appliance, collection_name):
    """This test tries to load each of the listed collections. 'Simple' collection means
    that they have no usable actions that we could try to run
    Steps:
        * GET /api/<collection_name>
    Metadata:
        test_flag: rest
    """
    collection = getattr(appliance.rest_api.collections, collection_name)
    assert_response(appliance)
    collection.reload()
    list(collection)


# collections affected by BZ 1437201 in versions < 5.9
COLLECTIONS_BUGGY_ATTRS = {"results", "service_catalogs", "automate", "categories", "roles"}


@pytest.mark.tier(3)
@pytest.mark.parametrize("collection_name", COLLECTIONS_ALL)
@pytest.mark.uncollectif(
    lambda collection_name:
        (collection_name in COLLECTIONS_OMMITED) or
        (collection_name in COLLECTIONS_ADDED_IN_58 and current_version() < "5.8") or
        (collection_name in COLLECTIONS_REMOVED_IN_59 and current_version() >= "5.9")
)
@pytest.mark.meta(blockers=['GH#ManageIQ/manageiq:15754'])
def test_select_attributes(appliance, collection_name):
    """Tests that it's possible to limit returned attributes.

    Metadata:
        test_flag: rest
    """
    if collection_name in COLLECTIONS_BUGGY_ATTRS and current_version() < '5.9':
        pytest.skip("Affected by BZ 1437201, cannot test.")
    collection = getattr(appliance.rest_api.collections, collection_name)
    response = appliance.rest_api.get(
        '{}{}'.format(collection._href, '?expand=resources&attributes=id'))
    assert_response(appliance)
    for resource in response['resources']:
        assert 'id' in resource
        expected_len = 2 if 'href' in resource else 1
        assert len(resource) == expected_len


@pytest.mark.uncollectif(lambda: current_version() < '5.7')
def test_add_picture(appliance):
    """Tests adding picture.

    Metadata:
        test_flag: rest
    """
    collection = appliance.rest_api.collections.pictures
    count = collection.count
    collection.action.create({
        "extension": "png",
        "content": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcS"
                   "JAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="})
    assert_response(appliance)
    collection.reload()
    assert collection.count == count + 1


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_add_picture_invalid_extension(appliance):
    """Tests adding picture with invalid extension.

    Metadata:
        test_flag: rest
    """
    collection = appliance.rest_api.collections.pictures
    count = collection.count
    with error.expected('Extension must be'):
        collection.action.create({
            "extension": "xcf",
            "content": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcS"
                       "JAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="})
    assert_response(appliance, http_status=400)
    collection.reload()
    assert collection.count == count


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_add_picture_invalid_data(appliance):
    """Tests adding picture with invalid content.

    Metadata:
        test_flag: rest
    """
    collection = appliance.rest_api.collections.pictures
    count = collection.count
    with error.expected('invalid base64'):
        collection.action.create({
            "extension": "png",
            "content": "invalid"})
    assert_response(appliance, http_status=400)
    collection.reload()
    assert collection.count == count


@pytest.mark.uncollectif(lambda: current_version() < '5.7')
def test_http_options(appliance):
    """Tests OPTIONS http method.

    Metadata:
        test_flag: rest
    """
    assert 'boot_time' in appliance.rest_api.collections.vms.options()['attributes']
    assert_response(appliance)


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
@pytest.mark.parametrize("collection_name", ["hosts", "clusters"])
def test_http_options_node_types(appliance, collection_name):
    """Tests that OPTIONS http method on Hosts and Clusters collection returns node_types.

    Metadata:
        test_flag: rest
    """
    collection = getattr(appliance.rest_api.collections, collection_name)
    assert 'node_types' in collection.options()['data']
    assert_response(appliance)


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_http_options_subcollections(appliance):
    """Tests that OPTIONS returns supported subcollections.

    Metadata:
        test_flag: rest
    """
    assert 'tags' in appliance.rest_api.collections.vms.options()['subcollections']
    assert_response(appliance)


@pytest.mark.uncollectif(lambda: current_version() < '5.7')
def test_server_info(appliance):
    """Check that server info is present.

    Metadata:
        test_flag: rest
    """
    assert all(item in appliance.rest_api.server_info
               for item in ('appliance', 'build', 'version'))


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_server_info_href(appliance):
    """Check that appliance's server, zone and region is present.

    Metadata:
        test_flag: rest
    """
    items = ('server_href', 'zone_href', 'region_href')
    for item in items:
        assert item in appliance.rest_api.server_info
        assert 'id' in appliance.rest_api.get(appliance.rest_api.server_info[item])


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_default_region(appliance):
    """Check that the default region is present.

    Metadata:
        test_flag: rest
    """
    reg = appliance.rest_api.collections.regions[0]
    assert hasattr(reg, 'guid')
    assert hasattr(reg, 'region')


@pytest.mark.uncollectif(lambda: current_version() < '5.7')
def test_product_info(appliance):
    """Check that product info is present.

    Metadata:
        test_flag: rest
    """
    assert all(item in appliance.rest_api.product_info for item in
               ('copyright', 'name', 'name_full', 'support_website', 'support_website_text'))


@pytest.mark.uncollectif(lambda: current_version() < '5.7')
def test_identity(appliance):
    """Check that user's identity is present.

    Metadata:
        test_flag: rest
    """
    assert all(item in appliance.rest_api.identity for item in
               ('userid', 'name', 'group', 'role', 'tenant', 'groups'))


@pytest.mark.uncollectif(lambda: current_version() < '5.7')
def test_user_settings(appliance):
    """Check that user's settings are returned.

    Metadata:
        test_flag: rest
    """
    assert isinstance(appliance.rest_api.settings, dict)


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_datetime_filtering(appliance, a_provider):
    """Tests support for DateTime filtering with timestamps in YYYY-MM-DDTHH:MM:SSZ format.

    Metadata:
        test_flag: rest
    """
    collection = appliance.rest_api.collections.vms
    url_string = '{}{}'.format(
        collection._href,
        '?expand=resources&attributes=created_on&sort_by=created_on&sort_order=asc'
        '&filter[]=created_on{}{}')
    vms_num = len(collection)
    assert vms_num > 3
    baseline_vm = collection[vms_num / 2]
    baseline_datetime = baseline_vm._data['created_on']  # YYYY-MM-DDTHH:MM:SSZ

    def _get_filtered_resources(operator):
        return appliance.rest_api.get(url_string.format(operator, baseline_datetime))['resources']

    older_resources = _get_filtered_resources('<')
    newer_resources = _get_filtered_resources('>')
    matching_resources = _get_filtered_resources('=')
    # this will fail once BZ1437529 is fixed
    # should be: ``assert matching_resources``
    assert not matching_resources
    if older_resources:
        last_older = collection.get(id=older_resources[-1]['id'])
        assert last_older.created_on < baseline_vm.created_on
    if newer_resources:
        first_newer = collection.get(id=newer_resources[0]['id'])
        # this will fail once BZ1437529 is fixed
        # should be: ``assert first_newer.created_on > baseline_vm.created_on``
        assert first_newer.created_on == baseline_vm.created_on


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_date_filtering(appliance, a_provider):
    """Tests support for DateTime filtering with timestamps in YYYY-MM-DD format.

    Metadata:
        test_flag: rest
    """
    collection = appliance.rest_api.collections.vms
    url_string = '{}{}'.format(
        collection._href,
        '?expand=resources&attributes=created_on&sort_by=created_on&sort_order=desc'
        '&filter[]=created_on{}{}')
    vms_num = len(collection)
    assert vms_num > 3
    baseline_vm = collection[vms_num / 2]
    baseline_date, _ = baseline_vm._data['created_on'].split('T')  # YYYY-MM-DD

    def _get_filtered_resources(operator):
        return appliance.rest_api.get(url_string.format(operator, baseline_date))['resources']

    older_resources = _get_filtered_resources('<')
    newer_resources = _get_filtered_resources('>')
    matching_resources = _get_filtered_resources('=')
    assert matching_resources
    if newer_resources:
        last_newer = collection.get(id=newer_resources[-1]['id'])
        assert last_newer.created_on > baseline_vm.created_on
    if older_resources:
        first_older = collection.get(id=older_resources[0]['id'])
        assert first_older.created_on < baseline_vm.created_on


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_resources_hiding(appliance):
    """Test that it's possible to hide resources in response.

    Metadata:
        test_flag: rest
    """
    roles = appliance.rest_api.collections.roles
    resources_visible = appliance.rest_api.get(roles._href + '?filter[]=read_only=true')
    assert_response(appliance)
    assert 'resources' in resources_visible
    resources_hidden = appliance.rest_api.get(
        roles._href + '?filter[]=read_only=true&hide=resources')
    assert_response(appliance)
    assert 'resources' not in resources_hidden
    assert resources_hidden['subcount'] == resources_visible['subcount']


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_sorting_by_attributes(appliance):
    """Test that it's possible to sort resources by attributes.

    Metadata:
        test_flag: rest
    """
    url_string = '{}{}'.format(
        appliance.rest_api.collections.groups._href,
        '?expand=resources&attributes=id&sort_by=id&sort_order={}')
    response_asc = appliance.rest_api.get(url_string.format('asc'))
    assert_response(appliance)
    assert 'resources' in response_asc
    response_desc = appliance.rest_api.get(url_string.format('desc'))
    assert_response(appliance)
    assert 'resources' in response_desc
    assert response_asc['subcount'] == response_desc['subcount']

    id_last = 0
    for resource in response_asc['resources']:
        assert resource['id'] > id_last
        id_last = resource['id']
    id_last += 1
    for resource in response_desc['resources']:
        assert resource['id'] < id_last
        id_last = resource['id']


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
@pytest.mark.parametrize('vendor', ['Microsoft', 'Redhat', 'Vmware'])
def test_collection_class_valid(appliance, a_provider, vendor):
    """Tests that it's possible to query using collection_class.

    Metadata:
        test_flag: rest
    """
    collection = appliance.rest_api.collections.vms
    resource_type = collection[0].type
    tested_type = 'ManageIQ::Providers::{}::InfraManager::Vm'.format(vendor)

    response = collection.query_string(collection_class=tested_type)
    if resource_type == tested_type:
        assert response.count > 0

    # all returned entities must have the same type
    if response.count:
        rand_num = 5 if response.count >= 5 else response.count
        rand_entities = random.sample(response, rand_num)
        for entity in rand_entities:
            assert entity.type == tested_type


@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_collection_class_invalid(appliance):
    """Tests that it's not possible to query using invalid collection_class.

    Metadata:
        test_flag: rest
    """
    with error.expected('Invalid collection_class'):
        appliance.rest_api.collections.vms.query_string(
            collection_class='ManageIQ::Providers::Nonexistent::Vm')


class TestBulkQueryRESTAPI(object):
    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    def test_bulk_query(self, appliance):
        """Tests bulk query referencing resources by attributes id, href and guid

        Metadata:
            test_flag: rest
        """
        collection = appliance.rest_api.collections.events
        data0, data1, data2 = collection[0]._data, collection[1]._data, collection[2]._data
        response = appliance.rest_api.collections.events.action.query(
            {'id': data0['id']}, {'href': data1['href']}, {'guid': data2['guid']})
        assert_response(appliance)
        assert len(response) == 3
        assert (data0 == response[0]._data and
                data1 == response[1]._data and
                data2 == response[2]._data)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    def test_bulk_query_users(self, appliance):
        """Tests bulk query on 'users' collection

        Metadata:
            test_flag: rest
        """
        data = appliance.rest_api.collections.users[0]._data
        response = appliance.rest_api.collections.users.action.query(
            {'name': data['name']}, {'userid': data['userid']})
        assert_response(appliance)
        assert len(response) == 2
        assert data['id'] == response[0]._data['id'] == response[1]._data['id']

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    def test_bulk_query_roles(self, appliance):
        """Tests bulk query on 'roles' collection

        Metadata:
            test_flag: rest
        """
        collection = appliance.rest_api.collections.roles
        data0, data1 = collection[0]._data, collection[1]._data
        response = appliance.rest_api.collections.roles.action.query(
            {'name': data0['name']}, {'name': data1['name']})
        assert_response(appliance)
        assert len(response) == 2
        assert data0 == response[0]._data and data1 == response[1]._data

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    def test_bulk_query_groups(self, appliance):
        """Tests bulk query on 'groups' collection

        Metadata:
            test_flag: rest
        """
        collection = appliance.rest_api.collections.groups
        data0, data1 = collection[0]._data, collection[1]._data
        response = appliance.rest_api.collections.groups.action.query(
            {'description': data0['description']}, {'description': data1['description']})
        assert_response(appliance)
        assert len(response) == 2
        assert data0 == response[0]._data and data1 == response[1]._data


class TestArbitrationSettingsRESTAPI(object):
    @pytest.fixture(scope='function')
    def arbitration_settings(self, request, appliance):
        num_settings = 2
        response = _arbitration_settings(request, appliance.rest_api, num=num_settings)
        assert_response(appliance)
        assert len(response) == num_settings
        return response

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    def test_create_arbitration_settings(self, appliance, arbitration_settings):
        """Tests create arbitration settings.

        Metadata:
            test_flag: rest
        """
        for setting in arbitration_settings:
            record = appliance.rest_api.collections.arbitration_settings.get(id=setting.id)
            assert record._data == setting._data

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    @pytest.mark.parametrize('method', ['post', 'delete'])
    def test_delete_arbitration_settings_from_detail(self, appliance, arbitration_settings,
                                                     method):
        """Tests delete arbitration settings from detail.

        Metadata:
            test_flag: rest
        """
        for setting in arbitration_settings:
            setting.action.delete(force_method=method)
            assert_response(appliance)
            with error.expected('ActiveRecord::RecordNotFound'):
                setting.action.delete(force_method=method)
            assert_response(appliance, http_status=404)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    def test_delete_arbitration_settings_from_collection(self, appliance, arbitration_settings):
        """Tests delete arbitration settings from collection.

        Metadata:
            test_flag: rest
        """
        collection = appliance.rest_api.collections.arbitration_settings
        collection.action.delete(*arbitration_settings)
        assert_response(appliance)
        with error.expected('ActiveRecord::RecordNotFound'):
            collection.action.delete(*arbitration_settings)
        assert_response(appliance, http_status=404)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    @pytest.mark.parametrize(
        "from_detail", [True, False],
        ids=["from_detail", "from_collection"])
    def test_edit_arbitration_settings(self, appliance, arbitration_settings, from_detail):
        """Tests edit arbitration settings.

        Metadata:
            test_flag: rest
        """
        num_settings = len(arbitration_settings)
        uniq = [fauxfactory.gen_alphanumeric(5) for _ in range(num_settings)]
        new = [{'name': 'test_edit{}'.format(u), 'display_name': 'Test Edit{}'.format(u)}
               for u in uniq]
        if from_detail:
            edited = []
            for i in range(num_settings):
                edited.append(arbitration_settings[i].action.edit(**new[i]))
                assert_response(appliance)
        else:
            for i in range(num_settings):
                new[i].update(arbitration_settings[i]._ref_repr())
            edited = appliance.rest_api.collections.arbitration_settings.action.edit(*new)
            assert_response(appliance)
        assert len(edited) == num_settings
        for i in range(num_settings):
            assert (edited[i].name == new[i]['name'] and
                    edited[i].display_name == new[i]['display_name'])


class TestArbitrationRulesRESTAPI(object):
    @pytest.fixture(scope='function')
    def arbitration_rules(self, request, appliance):
        num_rules = 2
        response = _arbitration_rules(request, appliance.rest_api, num=num_rules)
        assert_response(appliance)
        assert len(response) == num_rules
        return response

    @pytest.mark.uncollectif(lambda: current_version() < '5.7' or current_version() >= '5.9')
    def test_create_arbitration_rules(self, arbitration_rules, appliance):
        """Tests create arbitration rules.

        Metadata:
            test_flag: rest
        """
        for rule in arbitration_rules:
            record = appliance.rest_api.collections.arbitration_rules.get(id=rule.id)
            assert record.description == rule.description

    # there's no test for the DELETE method as it is not working and won't be fixed, see BZ 1410504

    @pytest.mark.uncollectif(lambda: current_version() < '5.7' or current_version() >= '5.9')
    def test_delete_arbitration_rules_from_detail_post(self, arbitration_rules, appliance):
        """Tests delete arbitration rules from detail.

        Metadata:
            test_flag: rest
        """
        for entity in arbitration_rules:
            entity.action.delete.POST()
            assert_response(appliance)
            with error.expected('ActiveRecord::RecordNotFound'):
                entity.action.delete.POST()
            assert_response(appliance, http_status=404)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7' or current_version() >= '5.9')
    def test_delete_arbitration_rules_from_collection(self, arbitration_rules, appliance):
        """Tests delete arbitration rules from collection.

        Metadata:
            test_flag: rest
        """
        collection = appliance.rest_api.collections.arbitration_rules
        collection.action.delete(*arbitration_rules)
        assert_response(appliance)
        with error.expected('ActiveRecord::RecordNotFound'):
            collection.action.delete(*arbitration_rules)
        assert_response(appliance, http_status=404)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7' or current_version() >= '5.9')
    @pytest.mark.parametrize(
        'from_detail', [True, False],
        ids=['from_detail', 'from_collection'])
    def test_edit_arbitration_rules(self, arbitration_rules, appliance, from_detail):
        """Tests edit arbitration rules.

        Metadata:
            test_flag: rest
        """
        num_rules = len(arbitration_rules)
        uniq = [fauxfactory.gen_alphanumeric(5) for _ in range(num_rules)]
        new = [{'description': 'new test admin rule {}'.format(u)} for u in uniq]
        if from_detail:
            edited = []
            for i in range(num_rules):
                edited.append(arbitration_rules[i].action.edit(**new[i]))
                assert_response(appliance)
        else:
            for i in range(num_rules):
                new[i].update(arbitration_rules[i]._ref_repr())
            edited = appliance.rest_api.collections.arbitration_rules.action.edit(*new)
            assert_response(appliance)
        assert len(edited) == num_rules
        for i in range(num_rules):
            assert edited[i].description == new[i]['description']


class TestNotificationsRESTAPI(object):
    @pytest.fixture(scope='function')
    def generate_notifications(self, appliance):
        requests_data = automation_requests_data('nonexistent_vm')
        requests = appliance.rest_api.collections.automation_requests.action.create(
            *requests_data[:2])
        assert len(requests) == 2
        wait_for_requests(requests)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    @pytest.mark.parametrize(
        'from_detail', [True, False],
        ids=['from_detail', 'from_collection'])
    def test_mark_notifications(self, appliance, generate_notifications, from_detail):
        """Tests marking notifications as seen.

        Metadata:
            test_flag: rest
        """
        unseen = appliance.rest_api.collections.notifications.find_by(seen=False)
        notifications = [unseen[-i] for i in range(1, 3)]

        if from_detail:
            for ent in notifications:
                ent.action.mark_as_seen()
                assert_response(appliance)
        else:
            appliance.rest_api.collections.notifications.action.mark_as_seen(*notifications)
            assert_response(appliance)

        for ent in notifications:
            ent.reload()
            assert ent.seen

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    @pytest.mark.parametrize('method', ['post', 'delete'])
    def test_delete_notifications_from_detail(self, appliance, generate_notifications, method):
        """Tests delete notifications from detail.

        Metadata:
            test_flag: rest
        """
        if method == 'delete' and BZ('1420872', forced_streams=['5.7', '5.8', 'upstream']).blocks:
            pytest.skip("Affected by BZ1420872, cannot test.")
        collection = appliance.rest_api.collections.notifications
        collection.reload()
        notifications = [collection[-i] for i in range(1, 3)]

        for entity in notifications:
            entity.action.delete(force_method=method)
            assert_response(appliance)
            with error.expected('ActiveRecord::RecordNotFound'):
                entity.action.delete(force_method=method)
            assert_response(appliance, http_status=404)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    def test_delete_notifications_from_collection(self, appliance, generate_notifications):
        """Tests delete notifications from collection.

        Metadata:
            test_flag: rest
        """
        collection = appliance.rest_api.collections.notifications
        collection.reload()
        notifications = [collection[-i] for i in range(1, 3)]

        collection.action.delete(*notifications)
        assert_response(appliance)
        with error.expected("ActiveRecord::RecordNotFound"):
            collection.action.delete(*notifications)
        assert_response(appliance, http_status=404)
dajohnso/cfme_tests
cfme/tests/test_rest.py
cfme/automate/simulation.py
from collections import Mapping
from contextlib import contextmanager
from itertools import izip

from cached_property import cached_property
from sqlalchemy import MetaData, create_engine, event, inspect
from sqlalchemy.exc import ArgumentError, DisconnectionError, InvalidRequestError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import Pool

from fixtures.pytest_store import store
from utils import conf
from utils.log import logger


@event.listens_for(Pool, "checkout")
def ping_connection(dbapi_connection, connection_record, connection_proxy):
    """ping_connection event hook, used to reconnect db sessions that time out

    Note:

        See also: :ref:`Connection Invalidation <sqlalchemy:pool_connection_invalidation>`

    """
    cursor = dbapi_connection.cursor()
    try:
        cursor.execute("SELECT 1")
    except StandardError:
        raise DisconnectionError
    cursor.close()


class Db(Mapping):
    """Helper class for interacting with a CFME database using SQLAlchemy

    Args:
        hostname: base url to be used (default is from current_appliance)
        credentials: name of credentials to use from :py:attr:`utils.conf.credentials`
            (default ``database``)

    Provides convenient attributes to common sqlalchemy objects related to this DB,
    as well as a Mapping interface to access and reflect database tables. Where
    possible, attributes are cached.

    Db objects support getting tables by name via the mapping interface::

        table = db['table_name']

    Usage:

        # Usually used to query the DB for info, here's a common query
        for vm in db.session.query(db['vms']).all():
            print(vm.name)
            print(vm.guid)

        # List comprehension to get all templates
        [(vm.name, vm.guid) for vm in session.query(db['vms']).all() if vm.template is True]

        # Use the transaction manager for write operations:
        with db.transaction:
            db.session.query(db['vms']).all().delete()

    Note:

        Creating a table object requires a call to the database so that SQLAlchemy
        can do reflection to determine the table's structure (columns, keys,
        indices, etc). On a latent connection, this can be extremely slow, which
        will affect methods that return tables, like the mapping interface or
        :py:meth:`values`.

    """
    def __init__(self, hostname=None, credentials=None, port=None):
        self._table_cache = {}
        self.hostname = hostname or store.current_appliance.db.address
        self.port = port or store.current_appliance.db_port
        self.credentials = credentials or conf.credentials['database']

    def __getitem__(self, table_name):
        """Access tables as items contained in this db

        Usage:

            # To get a table called 'table_name':
            db['table_name']

        This may return ``None`` in the case where a table is found but
        reflection fails.

        """
        try:
            return self._table(table_name)
        except InvalidRequestError:
            raise KeyError('Table {} could not be found'.format(table_name))

    def __iter__(self):
        """Iterator of table names in this db"""
        return self.keys()

    def __len__(self):
        """Number of tables in this db"""
        return len(self.table_names)

    def __contains__(self, table_name):
        """Whether or not the named table is in this db"""
        return table_name in self.table_names

    def keys(self):
        """Iterator of table names in this db"""
        return (table_name for table_name in self.table_names)

    def items(self):
        """Iterator of ``(table_name, table)`` pairs"""
        return izip(self.keys(), self.values())

    def values(self):
        """Iterator of tables in this db"""
        return (self[table_name] for table_name in self.table_names)

    def get(self, table_name, default=None):
        """table getter

        Args:
            table_name: Name of the table to get
            default: Default value to return if ``table_name`` is not found.

        Returns: a table if ``table_name`` exists, otherwise 'None' or the passed-in default

        """
        try:
            return self[table_name]
        except KeyError:
            return default

    def copy(self):
        """Copy this database instance, keeping the same credentials and hostname"""
        return type(self)(self.hostname, self.credentials)

    def __eq__(self, other):
        """Check if this db is equal to another db"""
        try:
            return self.hostname == other.hostname
        except AttributeError:
            # `other` has no hostname, so it can't be a comparable Db
            return False

    def __ne__(self, other):
        """Check if this db is not equal to another db"""
        return not self == other

    @cached_property
    def engine(self):
        """The :py:class:`Engine <sqlalchemy:sqlalchemy.engine.Engine>` for this database

        It uses pessimistic disconnection handling, checking that the database
        is still connected before executing commands.

        """
        return create_engine(self.db_url, echo_pool=True)

    @cached_property
    def sessionmaker(self):
        """A :py:class:`sessionmaker <sqlalchemy:sqlalchemy.orm.session.sessionmaker>`

        Used to make new sessions with this database, as needed.

        """
        return sessionmaker(bind=self.engine)

    @cached_property
    def table_base(self):
        """Base class for all tables returned by this database

        This base class is created using
        :py:class:`declarative_base <sqlalchemy:sqlalchemy.ext.declarative.declarative_base>`.

        """
        return declarative_base(metadata=self.metadata)

    @cached_property
    def metadata(self):
        """:py:class:`MetaData <sqlalchemy:sqlalchemy.schema.MetaData>` for this database

        This can be used for introspection of reflected items.

        Note:

            Tables that haven't been reflected won't show up in metadata.
            To reflect a table, use :py:meth:`reflect_table`.

        """
        return MetaData(bind=self.engine)

    @cached_property
    def db_url(self):
        """The connection URL for this database, including credentials"""
        template = "postgresql://{username}:{password}@{host}:{port}/vmdb_production"
        result = template.format(host=self.hostname, port=self.port, **self.credentials)
        logger.info("[DB] db_url is %s", result)
        return result

    @cached_property
    def table_names(self):
        """A sorted list of table names available in this database."""
        # rails table names follow similar rules as pep8 identifiers; expose them as such
        return sorted(inspect(self.engine).get_table_names())

    @cached_property
    def session(self):
        """Returns a :py:class:`Session <sqlalchemy:sqlalchemy.orm.session.Session>`

        This is used for database queries. For writing to the database,
        start a :py:meth:`transaction`.

        Note:

            This attribute is cached. In cases where a new session needs to be
            explicitly created, use :py:meth:`sessionmaker`.

        """
        return self.sessionmaker(autocommit=True)

    @property
    @contextmanager
    def transaction(self):
        """Context manager for simple transaction management

        Sessions understand the concept of transactions, and provide context
        managers to handle conditionally committing or rolling back
        transactions as needed.

        Note:

            Sessions automatically commit transactions by default. For
            predictable results when writing to the database, use the
            transaction manager.

        Usage:

            with db.transaction:
                db.session.do_something()

        """
        with self.session.begin():
            yield

    def reflect_table(self, table_name):
        """Populate :py:attr:`metadata` with information on a table

        Args:
            table_name: The name of a table to reflect

        """
        self.metadata.reflect(only=[table_name])

    def _table(self, table_name):
        """Retrieves, reflects, and caches table objects

        Actual implementation of __getitem__

        """
        try:
            return self._table_cache[table_name]
        except KeyError:
            self.reflect_table(table_name)
            table = self.metadata.tables[table_name]
            table_dict = {
                '__table__': table,
                '__tablename__': table_name
            }

            try:
                table_cls = type(str(table_name), (self.table_base,), table_dict)
                self._table_cache[table_name] = table_cls
                return table_cls
            except ArgumentError:
                # This usually happens on join tables with no PKs
                logger.info('Unable to create table class for table "{}"'.format(table_name))
                return None


@contextmanager
def database_on_server(hostname, **kwargs):
    """Context manager yielding a :py:class:`Db` for the given hostname."""
    db_obj = Db(hostname=hostname, **kwargs)
    yield db_obj
# -*- coding: utf-8 -*- """This module contains REST API specific tests.""" import random import pytest import fauxfactory from cfme import test_requirements from cfme.infrastructure.provider.rhevm import RHEVMProvider from cfme.infrastructure.provider.virtualcenter import VMwareProvider from cfme.rest.gen_data import arbitration_rules as _arbitration_rules from cfme.rest.gen_data import arbitration_settings as _arbitration_settings from cfme.rest.gen_data import automation_requests_data from cfme.rest.gen_data import vm as _vm from fixtures.provider import setup_one_or_skip from utils import error from utils.blockers import BZ from utils.providers import ProviderFilter from utils.rest import assert_response from utils.version import current_version from utils.wait import wait_for, wait_for_decorator pytestmark = [test_requirements.rest] @pytest.fixture(scope="module") def a_provider(request): pf = ProviderFilter(classes=[VMwareProvider, RHEVMProvider]) return setup_one_or_skip(request, filters=[pf]) @pytest.fixture(scope="function") def vm(request, a_provider, appliance): return _vm(request, a_provider, appliance.rest_api) def wait_for_requests(requests): def _finished(): for request in requests: request.reload() if request.request_state != 'finished': return False return True wait_for(_finished, num_sec=45, delay=5, message="requests finished") @pytest.mark.tier(2) @pytest.mark.parametrize( "from_detail", [True, False], ids=["from_detail", "from_collection"]) def test_vm_scan(appliance, vm, from_detail): rest_vm = appliance.rest_api.collections.vms.get(name=vm) if from_detail: response = rest_vm.action.scan() else: response, = appliance.rest_api.collections.vms.action.scan(rest_vm) assert_response(appliance) @wait_for_decorator(timeout="5m", delay=5, message="REST running scanning vm finishes") def _finished(): response.task.reload() if response.task.status.lower() in {"error"}: pytest.fail("Error when running scan vm method: `{}`".format(response.task.message)) return response.task.state.lower() == 'finished' COLLECTIONS_ADDED_IN_58 = { "actions", "alert_definitions", "alerts", "authentications", "configuration_script_payloads", "configuration_script_sources", "load_balancers", } COLLECTIONS_REMOVED_IN_59 = { "arbitration_settings", "arbitration_profiles", "virtual_templates", "arbitration_rules", } COLLECTIONS_ALL = { "actions", "alert_definitions", "alerts", "arbitration_profiles", "arbitration_rules", "arbitration_settings", "authentications", "automate", "automate_domains", "automation_requests", "availability_zones", "blueprints", "categories", "chargebacks", "cloud_networks", "clusters", "conditions", "configuration_script_payloads", "configuration_script_sources", "container_deployments", "currencies", "data_stores", "events", "features", "flavors", "groups", "hosts", "instances", "load_balancers", "measures", "notifications", "orchestration_templates", "pictures", "policies", "policy_actions", "policy_profiles", "providers", "provision_dialogs", "provision_requests", "rates", "reports", "request_tasks", "requests", "resource_pools", "results", "roles", "security_groups", "servers", "service_catalogs", "service_dialogs", "service_orders", "service_requests", "service_templates", "services", "settings", "tags", "tasks", "templates", "tenants", "users", "virtual_templates", "vms", "zones" } # non-typical collections without "id" and "resources" COLLECTIONS_OMMITED = {"settings"} @pytest.mark.tier(3) @pytest.mark.parametrize("collection_name", COLLECTIONS_ALL) 
@pytest.mark.uncollectif( lambda collection_name: (collection_name in COLLECTIONS_OMMITED) or (collection_name in COLLECTIONS_ADDED_IN_58 and current_version() < "5.8") or (collection_name in COLLECTIONS_REMOVED_IN_59 and current_version() >= "5.9") ) def test_query_simple_collections(appliance, collection_name): """This test tries to load each of the listed collections. 'Simple' collection means that they have no usable actions that we could try to run Steps: * GET /api/<collection_name> Metadata: test_flag: rest """ collection = getattr(appliance.rest_api.collections, collection_name) assert_response(appliance) collection.reload() list(collection) # collections affected by BZ 1437201 in versions < 5.9 COLLECTIONS_BUGGY_ATTRS = {"results", "service_catalogs", "automate", "categories", "roles"} @pytest.mark.tier(3) @pytest.mark.parametrize("collection_name", COLLECTIONS_ALL) @pytest.mark.uncollectif( lambda collection_name: (collection_name in COLLECTIONS_OMMITED) or (collection_name in COLLECTIONS_ADDED_IN_58 and current_version() < "5.8") or (collection_name in COLLECTIONS_REMOVED_IN_59 and current_version() >= "5.9") ) @pytest.mark.meta(blockers=['GH#ManageIQ/manageiq:15754']) def test_select_attributes(appliance, collection_name): """Tests that it's possible to limit returned attributes. Metadata: test_flag: rest """ if collection_name in COLLECTIONS_BUGGY_ATTRS and current_version() < '5.9': pytest.skip("Affected by BZ 1437201, cannot test.") collection = getattr(appliance.rest_api.collections, collection_name) response = appliance.rest_api.get( '{}{}'.format(collection._href, '?expand=resources&attributes=id')) assert_response(appliance) for resource in response['resources']: assert 'id' in resource expected_len = 2 if 'href' in resource else 1 assert len(resource) == expected_len @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_add_picture(appliance): """Tests adding picture. Metadata: test_flag: rest """ collection = appliance.rest_api.collections.pictures count = collection.count collection.action.create({ "extension": "png", "content": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcS" "JAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="}) assert_response(appliance) collection.reload() assert collection.count == count + 1 @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_add_picture_invalid_extension(appliance): """Tests adding picture with invalid extension. Metadata: test_flag: rest """ collection = appliance.rest_api.collections.pictures count = collection.count with error.expected('Extension must be'): collection.action.create({ "extension": "xcf", "content": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcS" "JAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="}) assert_response(appliance, http_status=400) collection.reload() assert collection.count == count @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_add_picture_invalid_data(appliance): """Tests adding picture with invalid content. Metadata: test_flag: rest """ collection = appliance.rest_api.collections.pictures count = collection.count with error.expected('invalid base64'): collection.action.create({ "extension": "png", "content": "invalid"}) assert_response(appliance, http_status=400) collection.reload() assert collection.count == count @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_http_options(appliance): """Tests OPTIONS http method. 
Metadata: test_flag: rest """ assert 'boot_time' in appliance.rest_api.collections.vms.options()['attributes'] assert_response(appliance) @pytest.mark.uncollectif(lambda: current_version() < '5.8') @pytest.mark.parametrize("collection_name", ["hosts", "clusters"]) def test_http_options_node_types(appliance, collection_name): """Tests that OPTIONS http method on Hosts and Clusters collection returns node_types. Metadata: test_flag: rest """ collection = getattr(appliance.rest_api.collections, collection_name) assert 'node_types' in collection.options()['data'] assert_response(appliance) @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_http_options_subcollections(appliance): """Tests that OPTIONS returns supported subcollections. Metadata: test_flag: rest """ assert 'tags' in appliance.rest_api.collections.vms.options()['subcollections'] assert_response(appliance) @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_server_info(appliance): """Check that server info is present. Metadata: test_flag: rest """ assert all(item in appliance.rest_api.server_info for item in ('appliance', 'build', 'version')) @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_server_info_href(appliance): """Check that appliance's server, zone and region is present. Metadata: test_flag: rest """ items = ('server_href', 'zone_href', 'region_href') for item in items: assert item in appliance.rest_api.server_info assert 'id' in appliance.rest_api.get(appliance.rest_api.server_info[item]) @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_default_region(appliance): """Check that the default region is present. Metadata: test_flag: rest """ reg = appliance.rest_api.collections.regions[0] assert hasattr(reg, 'guid') assert hasattr(reg, 'region') @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_product_info(appliance): """Check that product info is present. Metadata: test_flag: rest """ assert all(item in appliance.rest_api.product_info for item in ('copyright', 'name', 'name_full', 'support_website', 'support_website_text')) @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_identity(appliance): """Check that user's identity is present. Metadata: test_flag: rest """ assert all(item in appliance.rest_api.identity for item in ('userid', 'name', 'group', 'role', 'tenant', 'groups')) @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_user_settings(appliance): """Check that user's settings are returned. Metadata: test_flag: rest """ assert isinstance(appliance.rest_api.settings, dict) @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_datetime_filtering(appliance, a_provider): """Tests support for DateTime filtering with timestamps in YYYY-MM-DDTHH:MM:SSZ format. 
Metadata: test_flag: rest """ collection = appliance.rest_api.collections.vms url_string = '{}{}'.format( collection._href, '?expand=resources&attributes=created_on&sort_by=created_on&sort_order=asc' '&filter[]=created_on{}{}') vms_num = len(collection) assert vms_num > 3 baseline_vm = collection[vms_num / 2] baseline_datetime = baseline_vm._data['created_on'] # YYYY-MM-DDTHH:MM:SSZ def _get_filtered_resources(operator): return appliance.rest_api.get(url_string.format(operator, baseline_datetime))['resources'] older_resources = _get_filtered_resources('<') newer_resources = _get_filtered_resources('>') matching_resources = _get_filtered_resources('=') # this will fail once BZ1437529 is fixed # should be: ``assert matching_resources`` assert not matching_resources if older_resources: last_older = collection.get(id=older_resources[-1]['id']) assert last_older.created_on < baseline_vm.created_on if newer_resources: first_newer = collection.get(id=newer_resources[0]['id']) # this will fail once BZ1437529 is fixed # should be: ``assert first_newer.created_on > baseline_vm.created_on`` assert first_newer.created_on == baseline_vm.created_on @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_date_filtering(appliance, a_provider): """Tests support for DateTime filtering with timestamps in YYYY-MM-DD format. Metadata: test_flag: rest """ collection = appliance.rest_api.collections.vms url_string = '{}{}'.format( collection._href, '?expand=resources&attributes=created_on&sort_by=created_on&sort_order=desc' '&filter[]=created_on{}{}') vms_num = len(collection) assert vms_num > 3 baseline_vm = collection[vms_num / 2] baseline_date, _ = baseline_vm._data['created_on'].split('T') # YYYY-MM-DD def _get_filtered_resources(operator): return appliance.rest_api.get(url_string.format(operator, baseline_date))['resources'] older_resources = _get_filtered_resources('<') newer_resources = _get_filtered_resources('>') matching_resources = _get_filtered_resources('=') assert matching_resources if newer_resources: last_newer = collection.get(id=newer_resources[-1]['id']) assert last_newer.created_on > baseline_vm.created_on if older_resources: first_older = collection.get(id=older_resources[0]['id']) assert first_older.created_on < baseline_vm.created_on @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_resources_hiding(appliance): """Test that it's possible to hide resources in response. Metadata: test_flag: rest """ roles = appliance.rest_api.collections.roles resources_visible = appliance.rest_api.get(roles._href + '?filter[]=read_only=true') assert_response(appliance) assert 'resources' in resources_visible resources_hidden = appliance.rest_api.get( roles._href + '?filter[]=read_only=true&hide=resources') assert_response(appliance) assert 'resources' not in resources_hidden assert resources_hidden['subcount'] == resources_visible['subcount'] @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_sorting_by_attributes(appliance): """Test that it's possible to sort resources by attributes. 
Metadata: test_flag: rest """ url_string = '{}{}'.format( appliance.rest_api.collections.groups._href, '?expand=resources&attributes=id&sort_by=id&sort_order={}') response_asc = appliance.rest_api.get(url_string.format('asc')) assert_response(appliance) assert 'resources' in response_asc response_desc = appliance.rest_api.get(url_string.format('desc')) assert_response(appliance) assert 'resources' in response_desc assert response_asc['subcount'] == response_desc['subcount'] id_last = 0 for resource in response_asc['resources']: assert resource['id'] > id_last id_last = resource['id'] id_last += 1 for resource in response_desc['resources']: assert resource['id'] < id_last id_last = resource['id'] @pytest.mark.uncollectif(lambda: current_version() < '5.8') @pytest.mark.parametrize('vendor', ['Microsoft', 'Redhat', 'Vmware']) def test_collection_class_valid(appliance, a_provider, vendor): """Tests that it's possible to query using collection_class. Metadata: test_flag: rest """ collection = appliance.rest_api.collections.vms resource_type = collection[0].type tested_type = 'ManageIQ::Providers::{}::InfraManager::Vm'.format(vendor) response = collection.query_string(collection_class=tested_type) if resource_type == tested_type: assert response.count > 0 # all returned entities must have the same type if response.count: rand_num = 5 if response.count >= 5 else response.count rand_entities = random.sample(response, rand_num) for entity in rand_entities: assert entity.type == tested_type @pytest.mark.uncollectif(lambda: current_version() < '5.8') def test_collection_class_invalid(appliance): """Tests that it's not possible to query using invalid collection_class. Metadata: test_flag: rest """ with error.expected('Invalid collection_class'): appliance.rest_api.collections.vms.query_string( collection_class='ManageIQ::Providers::Nonexistent::Vm') class TestBulkQueryRESTAPI(object): @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_bulk_query(self, appliance): """Tests bulk query referencing resources by attributes id, href and guid Metadata: test_flag: rest """ collection = appliance.rest_api.collections.events data0, data1, data2 = collection[0]._data, collection[1]._data, collection[2]._data response = appliance.rest_api.collections.events.action.query( {'id': data0['id']}, {'href': data1['href']}, {'guid': data2['guid']}) assert_response(appliance) assert len(response) == 3 assert (data0 == response[0]._data and data1 == response[1]._data and data2 == response[2]._data) @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_bulk_query_users(self, appliance): """Tests bulk query on 'users' collection Metadata: test_flag: rest """ data = appliance.rest_api.collections.users[0]._data response = appliance.rest_api.collections.users.action.query( {'name': data['name']}, {'userid': data['userid']}) assert_response(appliance) assert len(response) == 2 assert data['id'] == response[0]._data['id'] == response[1]._data['id'] @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_bulk_query_roles(self, appliance): """Tests bulk query on 'roles' collection Metadata: test_flag: rest """ collection = appliance.rest_api.collections.roles data0, data1 = collection[0]._data, collection[1]._data response = appliance.rest_api.collections.roles.action.query( {'name': data0['name']}, {'name': data1['name']}) assert_response(appliance) assert len(response) == 2 assert data0 == response[0]._data and data1 == response[1]._data @pytest.mark.uncollectif(lambda: 
current_version() < '5.7') def test_bulk_query_groups(self, appliance): """Tests bulk query on 'groups' collection Metadata: test_flag: rest """ collection = appliance.rest_api.collections.groups data0, data1 = collection[0]._data, collection[1]._data response = appliance.rest_api.collections.groups.action.query( {'description': data0['description']}, {'description': data1['description']}) assert_response(appliance) assert len(response) == 2 assert data0 == response[0]._data and data1 == response[1]._data class TestArbitrationSettingsRESTAPI(object): @pytest.fixture(scope='function') def arbitration_settings(self, request, appliance): num_settings = 2 response = _arbitration_settings(request, appliance.rest_api, num=num_settings) assert_response(appliance) assert len(response) == num_settings return response @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_create_arbitration_settings(self, appliance, arbitration_settings): """Tests create arbitration settings. Metadata: test_flag: rest """ for setting in arbitration_settings: record = appliance.rest_api.collections.arbitration_settings.get(id=setting.id) assert record._data == setting._data @pytest.mark.uncollectif(lambda: current_version() < '5.7') @pytest.mark.parametrize('method', ['post', 'delete']) def test_delete_arbitration_settings_from_detail(self, appliance, arbitration_settings, method): """Tests delete arbitration settings from detail. Metadata: test_flag: rest """ for setting in arbitration_settings: setting.action.delete(force_method=method) assert_response(appliance) with error.expected('ActiveRecord::RecordNotFound'): setting.action.delete(force_method=method) assert_response(appliance, http_status=404) @pytest.mark.uncollectif(lambda: current_version() < '5.7') def test_delete_arbitration_settings_from_collection(self, appliance, arbitration_settings): """Tests delete arbitration settings from collection. Metadata: test_flag: rest """ collection = appliance.rest_api.collections.arbitration_settings collection.action.delete(*arbitration_settings) assert_response(appliance) with error.expected('ActiveRecord::RecordNotFound'): collection.action.delete(*arbitration_settings) assert_response(appliance, http_status=404) @pytest.mark.uncollectif(lambda: current_version() < '5.7') @pytest.mark.parametrize( "from_detail", [True, False], ids=["from_detail", "from_collection"]) def test_edit_arbitration_settings(self, appliance, arbitration_settings, from_detail): """Tests edit arbitration settings. 
Metadata: test_flag: rest """ num_settings = len(arbitration_settings) uniq = [fauxfactory.gen_alphanumeric(5) for _ in range(num_settings)] new = [{'name': 'test_edit{}'.format(u), 'display_name': 'Test Edit{}'.format(u)} for u in uniq] if from_detail: edited = [] for i in range(num_settings): edited.append(arbitration_settings[i].action.edit(**new[i])) assert_response(appliance) else: for i in range(num_settings): new[i].update(arbitration_settings[i]._ref_repr()) edited = appliance.rest_api.collections.arbitration_settings.action.edit(*new) assert_response(appliance) assert len(edited) == num_settings for i in range(num_settings): assert (edited[i].name == new[i]['name'] and edited[i].display_name == new[i]['display_name']) class TestArbitrationRulesRESTAPI(object): @pytest.fixture(scope='function') def arbitration_rules(self, request, appliance): num_rules = 2 response = _arbitration_rules(request, appliance.rest_api, num=num_rules) assert_response(appliance) assert len(response) == num_rules return response @pytest.mark.uncollectif(lambda: current_version() < '5.7' or current_version() >= '5.9') def test_create_arbitration_rules(self, arbitration_rules, appliance): """Tests create arbitration rules. Metadata: test_flag: rest """ for rule in arbitration_rules: record = appliance.rest_api.collections.arbitration_rules.get(id=rule.id) assert record.description == rule.description # there's no test for the DELETE method as it is not working and won't be fixed, see BZ 1410504 @pytest.mark.uncollectif(lambda: current_version() < '5.7' or current_version() >= '5.9') def test_delete_arbitration_rules_from_detail_post(self, arbitration_rules, appliance): """Tests delete arbitration rules from detail. Metadata: test_flag: rest """ for entity in arbitration_rules: entity.action.delete.POST() assert_response(appliance) with error.expected('ActiveRecord::RecordNotFound'): entity.action.delete.POST() assert_response(appliance, http_status=404) @pytest.mark.uncollectif(lambda: current_version() < '5.7' or current_version() >= '5.9') def test_delete_arbitration_rules_from_collection(self, arbitration_rules, appliance): """Tests delete arbitration rules from collection. Metadata: test_flag: rest """ collection = appliance.rest_api.collections.arbitration_rules collection.action.delete(*arbitration_rules) assert_response(appliance) with error.expected('ActiveRecord::RecordNotFound'): collection.action.delete(*arbitration_rules) assert_response(appliance, http_status=404) @pytest.mark.uncollectif(lambda: current_version() < '5.7' or current_version() >= '5.9') @pytest.mark.parametrize( 'from_detail', [True, False], ids=['from_detail', 'from_collection']) def test_edit_arbitration_rules(self, arbitration_rules, appliance, from_detail): """Tests edit arbitration rules. 
        Metadata:
            test_flag: rest
        """
        num_rules = len(arbitration_rules)
        uniq = [fauxfactory.gen_alphanumeric(5) for _ in range(num_rules)]
        new = [{'description': 'new test admin rule {}'.format(u)} for u in uniq]
        if from_detail:
            edited = []
            for i in range(num_rules):
                edited.append(arbitration_rules[i].action.edit(**new[i]))
                assert_response(appliance)
        else:
            for i in range(num_rules):
                new[i].update(arbitration_rules[i]._ref_repr())
            edited = appliance.rest_api.collections.arbitration_rules.action.edit(*new)
            assert_response(appliance)
        assert len(edited) == num_rules
        for i in range(num_rules):
            assert edited[i].description == new[i]['description']


class TestNotificationsRESTAPI(object):
    @pytest.fixture(scope='function')
    def generate_notifications(self, appliance):
        requests_data = automation_requests_data('nonexistent_vm')
        requests = appliance.rest_api.collections.automation_requests.action.create(
            *requests_data[:2])
        assert len(requests) == 2
        wait_for_requests(requests)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    @pytest.mark.parametrize(
        'from_detail', [True, False],
        ids=['from_detail', 'from_collection'])
    def test_mark_notifications(self, appliance, generate_notifications, from_detail):
        """Tests marking notifications as seen.

        Metadata:
            test_flag: rest
        """
        unseen = appliance.rest_api.collections.notifications.find_by(seen=False)
        notifications = [unseen[-i] for i in range(1, 3)]

        if from_detail:
            for ent in notifications:
                ent.action.mark_as_seen()
                assert_response(appliance)
        else:
            appliance.rest_api.collections.notifications.action.mark_as_seen(*notifications)
            assert_response(appliance)

        for ent in notifications:
            ent.reload()
            assert ent.seen

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    @pytest.mark.parametrize('method', ['post', 'delete'])
    def test_delete_notifications_from_detail(self, appliance, generate_notifications, method):
        """Tests delete notifications from detail.

        Metadata:
            test_flag: rest
        """
        if method == 'delete' and BZ('1420872', forced_streams=['5.7', '5.8', 'upstream']).blocks:
            pytest.skip("Affected by BZ1420872, cannot test.")
        collection = appliance.rest_api.collections.notifications
        collection.reload()
        notifications = [collection[-i] for i in range(1, 3)]
        for entity in notifications:
            entity.action.delete(force_method=method)
            assert_response(appliance)
            with error.expected('ActiveRecord::RecordNotFound'):
                entity.action.delete(force_method=method)
            assert_response(appliance, http_status=404)

    @pytest.mark.uncollectif(lambda: current_version() < '5.7')
    def test_delete_notifications_from_collection(self, appliance, generate_notifications):
        """Tests delete notifications from collection.

        Metadata:
            test_flag: rest
        """
        collection = appliance.rest_api.collections.notifications
        collection.reload()
        notifications = [collection[-i] for i in range(1, 3)]
        collection.action.delete(*notifications)
        assert_response(appliance)
        with error.expected("ActiveRecord::RecordNotFound"):
            collection.action.delete(*notifications)
        assert_response(appliance, http_status=404)
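Every delete test above repeats the same four-step shape: delete, assert success, delete again inside `error.expected`, assert a 404. If the suite grows, that shape could be factored into one helper along these lines (a sketch only; `_delete_twice` is a hypothetical name, and it assumes the same `assert_response` and `error` helpers imported at the top of this module):

def _delete_twice(appliance, entity, method='post'):
    """Delete a REST entity, then verify that a repeated delete returns 404."""
    entity.action.delete(force_method=method)
    assert_response(appliance)
    with error.expected('ActiveRecord::RecordNotFound'):
        entity.action.delete(force_method=method)
    assert_response(appliance, http_status=404)

A test body would then shrink to a loop such as `for entity in notifications: _delete_twice(appliance, entity, method=method)`.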
""" DataFrame --------- An efficient 2D container for potentially mixed-type time series or other labeled data series. Similar to its R counterpart, data.frame, except providing automatic data alignment and a host of useful data manipulation methods having to do with the labeling information """ import collections from collections import OrderedDict, abc import functools from io import StringIO import itertools import sys import warnings from textwrap import dedent from typing import FrozenSet, List, Optional, Set, Type, Union import numpy as np import numpy.ma as ma from pandas._config import get_option from pandas._libs import lib, algos as libalgos from pandas.util._decorators import (Appender, Substitution, rewrite_axis_style_signature, deprecate_kwarg) from pandas.util._validators import (validate_bool_kwarg, validate_axis_style_args) from pandas.compat import PY36, raise_with_traceback from pandas.compat.numpy import function as nv from pandas.core.arrays.sparse import SparseFrameAccessor from pandas.core.dtypes.cast import ( maybe_upcast, cast_scalar_to_array, infer_dtype_from_scalar, maybe_cast_to_datetime, maybe_infer_to_datetimelike, maybe_convert_platform, maybe_downcast_to_dtype, invalidate_string_dtypes, coerce_to_dtypes, maybe_upcast_putmask, find_common_type) from pandas.core.dtypes.common import ( is_dict_like, is_datetime64tz_dtype, is_object_dtype, is_extension_type, is_extension_array_dtype, is_datetime64_any_dtype, is_bool_dtype, is_integer_dtype, is_float_dtype, is_integer, is_scalar, is_dtype_equal, needs_i8_conversion, infer_dtype_from_object, ensure_float64, ensure_int64, ensure_platform_int, is_list_like, is_nested_list_like, is_iterator, is_sequence, is_named_tuple) from pandas.core.dtypes.generic import ( ABCSeries, ABCDataFrame, ABCIndexClass, ABCMultiIndex) from pandas.core.dtypes.missing import isna, notna from pandas.core import algorithms from pandas.core import common as com from pandas.core import nanops from pandas.core import ops from pandas.core.accessor import CachedAccessor from pandas.core.arrays import Categorical, ExtensionArray from pandas.core.arrays.datetimelike import ( DatetimeLikeArrayMixin as DatetimeLikeArray ) from pandas.core.generic import NDFrame, _shared_docs from pandas.core.index import (Index, MultiIndex, ensure_index, ensure_index_from_sequences) from pandas.core.indexes import base as ibase from pandas.core.indexes.datetimes import DatetimeIndex from pandas.core.indexes.period import PeriodIndex from pandas.core.indexing import (maybe_droplevels, convert_to_index_sliceable, check_bool_indexer) from pandas.core.internals import BlockManager from pandas.core.internals.construction import ( masked_rec_array_to_mgr, get_names_from_index, to_arrays, reorder_arrays, init_ndarray, init_dict, arrays_to_mgr, sanitize_index) from pandas.core.series import Series from pandas.io.formats import console from pandas.io.formats import format as fmt from pandas.io.formats.printing import pprint_thing import pandas.plotting # --------------------------------------------------------------------- # Docstring templates _shared_doc_kwargs = dict( axes='index, columns', klass='DataFrame', axes_single_arg="{0 or 'index', 1 or 'columns'}", axis="""axis : {0 or 'index', 1 or 'columns'}, default 0 If 0 or 'index': apply function to each column. If 1 or 'columns': apply function to each row.""", optional_by=""" by : str or list of str Name or list of names to sort by. 
- if `axis` is 0 or `'index'` then `by` may contain index levels and/or column labels - if `axis` is 1 or `'columns'` then `by` may contain column levels and/or index labels .. versionchanged:: 0.23.0 Allow specifying index or column level names.""", versionadded_to_excel='', optional_labels="""labels : array-like, optional New labels / index to conform the axis specified by 'axis' to.""", optional_axis="""axis : int or str, optional Axis to target. Can be either the axis name ('index', 'columns') or number (0, 1).""", ) _numeric_only_doc = """numeric_only : boolean, default None Include only float, int, boolean data. If None, will attempt to use everything, then use only numeric data """ _merge_doc = """ Merge DataFrame or named Series objects with a database-style join. The join is done on columns or indexes. If joining columns on columns, the DataFrame indexes *will be ignored*. Otherwise if joining indexes on indexes or indexes on a column or columns, the index will be passed on. Parameters ----------%s right : DataFrame or named Series Object to merge with. how : {'left', 'right', 'outer', 'inner'}, default 'inner' Type of merge to be performed. * left: use only keys from left frame, similar to a SQL left outer join; preserve key order. * right: use only keys from right frame, similar to a SQL right outer join; preserve key order. * outer: use union of keys from both frames, similar to a SQL full outer join; sort keys lexicographically. * inner: use intersection of keys from both frames, similar to a SQL inner join; preserve the order of the left keys. on : label or list Column or index level names to join on. These must be found in both DataFrames. If `on` is None and not merging on indexes then this defaults to the intersection of the columns in both DataFrames. left_on : label or list, or array-like Column or index level names to join on in the left DataFrame. Can also be an array or list of arrays of the length of the left DataFrame. These arrays are treated as if they are columns. right_on : label or list, or array-like Column or index level names to join on in the right DataFrame. Can also be an array or list of arrays of the length of the right DataFrame. These arrays are treated as if they are columns. left_index : bool, default False Use the index from the left DataFrame as the join key(s). If it is a MultiIndex, the number of keys in the other DataFrame (either the index or a number of columns) must match the number of levels. right_index : bool, default False Use the index from the right DataFrame as the join key. Same caveats as left_index. sort : bool, default False Sort the join keys lexicographically in the result DataFrame. If False, the order of the join keys depends on the join type (how keyword). suffixes : tuple of (str, str), default ('_x', '_y') Suffix to apply to overlapping column names in the left and right side, respectively. To raise an exception on overlapping columns use (False, False). copy : bool, default True If False, avoid copy if possible. indicator : bool or str, default False If True, adds a column to output DataFrame called "_merge" with information on the source of each row. If string, column with information on source of each row will be added to output DataFrame, and column will be named value of string. 
Information column is Categorical-type and takes on a value of "left_only" for observations whose merge key only appears in 'left' DataFrame, "right_only" for observations whose merge key only appears in 'right' DataFrame, and "both" if the observation's merge key is found in both. validate : str, optional If specified, checks if merge is of specified type. * "one_to_one" or "1:1": check if merge keys are unique in both left and right datasets. * "one_to_many" or "1:m": check if merge keys are unique in left dataset. * "many_to_one" or "m:1": check if merge keys are unique in right dataset. * "many_to_many" or "m:m": allowed, but does not result in checks. .. versionadded:: 0.21.0 Returns ------- DataFrame A DataFrame of the two merged objects. See Also -------- merge_ordered : Merge with optional filling/interpolation. merge_asof : Merge on nearest keys. DataFrame.join : Similar method using indices. Notes ----- Support for specifying index levels as the `on`, `left_on`, and `right_on` parameters was added in version 0.23.0 Support for merging named Series objects was added in version 0.24.0 Examples -------- >>> df1 = pd.DataFrame({'lkey': ['foo', 'bar', 'baz', 'foo'], ... 'value': [1, 2, 3, 5]}) >>> df2 = pd.DataFrame({'rkey': ['foo', 'bar', 'baz', 'foo'], ... 'value': [5, 6, 7, 8]}) >>> df1 lkey value 0 foo 1 1 bar 2 2 baz 3 3 foo 5 >>> df2 rkey value 0 foo 5 1 bar 6 2 baz 7 3 foo 8 Merge df1 and df2 on the lkey and rkey columns. The value columns have the default suffixes, _x and _y, appended. >>> df1.merge(df2, left_on='lkey', right_on='rkey') lkey value_x rkey value_y 0 foo 1 foo 5 1 foo 1 foo 8 2 foo 5 foo 5 3 foo 5 foo 8 4 bar 2 bar 6 5 baz 3 baz 7 Merge DataFrames df1 and df2 with specified left and right suffixes appended to any overlapping columns. >>> df1.merge(df2, left_on='lkey', right_on='rkey', ... suffixes=('_left', '_right')) lkey value_left rkey value_right 0 foo 1 foo 5 1 foo 1 foo 8 2 foo 5 foo 5 3 foo 5 foo 8 4 bar 2 bar 6 5 baz 3 baz 7 Merge DataFrames df1 and df2, but raise an exception if the DataFrames have any overlapping columns. >>> df1.merge(df2, left_on='lkey', right_on='rkey', suffixes=(False, False)) Traceback (most recent call last): ... ValueError: columns overlap but no suffix specified: Index(['value'], dtype='object') """ # ----------------------------------------------------------------------- # DataFrame class class DataFrame(NDFrame): """ Two-dimensional size-mutable, potentially heterogeneous tabular data structure with labeled axes (rows and columns). Arithmetic operations align on both row and column labels. Can be thought of as a dict-like container for Series objects. The primary pandas data structure. Parameters ---------- data : ndarray (structured or homogeneous), Iterable, dict, or DataFrame Dict can contain Series, arrays, constants, or list-like objects .. versionchanged :: 0.23.0 If data is a dict, argument order is maintained for Python 3.6 and later. index : Index or array-like Index to use for resulting frame. Will default to RangeIndex if no indexing information part of input data and no index provided columns : Index or array-like Column labels to use for resulting frame. Will default to RangeIndex (0, 1, 2, ..., n) if no column labels are provided dtype : dtype, default None Data type to force. Only a single dtype is allowed. If None, infer copy : boolean, default False Copy data from inputs. Only affects DataFrame / 2d ndarray input See Also -------- DataFrame.from_records : Constructor from tuples, also record arrays. 
DataFrame.from_dict : From dicts of Series, arrays, or dicts. DataFrame.from_items : From sequence of (key, value) pairs read_csv, pandas.read_table, pandas.read_clipboard. Examples -------- Constructing DataFrame from a dictionary. >>> d = {'col1': [1, 2], 'col2': [3, 4]} >>> df = pd.DataFrame(data=d) >>> df col1 col2 0 1 3 1 2 4 Notice that the inferred dtype is int64. >>> df.dtypes col1 int64 col2 int64 dtype: object To enforce a single dtype: >>> df = pd.DataFrame(data=d, dtype=np.int8) >>> df.dtypes col1 int8 col2 int8 dtype: object Constructing DataFrame from numpy ndarray: >>> df2 = pd.DataFrame(np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]), ... columns=['a', 'b', 'c']) >>> df2 a b c 0 1 2 3 1 4 5 6 2 7 8 9 """ @property def _constructor(self): return DataFrame _constructor_sliced = Series # type: Type[Series] _deprecations = NDFrame._deprecations | frozenset([ 'get_value', 'set_value', 'from_csv', 'from_items' ]) # type: FrozenSet[str] _accessors = set() # type: Set[str] @property def _constructor_expanddim(self): raise NotImplementedError("Not supported for DataFrames!") # ---------------------------------------------------------------------- # Constructors def __init__(self, data=None, index=None, columns=None, dtype=None, copy=False): if data is None: data = {} if dtype is not None: dtype = self._validate_dtype(dtype) if isinstance(data, DataFrame): data = data._data if isinstance(data, BlockManager): mgr = self._init_mgr(data, axes=dict(index=index, columns=columns), dtype=dtype, copy=copy) elif isinstance(data, dict): mgr = init_dict(data, index, columns, dtype=dtype) elif isinstance(data, ma.MaskedArray): import numpy.ma.mrecords as mrecords # masked recarray if isinstance(data, mrecords.MaskedRecords): mgr = masked_rec_array_to_mgr(data, index, columns, dtype, copy) # a masked array else: mask = ma.getmaskarray(data) if mask.any(): data, fill_value = maybe_upcast(data, copy=True) data.soften_mask() # set hardmask False if it was True data[mask] = fill_value else: data = data.copy() mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) elif isinstance(data, (np.ndarray, Series, Index)): if data.dtype.names: data_columns = list(data.dtype.names) data = {k: data[k] for k in data_columns} if columns is None: columns = data_columns mgr = init_dict(data, index, columns, dtype=dtype) elif getattr(data, 'name', None) is not None: mgr = init_dict({data.name: data}, index, columns, dtype=dtype) else: mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) # For data is list-like, or Iterable (will consume into list) elif (isinstance(data, abc.Iterable) and not isinstance(data, (str, bytes))): if not isinstance(data, abc.Sequence): data = list(data) if len(data) > 0: if is_list_like(data[0]) and getattr(data[0], 'ndim', 1) == 1: if is_named_tuple(data[0]) and columns is None: columns = data[0]._fields arrays, columns = to_arrays(data, columns, dtype=dtype) columns = ensure_index(columns) # set the index if index is None: if isinstance(data[0], Series): index = get_names_from_index(data) elif isinstance(data[0], Categorical): index = ibase.default_index(len(data[0])) else: index = ibase.default_index(len(data)) mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) else: mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) else: mgr = init_dict({}, index, columns, dtype=dtype) else: try: arr = np.array(data, dtype=dtype, copy=copy) except (ValueError, TypeError) as e: exc = TypeError('DataFrame constructor called with ' 'incompatible data and 
dtype: {e}'.format(e=e))
                raise_with_traceback(exc)

            if arr.ndim == 0 and index is not None and columns is not None:
                values = cast_scalar_to_array((len(index), len(columns)),
                                              data, dtype=dtype)
                mgr = init_ndarray(values, index, columns,
                                   dtype=values.dtype, copy=False)
            else:
                raise ValueError('DataFrame constructor not properly called!')

        NDFrame.__init__(self, mgr, fastpath=True)

    # ----------------------------------------------------------------------

    @property
    def axes(self):
        """
        Return a list representing the axes of the DataFrame.

        It has the row axis labels and column axis labels as the only
        members. They are returned in that order.

        Examples
        --------
        >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
        >>> df.axes
        [RangeIndex(start=0, stop=2, step=1), Index(['col1', 'col2'],
        dtype='object')]
        """
        return [self.index, self.columns]

    @property
    def shape(self):
        """
        Return a tuple representing the dimensionality of the DataFrame.

        See Also
        --------
        ndarray.shape

        Examples
        --------
        >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
        >>> df.shape
        (2, 2)

        >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4],
        ...                    'col3': [5, 6]})
        >>> df.shape
        (2, 3)
        """
        return len(self.index), len(self.columns)

    @property
    def _is_homogeneous_type(self):
        """
        Whether all the columns in a DataFrame have the same type.

        Returns
        -------
        bool

        Examples
        --------
        >>> DataFrame({"A": [1, 2], "B": [3, 4]})._is_homogeneous_type
        True
        >>> DataFrame({"A": [1, 2], "B": [3.0, 4.0]})._is_homogeneous_type
        False

        Items with the same type but different sizes are considered
        different types.

        >>> DataFrame({
        ...     "A": np.array([1, 2], dtype=np.int32),
        ...     "B": np.array([1, 2], dtype=np.int64)})._is_homogeneous_type
        False
        """
        if self._data.any_extension_types:
            return len({block.dtype for block in self._data.blocks}) == 1
        else:
            return not self._data.is_mixed_type

    # ----------------------------------------------------------------------
    # Rendering Methods

    def _repr_fits_vertical_(self):
        """
        Check length against max_rows.
        """
        max_rows = get_option("display.max_rows")
        return len(self) <= max_rows

    def _repr_fits_horizontal_(self, ignore_width=False):
        """
        Check if full repr fits in horizontal boundaries imposed by the
        display options width and max_columns.

        In case of a non-interactive session, no boundaries apply.

        `ignore_width` is here so ipynb+HTML output can behave the way users
        expect. display.max_columns remains in effect.
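        A quick way to see these options interact (an illustrative sketch;
        the option values below are arbitrary examples, not defaults):

        >>> import numpy as np
        >>> import pandas as pd
        >>> df = pd.DataFrame(np.random.randn(100, 30))
        >>> pd.set_option('display.max_rows', 10)        # vertical bound
        >>> pd.set_option('display.max_columns', 20)     # horizontal bound
        >>> pd.set_option('display.large_repr', 'info')  # fall back to info view
        >>> print(repr(df))  # doctest: +SKIP
        >>> pd.reset_option('^display')                  # restore defaults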
GH3541, GH3573 """ width, height = console.get_console_size() max_columns = get_option("display.max_columns") nb_columns = len(self.columns) # exceed max columns if ((max_columns and nb_columns > max_columns) or ((not ignore_width) and width and nb_columns > (width // 2))): return False # used by repr_html under IPython notebook or scripts ignore terminal # dims if ignore_width or not console.in_interactive_session(): return True if (get_option('display.width') is not None or console.in_ipython_frontend()): # check at least the column row for excessive width max_rows = 1 else: max_rows = get_option("display.max_rows") # when auto-detecting, so width=None and not in ipython front end # check whether repr fits horizontal by actually checking # the width of the rendered repr buf = StringIO() # only care about the stuff we'll actually print out # and to_string on entire frame may be expensive d = self if not (max_rows is None): # unlimited rows # min of two, where one may be None d = d.iloc[:min(max_rows, len(d))] else: return True d.to_string(buf=buf) value = buf.getvalue() repr_width = max(len(l) for l in value.split('\n')) return repr_width < width def _info_repr(self): """ True if the repr should show the info view. """ info_repr_option = (get_option("display.large_repr") == "info") return info_repr_option and not (self._repr_fits_horizontal_() and self._repr_fits_vertical_()) def __repr__(self): """ Return a string representation for a particular DataFrame. """ buf = StringIO("") if self._info_repr(): self.info(buf=buf) return buf.getvalue() max_rows = get_option("display.max_rows") max_cols = get_option("display.max_columns") show_dimensions = get_option("display.show_dimensions") if get_option("display.expand_frame_repr"): width, _ = console.get_console_size() else: width = None self.to_string(buf=buf, max_rows=max_rows, max_cols=max_cols, line_width=width, show_dimensions=show_dimensions) return buf.getvalue() def _repr_html_(self): """ Return a html representation for a particular DataFrame. Mainly for IPython notebook. """ if self._info_repr(): buf = StringIO("") self.info(buf=buf) # need to escape the <class>, should be the first line. val = buf.getvalue().replace('<', r'&lt;', 1) val = val.replace('>', r'&gt;', 1) return '<pre>' + val + '</pre>' if get_option("display.notebook_repr_html"): max_rows = get_option("display.max_rows") max_cols = get_option("display.max_columns") show_dimensions = get_option("display.show_dimensions") return self.to_html(max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, notebook=True) else: return None @Substitution(header='Write out the column names. If a list of strings ' 'is given, it is assumed to be aliases for the ' 'column names', col_space_type='int', col_space='The minimum width of each column') @Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring) def to_string(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', line_width=None): """ Render a DataFrame to a console-friendly tabular output. %(shared_params)s line_width : int, optional Width to wrap a line in characters. %(returns)s See Also -------- to_html : Convert DataFrame to HTML. 
Examples -------- >>> d = {'col1': [1, 2, 3], 'col2': [4, 5, 6]} >>> df = pd.DataFrame(d) >>> print(df.to_string()) col1 col2 0 1 4 1 2 5 2 3 6 """ formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, line_width=line_width) formatter.to_string() if buf is None: result = formatter.buf.getvalue() return result # ---------------------------------------------------------------------- @property def style(self): """ Property returning a Styler object containing methods for building a styled HTML representation fo the DataFrame. See Also -------- io.formats.style.Styler """ from pandas.io.formats.style import Styler return Styler(self) def iteritems(self): r""" Iterator over (column name, Series) pairs. Iterates over the DataFrame columns, returning a tuple with the column name and the content as a Series. Yields ------ label : object The column names for the DataFrame being iterated over. content : Series The column entries belonging to each label, as a Series. See Also -------- DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs. DataFrame.itertuples : Iterate over DataFrame rows as namedtuples of the values. Examples -------- >>> df = pd.DataFrame({'species': ['bear', 'bear', 'marsupial'], ... 'population': [1864, 22000, 80000]}, ... index=['panda', 'polar', 'koala']) >>> df species population panda bear 1864 polar bear 22000 koala marsupial 80000 >>> for label, content in df.iteritems(): ... print('label:', label) ... print('content:', content, sep='\n') ... label: species content: panda bear polar bear koala marsupial Name: species, dtype: object label: population content: panda 1864 polar 22000 koala 80000 Name: population, dtype: int64 """ if self.columns.is_unique and hasattr(self, '_item_cache'): for k in self.columns: yield k, self._get_item_cache(k) else: for i, k in enumerate(self.columns): yield k, self._ixs(i, axis=1) def iterrows(self): """ Iterate over DataFrame rows as (index, Series) pairs. Yields ------ index : label or tuple of label The index of the row. A tuple for a `MultiIndex`. data : Series The data of the row as a Series. it : generator A generator that iterates over the rows of the frame. See Also -------- itertuples : Iterate over DataFrame rows as namedtuples of the values. iteritems : Iterate over (column name, Series) pairs. Notes ----- 1. Because ``iterrows`` returns a Series for each row, it does **not** preserve dtypes across the rows (dtypes are preserved across columns for DataFrames). For example, >>> df = pd.DataFrame([[1, 1.5]], columns=['int', 'float']) >>> row = next(df.iterrows())[1] >>> row int 1.0 float 1.5 Name: 0, dtype: float64 >>> print(row['int'].dtype) float64 >>> print(df['int'].dtype) int64 To preserve dtypes while iterating over the rows, it is better to use :meth:`itertuples` which returns namedtuples of the values and which is generally faster than ``iterrows``. 2. You should **never modify** something you are iterating over. This is not guaranteed to work in all cases. Depending on the data types, the iterator returns a copy and not a view, and writing to it will have no effect. 
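           For example (a small illustration added here; safe to run as-is),
           assigning through the yielded Series does not write back to the
           parent frame:

           >>> df = pd.DataFrame({'a': [1, 2], 'b': [1.5, 2.5]})
           >>> for _, row in df.iterrows():
           ...     row['a'] = 0      # mutates a temporary Series only
           >>> df['a'].tolist()
           [1, 2]
           >>> df.loc[:, 'a'] = 0    # assign through the frame instead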
""" columns = self.columns klass = self._constructor_sliced for k, v in zip(self.index, self.values): s = klass(v, index=columns, name=k) yield k, s def itertuples(self, index=True, name="Pandas"): """ Iterate over DataFrame rows as namedtuples. Parameters ---------- index : bool, default True If True, return the index as the first element of the tuple. name : str or None, default "Pandas" The name of the returned namedtuples or None to return regular tuples. Returns ------- iterator An object to iterate over namedtuples for each row in the DataFrame with the first field possibly being the index and following fields being the column values. See Also -------- DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs. DataFrame.iteritems : Iterate over (column name, Series) pairs. Notes ----- The column names will be renamed to positional names if they are invalid Python identifiers, repeated, or start with an underscore. With a large number of columns (>255), regular tuples are returned. Examples -------- >>> df = pd.DataFrame({'num_legs': [4, 2], 'num_wings': [0, 2]}, ... index=['dog', 'hawk']) >>> df num_legs num_wings dog 4 0 hawk 2 2 >>> for row in df.itertuples(): ... print(row) ... Pandas(Index='dog', num_legs=4, num_wings=0) Pandas(Index='hawk', num_legs=2, num_wings=2) By setting the `index` parameter to False we can remove the index as the first element of the tuple: >>> for row in df.itertuples(index=False): ... print(row) ... Pandas(num_legs=4, num_wings=0) Pandas(num_legs=2, num_wings=2) With the `name` parameter set we set a custom name for the yielded namedtuples: >>> for row in df.itertuples(name='Animal'): ... print(row) ... Animal(Index='dog', num_legs=4, num_wings=0) Animal(Index='hawk', num_legs=2, num_wings=2) """ arrays = [] fields = list(self.columns) if index: arrays.append(self.index) fields.insert(0, "Index") # use integer indexing because of possible duplicate column names arrays.extend(self.iloc[:, k] for k in range(len(self.columns))) # Python 3 supports at most 255 arguments to constructor if name is not None and len(self.columns) + index < 256: itertuple = collections.namedtuple(name, fields, rename=True) return map(itertuple._make, zip(*arrays)) # fallback to regular tuples return zip(*arrays) items = iteritems def __len__(self): """ Returns length of info axis, but here we use the index. """ return len(self.index) def dot(self, other): """ Compute the matrix multiplication between the DataFrame and other. This method computes the matrix product between the DataFrame and the values of an other Series, DataFrame or a numpy array. It can also be called using ``self @ other`` in Python >= 3.5. Parameters ---------- other : Series, DataFrame or array-like The other object to compute the matrix product with. Returns ------- Series or DataFrame If other is a Series, return the matrix product between self and other as a Serie. If other is a DataFrame or a numpy.array, return the matrix product of self and other in a DataFrame of a np.array. See Also -------- Series.dot: Similar method for Series. Notes ----- The dimensions of DataFrame and other must be compatible in order to compute the matrix multiplication. In addition, the column names of DataFrame and the index of other must contain the same values, as they will be aligned prior to the multiplication. The dot method for Series computes the inner product, instead of the matrix product here. Examples -------- Here we multiply a DataFrame with a Series. 
>>> df = pd.DataFrame([[0, 1, -2, -1], [1, 1, 1, 1]]) >>> s = pd.Series([1, 1, 2, 1]) >>> df.dot(s) 0 -4 1 5 dtype: int64 Here we multiply a DataFrame with another DataFrame. >>> other = pd.DataFrame([[0, 1], [1, 2], [-1, -1], [2, 0]]) >>> df.dot(other) 0 1 0 1 4 1 2 2 Note that the dot method give the same result as @ >>> df @ other 0 1 0 1 4 1 2 2 The dot method works also if other is an np.array. >>> arr = np.array([[0, 1], [1, 2], [-1, -1], [2, 0]]) >>> df.dot(arr) 0 1 0 1 4 1 2 2 Note how shuffling of the objects does not change the result. >>> s2 = s.reindex([1, 0, 2, 3]) >>> df.dot(s2) 0 -4 1 5 dtype: int64 """ if isinstance(other, (Series, DataFrame)): common = self.columns.union(other.index) if (len(common) > len(self.columns) or len(common) > len(other.index)): raise ValueError('matrices are not aligned') left = self.reindex(columns=common, copy=False) right = other.reindex(index=common, copy=False) lvals = left.values rvals = right.values else: left = self lvals = self.values rvals = np.asarray(other) if lvals.shape[1] != rvals.shape[0]: raise ValueError('Dot product shape mismatch, ' '{s} vs {r}'.format(s=lvals.shape, r=rvals.shape)) if isinstance(other, DataFrame): return self._constructor(np.dot(lvals, rvals), index=left.index, columns=other.columns) elif isinstance(other, Series): return Series(np.dot(lvals, rvals), index=left.index) elif isinstance(rvals, (np.ndarray, Index)): result = np.dot(lvals, rvals) if result.ndim == 2: return self._constructor(result, index=left.index) else: return Series(result, index=left.index) else: # pragma: no cover raise TypeError('unsupported type: {oth}'.format(oth=type(other))) def __matmul__(self, other): """ Matrix multiplication using binary `@` operator in Python>=3.5. """ return self.dot(other) def __rmatmul__(self, other): """ Matrix multiplication using binary `@` operator in Python>=3.5. """ return self.T.dot(np.transpose(other)).T # ---------------------------------------------------------------------- # IO methods (to / from other formats) @classmethod def from_dict(cls, data, orient='columns', dtype=None, columns=None): """ Construct DataFrame from dict of array-like or dicts. Creates DataFrame object from dictionary by columns or by index allowing dtype specification. Parameters ---------- data : dict Of the form {field : array-like} or {field : dict}. orient : {'columns', 'index'}, default 'columns' The "orientation" of the data. If the keys of the passed dict should be the columns of the resulting DataFrame, pass 'columns' (default). Otherwise if the keys should be rows, pass 'index'. dtype : dtype, default None Data type to force, otherwise infer. columns : list, default None Column labels to use when ``orient='index'``. Raises a ValueError if used with ``orient='columns'``. .. versionadded:: 0.23.0 Returns ------- DataFrame See Also -------- DataFrame.from_records : DataFrame from ndarray (structured dtype), list of tuples, dict, or DataFrame. DataFrame : DataFrame object creation using constructor. 
Examples -------- By default the keys of the dict become the DataFrame columns: >>> data = {'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']} >>> pd.DataFrame.from_dict(data) col_1 col_2 0 3 a 1 2 b 2 1 c 3 0 d Specify ``orient='index'`` to create the DataFrame using dictionary keys as rows: >>> data = {'row_1': [3, 2, 1, 0], 'row_2': ['a', 'b', 'c', 'd']} >>> pd.DataFrame.from_dict(data, orient='index') 0 1 2 3 row_1 3 2 1 0 row_2 a b c d When using the 'index' orientation, the column names can be specified manually: >>> pd.DataFrame.from_dict(data, orient='index', ... columns=['A', 'B', 'C', 'D']) A B C D row_1 3 2 1 0 row_2 a b c d """ index = None orient = orient.lower() if orient == 'index': if len(data) > 0: # TODO speed up Series case if isinstance(list(data.values())[0], (Series, dict)): data = _from_nested_dict(data) else: data, index = list(data.values()), list(data.keys()) elif orient == 'columns': if columns is not None: raise ValueError("cannot use columns parameter with " "orient='columns'") else: # pragma: no cover raise ValueError('only recognize index or columns for orient') return cls(data, index=index, columns=columns, dtype=dtype) def to_numpy(self, dtype=None, copy=False): """ Convert the DataFrame to a NumPy array. .. versionadded:: 0.24.0 By default, the dtype of the returned array will be the common NumPy dtype of all types in the DataFrame. For example, if the dtypes are ``float16`` and ``float32``, the results dtype will be ``float32``. This may require copying data and coercing values, which may be expensive. Parameters ---------- dtype : str or numpy.dtype, optional The dtype to pass to :meth:`numpy.asarray` copy : bool, default False Whether to ensure that the returned value is a not a view on another array. Note that ``copy=False`` does not *ensure* that ``to_numpy()`` is no-copy. Rather, ``copy=True`` ensure that a copy is made, even if not strictly necessary. Returns ------- numpy.ndarray See Also -------- Series.to_numpy : Similar method for Series. Examples -------- >>> pd.DataFrame({"A": [1, 2], "B": [3, 4]}).to_numpy() array([[1, 3], [2, 4]]) With heterogenous data, the lowest common type will have to be used. >>> df = pd.DataFrame({"A": [1, 2], "B": [3.0, 4.5]}) >>> df.to_numpy() array([[1. , 3. ], [2. , 4.5]]) For a mix of numeric and non-numeric types, the output array will have object dtype. >>> df['C'] = pd.date_range('2000', periods=2) >>> df.to_numpy() array([[1, 3.0, Timestamp('2000-01-01 00:00:00')], [2, 4.5, Timestamp('2000-01-02 00:00:00')]], dtype=object) """ result = np.array(self.values, dtype=dtype, copy=copy) return result def to_dict(self, orient='dict', into=dict): """ Convert the DataFrame to a dictionary. The type of the key-value pairs can be customized with the parameters (see below). Parameters ---------- orient : str {'dict', 'list', 'series', 'split', 'records', 'index'} Determines the type of the values of the dictionary. - 'dict' (default) : dict like {column -> {index -> value}} - 'list' : dict like {column -> [values]} - 'series' : dict like {column -> Series(values)} - 'split' : dict like {'index' -> [index], 'columns' -> [columns], 'data' -> [values]} - 'records' : list like [{column -> value}, ... , {column -> value}] - 'index' : dict like {index -> {column -> value}} Abbreviations are allowed. `s` indicates `series` and `sp` indicates `split`. into : class, default dict The collections.abc.Mapping subclass used for all Mappings in the return value. 
Can be the actual class or an empty instance of the mapping type you want. If you want a collections.defaultdict, you must pass it initialized. .. versionadded:: 0.21.0 Returns ------- dict, list or collections.abc.Mapping Return a collections.abc.Mapping object representing the DataFrame. The resulting transformation depends on the `orient` parameter. See Also -------- DataFrame.from_dict: Create a DataFrame from a dictionary. DataFrame.to_json: Convert a DataFrame to JSON format. Examples -------- >>> df = pd.DataFrame({'col1': [1, 2], ... 'col2': [0.5, 0.75]}, ... index=['row1', 'row2']) >>> df col1 col2 row1 1 0.50 row2 2 0.75 >>> df.to_dict() {'col1': {'row1': 1, 'row2': 2}, 'col2': {'row1': 0.5, 'row2': 0.75}} You can specify the return orientation. >>> df.to_dict('series') {'col1': row1 1 row2 2 Name: col1, dtype: int64, 'col2': row1 0.50 row2 0.75 Name: col2, dtype: float64} >>> df.to_dict('split') {'index': ['row1', 'row2'], 'columns': ['col1', 'col2'], 'data': [[1, 0.5], [2, 0.75]]} >>> df.to_dict('records') [{'col1': 1, 'col2': 0.5}, {'col1': 2, 'col2': 0.75}] >>> df.to_dict('index') {'row1': {'col1': 1, 'col2': 0.5}, 'row2': {'col1': 2, 'col2': 0.75}} You can also specify the mapping type. >>> from collections import OrderedDict, defaultdict >>> df.to_dict(into=OrderedDict) OrderedDict([('col1', OrderedDict([('row1', 1), ('row2', 2)])), ('col2', OrderedDict([('row1', 0.5), ('row2', 0.75)]))]) If you want a `defaultdict`, you need to initialize it: >>> dd = defaultdict(list) >>> df.to_dict('records', into=dd) [defaultdict(<class 'list'>, {'col1': 1, 'col2': 0.5}), defaultdict(<class 'list'>, {'col1': 2, 'col2': 0.75})] """ if not self.columns.is_unique: warnings.warn("DataFrame columns are not unique, some " "columns will be omitted.", UserWarning, stacklevel=2) # GH16122 into_c = com.standardize_mapping(into) if orient.lower().startswith('d'): return into_c( (k, v.to_dict(into)) for k, v in self.items()) elif orient.lower().startswith('l'): return into_c((k, v.tolist()) for k, v in self.items()) elif orient.lower().startswith('sp'): return into_c((('index', self.index.tolist()), ('columns', self.columns.tolist()), ('data', [ list(map(com.maybe_box_datetimelike, t)) for t in self.itertuples(index=False, name=None) ]))) elif orient.lower().startswith('s'): return into_c((k, com.maybe_box_datetimelike(v)) for k, v in self.items()) elif orient.lower().startswith('r'): columns = self.columns.tolist() rows = (dict(zip(columns, row)) for row in self.itertuples(index=False, name=None)) return [ into_c((k, com.maybe_box_datetimelike(v)) for k, v in row.items()) for row in rows] elif orient.lower().startswith('i'): if not self.index.is_unique: raise ValueError( "DataFrame index must be unique for orient='index'." ) return into_c((t[0], dict(zip(self.columns, t[1:]))) for t in self.itertuples(name=None)) else: raise ValueError("orient '{o}' not understood".format(o=orient)) def to_gbq(self, destination_table, project_id=None, chunksize=None, reauth=False, if_exists='fail', auth_local_webserver=False, table_schema=None, location=None, progress_bar=True, credentials=None, verbose=None, private_key=None): """ Write a DataFrame to a Google BigQuery table. This function requires the `pandas-gbq package <https://pandas-gbq.readthedocs.io>`__. See the `How to authenticate with Google BigQuery <https://pandas-gbq.readthedocs.io/en/latest/howto/authentication.html>`__ guide for authentication instructions. 
Parameters ---------- destination_table : str Name of table to be written, in the form ``dataset.tablename``. project_id : str, optional Google BigQuery Account project ID. Optional when available from the environment. chunksize : int, optional Number of rows to be inserted in each chunk from the dataframe. Set to ``None`` to load the whole dataframe at once. reauth : bool, default False Force Google BigQuery to re-authenticate the user. This is useful if multiple accounts are used. if_exists : str, default 'fail' Behavior when the destination table exists. Value can be one of: ``'fail'`` If table exists, do nothing. ``'replace'`` If table exists, drop it, recreate it, and insert data. ``'append'`` If table exists, insert data. Create if does not exist. auth_local_webserver : bool, default False Use the `local webserver flow`_ instead of the `console flow`_ when getting user credentials. .. _local webserver flow: http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server .. _console flow: http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console *New in version 0.2.0 of pandas-gbq*. table_schema : list of dicts, optional List of BigQuery table fields to which according DataFrame columns conform to, e.g. ``[{'name': 'col1', 'type': 'STRING'},...]``. If schema is not provided, it will be generated according to dtypes of DataFrame columns. See BigQuery API documentation on available names of a field. *New in version 0.3.1 of pandas-gbq*. location : str, optional Location where the load job should run. See the `BigQuery locations documentation <https://cloud.google.com/bigquery/docs/dataset-locations>`__ for a list of available locations. The location must match that of the target dataset. *New in version 0.5.0 of pandas-gbq*. progress_bar : bool, default True Use the library `tqdm` to show the progress bar for the upload, chunk by chunk. *New in version 0.5.0 of pandas-gbq*. credentials : google.auth.credentials.Credentials, optional Credentials for accessing Google APIs. Use this parameter to override default credentials, such as to use Compute Engine :class:`google.auth.compute_engine.Credentials` or Service Account :class:`google.oauth2.service_account.Credentials` directly. *New in version 0.8.0 of pandas-gbq*. .. versionadded:: 0.24.0 verbose : bool, deprecated Deprecated in pandas-gbq version 0.4.0. Use the `logging module to adjust verbosity instead <https://pandas-gbq.readthedocs.io/en/latest/intro.html#logging>`__. private_key : str, deprecated Deprecated in pandas-gbq version 0.8.0. Use the ``credentials`` parameter and :func:`google.oauth2.service_account.Credentials.from_service_account_info` or :func:`google.oauth2.service_account.Credentials.from_service_account_file` instead. Service account private key in JSON format. Can be file path or string contents. This is useful for remote server authentication (eg. Jupyter/IPython notebook on remote host). See Also -------- pandas_gbq.to_gbq : This function in the pandas-gbq library. read_gbq : Read a DataFrame from Google BigQuery. 
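        Examples
        --------
        A minimal sketch of a typical call; the project and table names are
        placeholders, and running it requires the pandas-gbq package plus
        valid Google credentials, so it is not executed here.

        >>> df = pd.DataFrame({'name': ['alice', 'bob'], 'score': [10, 12]})
        >>> df.to_gbq('my_dataset.scores',  # doctest: +SKIP
        ...           project_id='my-project',
        ...           if_exists='append',
        ...           table_schema=[{'name': 'name', 'type': 'STRING'},
        ...                         {'name': 'score', 'type': 'INTEGER'}])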
""" from pandas.io import gbq gbq.to_gbq(self, destination_table, project_id=project_id, chunksize=chunksize, reauth=reauth, if_exists=if_exists, auth_local_webserver=auth_local_webserver, table_schema=table_schema, location=location, progress_bar=progress_bar, credentials=credentials, verbose=verbose, private_key=private_key) @classmethod def from_records(cls, data, index=None, exclude=None, columns=None, coerce_float=False, nrows=None): """ Convert structured or record ndarray to DataFrame. Parameters ---------- data : ndarray (structured dtype), list of tuples, dict, or DataFrame index : string, list of fields, array-like Field of array to use as the index, alternately a specific set of input labels to use exclude : sequence, default None Columns or fields to exclude columns : sequence, default None Column names to use. If the passed data do not have names associated with them, this argument provides names for the columns. Otherwise this argument indicates the order of the columns in the result (any names not found in the data will become all-NA columns) coerce_float : boolean, default False Attempt to convert values of non-string, non-numeric objects (like decimal.Decimal) to floating point, useful for SQL result sets nrows : int, default None Number of rows to read if data is an iterator Returns ------- DataFrame """ # Make a copy of the input columns so we can modify it if columns is not None: columns = ensure_index(columns) if is_iterator(data): if nrows == 0: return cls() try: first_row = next(data) except StopIteration: return cls(index=index, columns=columns) dtype = None if hasattr(first_row, 'dtype') and first_row.dtype.names: dtype = first_row.dtype values = [first_row] if nrows is None: values += data else: values.extend(itertools.islice(data, nrows - 1)) if dtype is not None: data = np.array(values, dtype=dtype) else: data = values if isinstance(data, dict): if columns is None: columns = arr_columns = ensure_index(sorted(data)) arrays = [data[k] for k in columns] else: arrays = [] arr_columns = [] for k, v in data.items(): if k in columns: arr_columns.append(k) arrays.append(v) arrays, arr_columns = reorder_arrays(arrays, arr_columns, columns) elif isinstance(data, (np.ndarray, DataFrame)): arrays, columns = to_arrays(data, columns) if columns is not None: columns = ensure_index(columns) arr_columns = columns else: arrays, arr_columns = to_arrays(data, columns, coerce_float=coerce_float) arr_columns = ensure_index(arr_columns) if columns is not None: columns = ensure_index(columns) else: columns = arr_columns if exclude is None: exclude = set() else: exclude = set(exclude) result_index = None if index is not None: if (isinstance(index, str) or not hasattr(index, "__iter__")): i = columns.get_loc(index) exclude.add(index) if len(arrays) > 0: result_index = Index(arrays[i], name=index) else: result_index = Index([], name=index) else: try: index_data = [arrays[arr_columns.get_loc(field)] for field in index] result_index = ensure_index_from_sequences(index_data, names=index) exclude.update(index) except Exception: result_index = index if any(exclude): arr_exclude = [x for x in exclude if x in arr_columns] to_remove = [arr_columns.get_loc(col) for col in arr_exclude] arrays = [v for i, v in enumerate(arrays) if i not in to_remove] arr_columns = arr_columns.drop(arr_exclude) columns = columns.drop(exclude) mgr = arrays_to_mgr(arrays, arr_columns, result_index, columns) return cls(mgr) def to_records(self, index=True, convert_datetime64=None, column_dtypes=None, index_dtypes=None): 
""" Convert DataFrame to a NumPy record array. Index will be included as the first field of the record array if requested. Parameters ---------- index : bool, default True Include index in resulting record array, stored in 'index' field or using the index label, if set. convert_datetime64 : bool, default None .. deprecated:: 0.23.0 Whether to convert the index to datetime.datetime if it is a DatetimeIndex. column_dtypes : str, type, dict, default None .. versionadded:: 0.24.0 If a string or type, the data type to store all columns. If a dictionary, a mapping of column names and indices (zero-indexed) to specific data types. index_dtypes : str, type, dict, default None .. versionadded:: 0.24.0 If a string or type, the data type to store all index levels. If a dictionary, a mapping of index level names and indices (zero-indexed) to specific data types. This mapping is applied only if `index=True`. Returns ------- numpy.recarray NumPy ndarray with the DataFrame labels as fields and each row of the DataFrame as entries. See Also -------- DataFrame.from_records: Convert structured or record ndarray to DataFrame. numpy.recarray: An ndarray that allows field access using attributes, analogous to typed columns in a spreadsheet. Examples -------- >>> df = pd.DataFrame({'A': [1, 2], 'B': [0.5, 0.75]}, ... index=['a', 'b']) >>> df A B a 1 0.50 b 2 0.75 >>> df.to_records() rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('index', 'O'), ('A', '<i8'), ('B', '<f8')]) If the DataFrame index has no label then the recarray field name is set to 'index'. If the index has a label then this is used as the field name: >>> df.index = df.index.rename("I") >>> df.to_records() rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('I', 'O'), ('A', '<i8'), ('B', '<f8')]) The index can be excluded from the record array: >>> df.to_records(index=False) rec.array([(1, 0.5 ), (2, 0.75)], dtype=[('A', '<i8'), ('B', '<f8')]) Data types can be specified for the columns: >>> df.to_records(column_dtypes={"A": "int32"}) rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('I', 'O'), ('A', '<i4'), ('B', '<f8')]) As well as for the index: >>> df.to_records(index_dtypes="<S2") rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)], dtype=[('I', 'S2'), ('A', '<i8'), ('B', '<f8')]) >>> index_dtypes = "<S{}".format(df.index.str.len().max()) >>> df.to_records(index_dtypes=index_dtypes) rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)], dtype=[('I', 'S1'), ('A', '<i8'), ('B', '<f8')]) """ if convert_datetime64 is not None: warnings.warn("The 'convert_datetime64' parameter is " "deprecated and will be removed in a future " "version", FutureWarning, stacklevel=2) if index: if is_datetime64_any_dtype(self.index) and convert_datetime64: ix_vals = [self.index.to_pydatetime()] else: if isinstance(self.index, MultiIndex): # array of tuples to numpy cols. 
copy copy copy ix_vals = list(map(np.array, zip(*self.index.values))) else: ix_vals = [self.index.values] arrays = ix_vals + [self[c].get_values() for c in self.columns] count = 0 index_names = list(self.index.names) if isinstance(self.index, MultiIndex): for i, n in enumerate(index_names): if n is None: index_names[i] = 'level_%d' % count count += 1 elif index_names[0] is None: index_names = ['index'] names = [str(name) for name in itertools.chain(index_names, self.columns)] else: arrays = [self[c].get_values() for c in self.columns] names = [str(c) for c in self.columns] index_names = [] index_len = len(index_names) formats = [] for i, v in enumerate(arrays): index = i # When the names and arrays are collected, we # first collect those in the DataFrame's index, # followed by those in its columns. # # Thus, the total length of the array is: # len(index_names) + len(DataFrame.columns). # # This check allows us to see whether we are # handling a name / array in the index or column. if index < index_len: dtype_mapping = index_dtypes name = index_names[index] else: index -= index_len dtype_mapping = column_dtypes name = self.columns[index] # We have a dictionary, so we get the data type # associated with the index or column (which can # be denoted by its name in the DataFrame or its # position in DataFrame's array of indices or # columns, whichever is applicable. if is_dict_like(dtype_mapping): if name in dtype_mapping: dtype_mapping = dtype_mapping[name] elif index in dtype_mapping: dtype_mapping = dtype_mapping[index] else: dtype_mapping = None # If no mapping can be found, use the array's # dtype attribute for formatting. # # A valid dtype must either be a type or # string naming a type. if dtype_mapping is None: formats.append(v.dtype) elif isinstance(dtype_mapping, (type, np.dtype, str)): formats.append(dtype_mapping) else: element = "row" if i < index_len else "column" msg = ("Invalid dtype {dtype} specified for " "{element} {name}").format(dtype=dtype_mapping, element=element, name=name) raise ValueError(msg) return np.rec.fromarrays( arrays, dtype={'names': names, 'formats': formats} ) @classmethod def from_items(cls, items, columns=None, orient='columns'): """ Construct a DataFrame from a list of tuples. .. deprecated:: 0.23.0 `from_items` is deprecated and will be removed in a future version. Use :meth:`DataFrame.from_dict(dict(items)) <DataFrame.from_dict>` instead. :meth:`DataFrame.from_dict(OrderedDict(items)) <DataFrame.from_dict>` may be used to preserve the key order. Convert (key, value) pairs to DataFrame. The keys will be the axis index (usually the columns, but depends on the specified orientation). The values should be arrays or Series. Parameters ---------- items : sequence of (key, value) pairs Values should be arrays or Series. columns : sequence of column labels, optional Must be passed if orient='index'. orient : {'columns', 'index'}, default 'columns' The "orientation" of the data. If the keys of the input correspond to column labels, pass 'columns' (default). Otherwise if the keys correspond to the index, pass 'index'. Returns ------- DataFrame """ warnings.warn("from_items is deprecated. Please use " "DataFrame.from_dict(dict(items), ...) instead. 
" "DataFrame.from_dict(OrderedDict(items)) may be used to " "preserve the key order.", FutureWarning, stacklevel=2) keys, values = zip(*items) if orient == 'columns': if columns is not None: columns = ensure_index(columns) idict = dict(items) if len(idict) < len(items): if not columns.equals(ensure_index(keys)): raise ValueError('With non-unique item names, passed ' 'columns must be identical') arrays = values else: arrays = [idict[k] for k in columns if k in idict] else: columns = ensure_index(keys) arrays = values # GH 17312 # Provide more informative error msg when scalar values passed try: return cls._from_arrays(arrays, columns, None) except ValueError: if not is_nested_list_like(values): raise ValueError('The value in each (key, value) pair ' 'must be an array, Series, or dict') elif orient == 'index': if columns is None: raise TypeError("Must pass columns with orient='index'") keys = ensure_index(keys) # GH 17312 # Provide more informative error msg when scalar values passed try: arr = np.array(values, dtype=object).T data = [lib.maybe_convert_objects(v) for v in arr] return cls._from_arrays(data, columns, keys) except TypeError: if not is_nested_list_like(values): raise ValueError('The value in each (key, value) pair ' 'must be an array, Series, or dict') else: # pragma: no cover raise ValueError("'orient' must be either 'columns' or 'index'") @classmethod def _from_arrays(cls, arrays, columns, index, dtype=None): mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) return cls(mgr) @classmethod def from_csv(cls, path, header=0, sep=',', index_col=0, parse_dates=True, encoding=None, tupleize_cols=None, infer_datetime_format=False): """ Read CSV file. .. deprecated:: 0.21.0 Use :func:`read_csv` instead. It is preferable to use the more powerful :func:`read_csv` for most general purposes, but ``from_csv`` makes for an easy roundtrip to and from a file (the exact counterpart of ``to_csv``), especially with a DataFrame of time series data. This method only differs from the preferred :func:`read_csv` in some defaults: - `index_col` is ``0`` instead of ``None`` (take first column as index by default) - `parse_dates` is ``True`` instead of ``False`` (try parsing the index as datetime by default) So a ``pd.DataFrame.from_csv(path)`` can be replaced by ``pd.read_csv(path, index_col=0, parse_dates=True)``. Parameters ---------- path : string file path or file handle / StringIO header : int, default 0 Row to use as header (skip prior rows) sep : string, default ',' Field delimiter index_col : int or sequence, default 0 Column to use for index. If a sequence is given, a MultiIndex is used. Different default from read_table parse_dates : boolean, default True Parse dates. Different default from read_table tupleize_cols : boolean, default False write multi_index columns as a list of tuples (if True) or new (expanded format) if False) infer_datetime_format : boolean, default False If True and `parse_dates` is True for a column, try to infer the datetime format based on the first datetime string. If the format can be inferred, there often will be a large parsing speed-up. Returns ------- DataFrame See Also -------- read_csv """ warnings.warn("from_csv is deprecated. Please use read_csv(...) " "instead. 
Note that some of the default arguments are " "different, so please refer to the documentation " "for from_csv when changing your function calls", FutureWarning, stacklevel=2) from pandas.io.parsers import read_csv return read_csv(path, header=header, sep=sep, parse_dates=parse_dates, index_col=index_col, encoding=encoding, tupleize_cols=tupleize_cols, infer_datetime_format=infer_datetime_format) def to_sparse(self, fill_value=None, kind='block'): """ Convert to SparseDataFrame. .. deprecated:: 0.25.0 Implement the sparse version of the DataFrame meaning that any data matching a specific value it's omitted in the representation. The sparse DataFrame allows for a more efficient storage. Parameters ---------- fill_value : float, default None The specific value that should be omitted in the representation. kind : {'block', 'integer'}, default 'block' The kind of the SparseIndex tracking where data is not equal to the fill value: - 'block' tracks only the locations and sizes of blocks of data. - 'integer' keeps an array with all the locations of the data. In most cases 'block' is recommended, since it's more memory efficient. Returns ------- SparseDataFrame The sparse representation of the DataFrame. See Also -------- DataFrame.to_dense : Converts the DataFrame back to the its dense form. Examples -------- >>> df = pd.DataFrame([(np.nan, np.nan), ... (1., np.nan), ... (np.nan, 1.)]) >>> df 0 1 0 NaN NaN 1 1.0 NaN 2 NaN 1.0 >>> type(df) <class 'pandas.core.frame.DataFrame'> >>> sdf = df.to_sparse() # doctest: +SKIP >>> sdf # doctest: +SKIP 0 1 0 NaN NaN 1 1.0 NaN 2 NaN 1.0 >>> type(sdf) # doctest: +SKIP <class 'pandas.core.sparse.frame.SparseDataFrame'> """ warnings.warn("DataFrame.to_sparse is deprecated and will be removed " "in a future version", FutureWarning, stacklevel=2) from pandas.core.sparse.api import SparseDataFrame with warnings.catch_warnings(): warnings.filterwarnings("ignore", message="SparseDataFrame") return SparseDataFrame(self._series, index=self.index, columns=self.columns, default_kind=kind, default_fill_value=fill_value) @deprecate_kwarg(old_arg_name='encoding', new_arg_name=None) def to_stata(self, fname, convert_dates=None, write_index=True, encoding="latin-1", byteorder=None, time_stamp=None, data_label=None, variable_labels=None, version=114, convert_strl=None): """ Export DataFrame object to Stata dta format. Writes the DataFrame to a Stata dataset file. "dta" files contain a Stata dataset. Parameters ---------- fname : str, buffer or path object String, path object (pathlib.Path or py._path.local.LocalPath) or object implementing a binary write() function. If using a buffer then the buffer will not be automatically closed after the file data has been written. convert_dates : dict Dictionary mapping columns containing datetime types to stata internal format to use when writing the dates. Options are 'tc', 'td', 'tm', 'tw', 'th', 'tq', 'ty'. Column can be either an integer or a name. Datetime columns that do not have a conversion type specified will be converted to 'tc'. Raises NotImplementedError if a datetime column has timezone information. write_index : bool Write the index to Stata dataset. encoding : str Default is latin-1. Unicode is not supported. byteorder : str Can be ">", "<", "little", or "big". default is `sys.byteorder`. time_stamp : datetime A datetime to use as file creation date. Default is the current time. data_label : str, optional A label for the data set. Must be 80 characters or smaller. 
variable_labels : dict Dictionary containing columns as keys and variable labels as values. Each label must be 80 characters or smaller. .. versionadded:: 0.19.0 version : {114, 117}, default 114 Version to use in the output dta file. Version 114 can be read by Stata 10 and later. Version 117 can be read by Stata 13 or later. Version 114 limits string variables to 244 characters or fewer while 117 allows strings with lengths up to 2,000,000 characters. .. versionadded:: 0.23.0 convert_strl : list, optional List of column names to convert to the Stata StrL format. Only available if version is 117. Storing strings in the StrL format can produce smaller dta files if strings have more than 8 characters and values are repeated. .. versionadded:: 0.23.0 Raises ------ NotImplementedError * If datetimes contain timezone information * Column dtype is not representable in Stata ValueError * Columns listed in convert_dates are neither datetime64[ns] nor datetime.datetime * Column listed in convert_dates is not in DataFrame * Categorical label contains more than 32,000 characters .. versionadded:: 0.19.0 See Also -------- read_stata : Import Stata data files. io.stata.StataWriter : Low-level writer for Stata data files. io.stata.StataWriter117 : Low-level writer for version 117 files. Examples -------- >>> df = pd.DataFrame({'animal': ['falcon', 'parrot', 'falcon', ... 'parrot'], ... 'speed': [350, 18, 361, 15]}) >>> df.to_stata('animals.dta') # doctest: +SKIP """ kwargs = {} if version not in (114, 117): raise ValueError('Only formats 114 and 117 supported.') if version == 114: if convert_strl is not None: raise ValueError('strl support is only available when using ' 'format 117') from pandas.io.stata import StataWriter as statawriter else: from pandas.io.stata import StataWriter117 as statawriter kwargs['convert_strl'] = convert_strl writer = statawriter(fname, self, convert_dates=convert_dates, byteorder=byteorder, time_stamp=time_stamp, data_label=data_label, write_index=write_index, variable_labels=variable_labels, **kwargs) writer.write_file() def to_feather(self, fname): """ Write out the binary feather-format for DataFrames. .. versionadded:: 0.20.0 Parameters ---------- fname : str String file path. """ from pandas.io.feather_format import to_feather to_feather(self, fname) def to_parquet(self, fname, engine='auto', compression='snappy', index=None, partition_cols=None, **kwargs): """ Write a DataFrame to the binary parquet format. .. versionadded:: 0.21.0 This function writes the dataframe as a `parquet file <https://parquet.apache.org/>`_. You can choose different parquet backends, and have the option of compression. See :ref:`the user guide <io.parquet>` for more details. Parameters ---------- fname : str File path or Root Directory path. Will be used as Root Directory path while writing a partitioned dataset. .. versionchanged:: 0.24.0 engine : {'auto', 'pyarrow', 'fastparquet'}, default 'auto' Parquet library to use. If 'auto', then the option ``io.parquet.engine`` is used. The default ``io.parquet.engine`` behavior is to try 'pyarrow', falling back to 'fastparquet' if 'pyarrow' is unavailable. compression : {'snappy', 'gzip', 'brotli', None}, default 'snappy' Name of the compression to use. Use ``None`` for no compression. index : bool, default None If ``True``, include the dataframe's index(es) in the file output. If ``False``, they will not be written to the file. If ``None``, the behavior depends on the chosen engine. .. 
versionadded:: 0.24.0 partition_cols : list, optional, default None Column names by which to partition the dataset. Columns are partitioned in the order they are given. .. versionadded:: 0.24.0 **kwargs Additional arguments passed to the parquet library. See :ref:`pandas io <io.parquet>` for more details. See Also -------- read_parquet : Read a parquet file. DataFrame.to_csv : Write a csv file. DataFrame.to_sql : Write to a sql table. DataFrame.to_hdf : Write to hdf. Notes ----- This function requires either the `fastparquet <https://pypi.org/project/fastparquet>`_ or `pyarrow <https://arrow.apache.org/docs/python/>`_ library. Examples -------- >>> df = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]}) >>> df.to_parquet('df.parquet.gzip', ... compression='gzip') # doctest: +SKIP >>> pd.read_parquet('df.parquet.gzip') # doctest: +SKIP col1 col2 0 1 3 1 2 4 """ from pandas.io.parquet import to_parquet to_parquet(self, fname, engine, compression=compression, index=index, partition_cols=partition_cols, **kwargs) @Substitution(header='Whether to print column labels, default True', col_space_type='str or int', col_space='The minimum width of each column in CSS length ' 'units. An int is assumed to be px units.\n\n' ' .. versionadded:: 0.25.0\n' ' Ability to use str') @Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring) def to_html(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', bold_rows=True, classes=None, escape=True, notebook=False, border=None, table_id=None, render_links=False): """ Render a DataFrame as an HTML table. %(shared_params)s bold_rows : bool, default True Make the row labels bold in the output. classes : str or list or tuple, default None CSS class(es) to apply to the resulting html table. escape : bool, default True Convert the characters <, >, and & to HTML-safe sequences. notebook : {True, False}, default False Whether the generated HTML is for IPython Notebook. border : int A ``border=border`` attribute is included in the opening `<table>` tag. Default ``pd.options.display.html.border``. .. versionadded:: 0.19.0 table_id : str, optional A CSS id is included in the opening `<table>` tag if specified. .. versionadded:: 0.23.0 render_links : bool, default False Convert URLs to HTML links. .. versionadded:: 0.24.0 %(returns)s See Also -------- to_string : Convert DataFrame to a string. """ if (justify is not None and justify not in fmt._VALID_JUSTIFY_PARAMETERS): raise ValueError("Invalid value for justify parameter") formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, bold_rows=bold_rows, escape=escape, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, table_id=table_id, render_links=render_links) # TODO: a generic formatter would belong in DataFrameFormatter formatter.to_html(classes=classes, notebook=notebook, border=border) if buf is None: return formatter.buf.getvalue() # ---------------------------------------------------------------------- def info(self, verbose=None, buf=None, max_cols=None, memory_usage=None, null_counts=None): """ Print a concise summary of a DataFrame. 
This method prints information about a DataFrame including the index dtype and column dtypes, non-null values and memory usage. Parameters ---------- verbose : bool, optional Whether to print the full summary. By default, the setting in ``pandas.options.display.max_info_columns`` is followed. buf : writable buffer, defaults to sys.stdout Where to send the output. By default, the output is printed to sys.stdout. Pass a writable buffer if you need to further process the output. max_cols : int, optional When to switch from the verbose to the truncated output. If the DataFrame has more than `max_cols` columns, the truncated output is used. By default, the setting in ``pandas.options.display.max_info_columns`` is used. memory_usage : bool, str, optional Specifies whether total memory usage of the DataFrame elements (including the index) should be displayed. By default, this follows the ``pandas.options.display.memory_usage`` setting. True always shows memory usage. False never shows memory usage. A value of 'deep' is equivalent to "True with deep introspection". Memory usage is shown in human-readable units (base-2 representation). Without deep introspection a memory estimation is made based on column dtype and number of rows, assuming values consume the same amount of memory for corresponding dtypes. With deep memory introspection, a real memory usage calculation is performed at the cost of computational resources. null_counts : bool, optional Whether to show the non-null counts. By default, this is shown only if the frame is smaller than ``pandas.options.display.max_info_rows`` and ``pandas.options.display.max_info_columns``. A value of True always shows the counts, and False never shows the counts. Returns ------- None This method prints a summary of a DataFrame and returns None. See Also -------- DataFrame.describe: Generate descriptive statistics of DataFrame columns. DataFrame.memory_usage: Memory usage of DataFrame columns. Examples -------- >>> int_values = [1, 2, 3, 4, 5] >>> text_values = ['alpha', 'beta', 'gamma', 'delta', 'epsilon'] >>> float_values = [0.0, 0.25, 0.5, 0.75, 1.0] >>> df = pd.DataFrame({"int_col": int_values, "text_col": text_values, ... "float_col": float_values}) >>> df int_col text_col float_col 0 1 alpha 0.00 1 2 beta 0.25 2 3 gamma 0.50 3 4 delta 0.75 4 5 epsilon 1.00 Prints information about all columns: >>> df.info(verbose=True) <class 'pandas.core.frame.DataFrame'> RangeIndex: 5 entries, 0 to 4 Data columns (total 3 columns): int_col 5 non-null int64 text_col 5 non-null object float_col 5 non-null float64 dtypes: float64(1), int64(1), object(1) memory usage: 248.0+ bytes Prints a summary of the column count and dtypes but no per-column information: >>> df.info(verbose=False) <class 'pandas.core.frame.DataFrame'> RangeIndex: 5 entries, 0 to 4 Columns: 3 entries, int_col to float_col dtypes: float64(1), int64(1), object(1) memory usage: 248.0+ bytes Pipe the output of DataFrame.info to a buffer instead of sys.stdout, get the buffer content and write it to a text file: >>> import io >>> buffer = io.StringIO() >>> df.info(buf=buffer) >>> s = buffer.getvalue() >>> with open("df_info.txt", "w", ... encoding="utf-8") as f: # doctest: +SKIP ... f.write(s) 260 The `memory_usage` parameter allows deep introspection mode, especially useful for big DataFrames and for fine-tuning memory optimization: >>> random_strings_array = np.random.choice(['a', 'b', 'c'], 10 ** 6) >>> df = pd.DataFrame({ ... 'column_1': np.random.choice(['a', 'b', 'c'], 10 ** 6), ... 
'column_2': np.random.choice(['a', 'b', 'c'], 10 ** 6), ... 'column_3': np.random.choice(['a', 'b', 'c'], 10 ** 6) ... }) >>> df.info() <class 'pandas.core.frame.DataFrame'> RangeIndex: 1000000 entries, 0 to 999999 Data columns (total 3 columns): column_1 1000000 non-null object column_2 1000000 non-null object column_3 1000000 non-null object dtypes: object(3) memory usage: 22.9+ MB >>> df.info(memory_usage='deep') <class 'pandas.core.frame.DataFrame'> RangeIndex: 1000000 entries, 0 to 999999 Data columns (total 3 columns): column_1 1000000 non-null object column_2 1000000 non-null object column_3 1000000 non-null object dtypes: object(3) memory usage: 188.8 MB """ if buf is None: # pragma: no cover buf = sys.stdout lines = [] lines.append(str(type(self))) lines.append(self.index._summary()) if len(self.columns) == 0: lines.append('Empty {name}'.format(name=type(self).__name__)) fmt.buffer_put_lines(buf, lines) return cols = self.columns # hack: fall back to the display options when max_cols is not given if max_cols is None: max_cols = get_option('display.max_info_columns', len(self.columns) + 1) max_rows = get_option('display.max_info_rows', len(self) + 1) if null_counts is None: show_counts = ((len(self.columns) <= max_cols) and (len(self) < max_rows)) else: show_counts = null_counts exceeds_info_cols = len(self.columns) > max_cols def _verbose_repr(): lines.append('Data columns (total %d columns):' % len(self.columns)) space = max(len(pprint_thing(k)) for k in self.columns) + 4 counts = None tmpl = "{count}{dtype}" if show_counts: counts = self.count() if len(cols) != len(counts): # pragma: no cover raise AssertionError( 'Columns must equal counts ' '({cols:d} != {counts:d})'.format( cols=len(cols), counts=len(counts))) tmpl = "{count} non-null {dtype}" dtypes = self.dtypes for i, col in enumerate(self.columns): dtype = dtypes.iloc[i] col = pprint_thing(col) count = "" if show_counts: count = counts.iloc[i] lines.append(_put_str(col, space) + tmpl.format(count=count, dtype=dtype)) def _non_verbose_repr(): lines.append(self.columns._summary(name='Columns')) def _sizeof_fmt(num, size_qualifier): # returns size in human-readable format for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: if num < 1024.0: return ("{num:3.1f}{size_q} " "{x}".format(num=num, size_q=size_qualifier, x=x)) num /= 1024.0 return "{num:3.1f}{size_q} {pb}".format(num=num, size_q=size_qualifier, pb='PB') if verbose: _verbose_repr() elif verbose is False: # specifically set to False, not just None _non_verbose_repr() else: if exceeds_info_cols: _non_verbose_repr() else: _verbose_repr() counts = self.get_dtype_counts() dtypes = ['{k}({kk:d})'.format(k=k[0], kk=k[1]) for k in sorted(counts.items())] lines.append('dtypes: {types}'.format(types=', '.join(dtypes))) if memory_usage is None: memory_usage = get_option('display.memory_usage') if memory_usage: # append memory usage of df to display size_qualifier = '' if memory_usage == 'deep': deep = True else: # size_qualifier is just a best effort; not guaranteed to catch # all cases (e.g., it misses categorical data even with object # categories) deep = False if ('object' in counts or self.index._is_memory_usage_qualified()): size_qualifier = '+' mem_usage = self.memory_usage(index=True, deep=deep).sum() lines.append("memory usage: {mem}\n".format( mem=_sizeof_fmt(mem_usage, size_qualifier))) fmt.buffer_put_lines(buf, lines) def memory_usage(self, index=True, deep=False): """ Return the memory usage of each column in bytes. The memory usage can optionally include the contribution of the index and elements of `object` dtype. 
This value is displayed in `DataFrame.info` by default. This can be suppressed by setting ``pandas.options.display.memory_usage`` to False. Parameters ---------- index : bool, default True Specifies whether to include the memory usage of the DataFrame's index in returned Series. If ``index=True``, the memory usage of the index is the first item in the output. deep : bool, default False If True, introspect the data deeply by interrogating `object` dtypes for system-level memory consumption, and include it in the returned values. Returns ------- Series A Series whose index is the original column names and whose values are the memory usage of each column in bytes. See Also -------- numpy.ndarray.nbytes : Total bytes consumed by the elements of an ndarray. Series.memory_usage : Bytes consumed by a Series. Categorical : Memory-efficient array for string values with many repeated values. DataFrame.info : Concise summary of a DataFrame. Examples -------- >>> dtypes = ['int64', 'float64', 'complex128', 'object', 'bool'] >>> data = dict([(t, np.ones(shape=5000).astype(t)) ... for t in dtypes]) >>> df = pd.DataFrame(data) >>> df.head() int64 float64 complex128 object bool 0 1 1.0 1.0+0.0j 1 True 1 1 1.0 1.0+0.0j 1 True 2 1 1.0 1.0+0.0j 1 True 3 1 1.0 1.0+0.0j 1 True 4 1 1.0 1.0+0.0j 1 True >>> df.memory_usage() Index 128 int64 40000 float64 40000 complex128 80000 object 40000 bool 5000 dtype: int64 >>> df.memory_usage(index=False) int64 40000 float64 40000 complex128 80000 object 40000 bool 5000 dtype: int64 By default only the shallow (pointer) size of `object` dtype columns is counted; pass ``deep=True`` to include the actual object storage: >>> df.memory_usage(deep=True) Index 128 int64 40000 float64 40000 complex128 80000 object 160000 bool 5000 dtype: int64 Use a Categorical for efficient storage of an object-dtype column with many repeated values. >>> df['object'].astype('category').memory_usage(deep=True) 5216 """ result = Series([c.memory_usage(index=False, deep=deep) for col, c in self.iteritems()], index=self.columns) if index: result = Series(self.index.memory_usage(deep=deep), index=['Index']).append(result) return result def transpose(self, *args, **kwargs): """ Transpose index and columns. Reflect the DataFrame over its main diagonal by writing rows as columns and vice-versa. The property :attr:`.T` is an accessor to the method :meth:`transpose`. Parameters ---------- copy : bool, default False If True, the underlying data is copied. Otherwise (default), no copy is made if possible. *args, **kwargs Additional keywords have no effect but might be accepted for compatibility with numpy. Returns ------- DataFrame The transposed DataFrame. See Also -------- numpy.transpose : Permute the dimensions of a given array. Notes ----- Transposing a DataFrame with mixed dtypes will result in a homogeneous DataFrame with the `object` dtype. In such a case, a copy of the data is always made. Examples -------- **Square DataFrame with homogeneous dtype** >>> d1 = {'col1': [1, 2], 'col2': [3, 4]} >>> df1 = pd.DataFrame(data=d1) >>> df1 col1 col2 0 1 3 1 2 4 >>> df1_transposed = df1.T # or df1.transpose() >>> df1_transposed 0 1 col1 1 2 col2 3 4 When the dtype is homogeneous in the original DataFrame, we get a transposed DataFrame with the same dtype: >>> df1.dtypes col1 int64 col2 int64 dtype: object >>> df1_transposed.dtypes 0 int64 1 int64 dtype: object **Non-square DataFrame with mixed dtypes** >>> d2 = {'name': ['Alice', 'Bob'], ... 'score': [9.5, 8], ... 'employed': [False, True], ... 
'kids': [0, 0]} >>> df2 = pd.DataFrame(data=d2) >>> df2 name score employed kids 0 Alice 9.5 False 0 1 Bob 8.0 True 0 >>> df2_transposed = df2.T # or df2.transpose() >>> df2_transposed 0 1 name Alice Bob score 9.5 8 employed False True kids 0 0 When the DataFrame has mixed dtypes, we get a transposed DataFrame with the `object` dtype: >>> df2.dtypes name object score float64 employed bool kids int64 dtype: object >>> df2_transposed.dtypes 0 object 1 object dtype: object """ nv.validate_transpose(args, dict()) return super().transpose(1, 0, **kwargs) T = property(transpose) # ---------------------------------------------------------------------- # Picklability # legacy pickle formats def _unpickle_frame_compat(self, state): # pragma: no cover if len(state) == 2: # pragma: no cover series, idx = state columns = sorted(series) else: series, cols, idx = state columns = com._unpickle_array(cols) index = com._unpickle_array(idx) self._data = self._init_dict(series, index, columns, None) def _unpickle_matrix_compat(self, state): # pragma: no cover # old unpickling (vals, idx, cols), object_state = state index = com._unpickle_array(idx) dm = DataFrame(vals, index=index, columns=com._unpickle_array(cols), copy=False) if object_state is not None: ovals, _, ocols = object_state objects = DataFrame(ovals, index=index, columns=com._unpickle_array(ocols), copy=False) dm = dm.join(objects) self._data = dm._data # ---------------------------------------------------------------------- # Getting and setting elements def get_value(self, index, col, takeable=False): """ Quickly retrieve a single value at the passed column and index. .. deprecated:: 0.21.0 Use .at[] or .iat[] accessors instead. Parameters ---------- index : row label col : column label takeable : bool, default False Interpret the index/col as indexers. Returns ------- scalar """ warnings.warn("get_value is deprecated and will be removed " "in a future release. Please use " ".at[] or .iat[] accessors instead", FutureWarning, stacklevel=2) return self._get_value(index, col, takeable=takeable) def _get_value(self, index, col, takeable=False): if takeable: series = self._iget_item_cache(col) return com.maybe_box_datetimelike(series._values[index]) series = self._get_item_cache(col) engine = self.index._engine try: return engine.get_value(series._values, index) except KeyError: # GH 20629 if self.index.nlevels > 1: # partial indexing forbidden raise except (TypeError, ValueError): pass # we cannot handle direct indexing # use positional col = self.columns.get_loc(col) index = self.index.get_loc(index) return self._get_value(index, col, takeable=True) _get_value.__doc__ = get_value.__doc__ def set_value(self, index, col, value, takeable=False): """ Put a single value at the passed column and index. .. deprecated:: 0.21.0 Use .at[] or .iat[] accessors instead. Parameters ---------- index : row label col : column label value : scalar takeable : bool, default False Interpret the index/col as indexers. Returns ------- DataFrame If the label pair is contained, the result will be a reference to the calling DataFrame; otherwise a new object is returned. """ warnings.warn("set_value is deprecated and will be removed " "in a future release. 
Please use " ".at[] or .iat[] accessors instead", FutureWarning, stacklevel=2) return self._set_value(index, col, value, takeable=takeable) def _set_value(self, index, col, value, takeable=False): try: if takeable is True: series = self._iget_item_cache(col) return series._set_value(index, value, takeable=True) series = self._get_item_cache(col) engine = self.index._engine engine.set_value(series._values, index, value) return self except (KeyError, TypeError): # set using a non-recursive method & reset the cache if takeable: self.iloc[index, col] = value else: self.loc[index, col] = value self._item_cache.pop(col, None) return self _set_value.__doc__ = set_value.__doc__ def _ixs(self, i, axis=0): """ Parameters ---------- i : int, slice, or sequence of integers axis : int Notes ----- If slice passed, the resulting data will be a view. """ # irow if axis == 0: if isinstance(i, slice): return self[i] else: label = self.index[i] if isinstance(label, Index): # a location index by definition result = self.take(i, axis=axis) copy = True else: new_values = self._data.fast_xs(i) if is_scalar(new_values): return new_values # if we are a copy, mark as such copy = (isinstance(new_values, np.ndarray) and new_values.base is None) result = self._constructor_sliced(new_values, index=self.columns, name=self.index[i], dtype=new_values.dtype) result._set_is_copy(self, copy=copy) return result # icol else: label = self.columns[i] if isinstance(i, slice): # need to return view lab_slice = slice(label[0], label[-1]) return self.loc[:, lab_slice] else: if isinstance(label, Index): return self._take(i, axis=1) index_len = len(self.index) # if the values returned are not the same length # as the index (iow a not found value), iget returns # a 0-len ndarray. This is effectively catching # a numpy error (as numpy should really raise) values = self._data.iget(i) if index_len and not len(values): values = np.array([np.nan] * index_len, dtype=object) result = self._box_col_values(values, label) # this is a cached value, mark it so result._set_as_cached(label, self) return result def __getitem__(self, key): key = lib.item_from_zerodim(key) key = com.apply_if_callable(key, self) # shortcut if the key is in columns try: if self.columns.is_unique and key in self.columns: if self.columns.nlevels > 1: return self._getitem_multilevel(key) return self._get_item_cache(key) except (TypeError, ValueError): # The TypeError correctly catches non hashable "key" (e.g. list) # The ValueError can be removed once GH #21729 is fixed pass # Do we have a slicer (on rows)? indexer = convert_to_index_sliceable(self, key) if indexer is not None: return self._slice(indexer, axis=0) # Do we have a (boolean) DataFrame? if isinstance(key, DataFrame): return self._getitem_frame(key) # Do we have a (boolean) 1d indexer? 
if com.is_bool_indexer(key): return self._getitem_bool_array(key) # We are left with two options: a single key, and a collection of keys, # We interpret tuples as collections only for non-MultiIndex is_single_key = isinstance(key, tuple) or not is_list_like(key) if is_single_key: if self.columns.nlevels > 1: return self._getitem_multilevel(key) indexer = self.columns.get_loc(key) if is_integer(indexer): indexer = [indexer] else: if is_iterator(key): key = list(key) indexer = self.loc._convert_to_indexer(key, axis=1, raise_missing=True) # take() does not accept boolean indexers if getattr(indexer, "dtype", None) == bool: indexer = np.where(indexer)[0] data = self._take(indexer, axis=1) if is_single_key: # What does looking for a single key in a non-unique index return? # The behavior is inconsistent. It returns a Series, except when # - the key itself is repeated (test on data.shape, #9519), or # - we have a MultiIndex on columns (test on self.columns, #21309) if data.shape[1] == 1 and not isinstance(self.columns, MultiIndex): data = data[key] return data def _getitem_bool_array(self, key): # also raises Exception if object array with NA values # warning here just in case -- previously __setitem__ was # reindexing but __getitem__ was not; it seems more reasonable to # go with the __setitem__ behavior since that is more consistent # with all other indexing behavior if isinstance(key, Series) and not key.index.equals(self.index): warnings.warn("Boolean Series key will be reindexed to match " "DataFrame index.", UserWarning, stacklevel=3) elif len(key) != len(self.index): raise ValueError('Item wrong length %d instead of %d.' % (len(key), len(self.index))) # check_bool_indexer will throw exception if Series key cannot # be reindexed to match DataFrame rows key = check_bool_indexer(self.index, key) indexer = key.nonzero()[0] return self._take(indexer, axis=0) def _getitem_multilevel(self, key): loc = self.columns.get_loc(key) if isinstance(loc, (slice, Series, np.ndarray, Index)): new_columns = self.columns[loc] result_columns = maybe_droplevels(new_columns, key) if self._is_mixed_type: result = self.reindex(columns=new_columns) result.columns = result_columns else: new_values = self.values[:, loc] result = self._constructor(new_values, index=self.index, columns=result_columns) result = result.__finalize__(self) # If there is only one column being returned, and its name is # either an empty string, or a tuple with an empty string as its # first element, then treat the empty string as a placeholder # and return the column as if the user had provided that empty # string in the key. If the result is a Series, exclude the # implied empty string from its name. if len(result.columns) == 1: top = result.columns[0] if isinstance(top, tuple): top = top[0] if top == '': result = result[''] if isinstance(result, Series): result = self._constructor_sliced(result, index=self.index, name=key) result._set_is_copy(self) return result else: return self._get_item_cache(key) def _getitem_frame(self, key): if key.values.size and not is_bool_dtype(key.values): raise ValueError('Must pass DataFrame with boolean values only') return self.where(key) def query(self, expr, inplace=False, **kwargs): """ Query the columns of a DataFrame with a boolean expression. Parameters ---------- expr : str The query string to evaluate. You can refer to variables in the environment by prefixing them with an '@' character like ``@a + b``. .. 
versionadded:: 0.25.0 You can refer to column names that contain spaces by surrounding them in backticks. For example, if one of your columns is called ``a a`` and you want to sum it with ``b``, your query should be ```a a` + b``. inplace : bool Whether the query should modify the data in place or return a modified copy. **kwargs See the documentation for :func:`eval` for complete details on the keyword arguments accepted by :meth:`DataFrame.query`. .. versionadded:: 0.18.0 Returns ------- DataFrame DataFrame resulting from the provided query expression. See Also -------- eval : Evaluate a string describing operations on DataFrame columns. DataFrame.eval : Evaluate a string describing operations on DataFrame columns. Notes ----- The result of the evaluation of this expression is first passed to :attr:`DataFrame.loc` and if that fails because of a multidimensional key (e.g., a DataFrame) then the result will be passed to :meth:`DataFrame.__getitem__`. This method uses the top-level :func:`eval` function to evaluate the passed query. The :meth:`~pandas.DataFrame.query` method uses a slightly modified Python syntax by default. For example, the ``&`` and ``|`` (bitwise) operators have the precedence of their boolean cousins, :keyword:`and` and :keyword:`or`. This *is* syntactically valid Python, however the semantics are different. You can change the semantics of the expression by passing the keyword argument ``parser='python'``. This enforces the same semantics as evaluation in Python space. Likewise, you can pass ``engine='python'`` to evaluate an expression using Python itself as a backend. This is not recommended as it is inefficient compared to using ``numexpr`` as the engine. The :attr:`DataFrame.index` and :attr:`DataFrame.columns` attributes of the :class:`~pandas.DataFrame` instance are placed in the query namespace by default, which allows you to treat both the index and columns of the frame as a column in the frame. The identifier ``index`` is used for the frame index; you can also use the name of the index to identify it in a query. Please note that Python keywords may not be used as identifiers. For further details and examples see the ``query`` documentation in :ref:`indexing <indexing.query>`. Examples -------- >>> df = pd.DataFrame({'A': range(1, 6), ... 'B': range(10, 0, -2), ... 'C C': range(10, 5, -1)}) >>> df A B C C 0 1 10 10 1 2 8 9 2 3 6 8 3 4 4 7 4 5 2 6 >>> df.query('A > B') A B C C 4 5 2 6 The previous expression is equivalent to >>> df[df.A > df.B] A B C C 4 5 2 6 For columns with spaces in their name, you can use backtick quoting. >>> df.query('B == `C C`') A B C C 0 1 10 10 The previous expression is equivalent to >>> df[df.B == df['C C']] A B C C 0 1 10 10 """ inplace = validate_bool_kwarg(inplace, 'inplace') if not isinstance(expr, str): msg = "expr must be a string to be evaluated, {0} given" raise ValueError(msg.format(type(expr))) kwargs['level'] = kwargs.pop('level', 0) + 1 kwargs['target'] = None res = self.eval(expr, **kwargs) try: new_data = self.loc[res] except ValueError: # when res is multi-dimensional loc raises, but this is sometimes a # valid query new_data = self[res] if inplace: self._update_inplace(new_data) else: return new_data def eval(self, expr, inplace=False, **kwargs): """ Evaluate a string describing operations on DataFrame columns. Operates on columns only, not specific rows or elements. This allows `eval` to run arbitrary code, which can make you vulnerable to code injection if you pass user input to this function. 
Parameters ---------- expr : str The expression string to evaluate. inplace : bool, default False If the expression contains an assignment, whether to perform the operation inplace and mutate the existing DataFrame. Otherwise, a new DataFrame is returned. .. versionadded:: 0.18.0. kwargs : dict See the documentation for :func:`eval` for complete details on the keyword arguments accepted by :meth:`~pandas.DataFrame.query`. Returns ------- ndarray, scalar, or pandas object The result of the evaluation. See Also -------- DataFrame.query : Evaluates a boolean expression to query the columns of a frame. DataFrame.assign : Can evaluate an expression or function to create new values for a column. eval : Evaluate a Python expression as a string using various backends. Notes ----- For more details see the API documentation for :func:`~eval`. For detailed examples see :ref:`enhancing performance with eval <enhancingperf.eval>`. Examples -------- >>> df = pd.DataFrame({'A': range(1, 6), 'B': range(10, 0, -2)}) >>> df A B 0 1 10 1 2 8 2 3 6 3 4 4 4 5 2 >>> df.eval('A + B') 0 11 1 10 2 9 3 8 4 7 dtype: int64 Assignment is allowed though by default the original DataFrame is not modified. >>> df.eval('C = A + B') A B C 0 1 10 11 1 2 8 10 2 3 6 9 3 4 4 8 4 5 2 7 >>> df A B 0 1 10 1 2 8 2 3 6 3 4 4 4 5 2 Use ``inplace=True`` to modify the original DataFrame. >>> df.eval('C = A + B', inplace=True) >>> df A B C 0 1 10 11 1 2 8 10 2 3 6 9 3 4 4 8 4 5 2 7 """ from pandas.core.computation.eval import eval as _eval inplace = validate_bool_kwarg(inplace, 'inplace') resolvers = kwargs.pop('resolvers', None) kwargs['level'] = kwargs.pop('level', 0) + 1 if resolvers is None: index_resolvers = self._get_index_resolvers() column_resolvers = \ self._get_space_character_free_column_resolvers() resolvers = column_resolvers, index_resolvers if 'target' not in kwargs: kwargs['target'] = self kwargs['resolvers'] = kwargs.get('resolvers', ()) + tuple(resolvers) return _eval(expr, inplace=inplace, **kwargs) def select_dtypes(self, include=None, exclude=None): """ Return a subset of the DataFrame's columns based on the column dtypes. Parameters ---------- include, exclude : scalar or list-like A selection of dtypes or strings to be included/excluded. At least one of these parameters must be supplied. Returns ------- DataFrame The subset of the frame including the dtypes in ``include`` and excluding the dtypes in ``exclude``. Raises ------ ValueError * If both of ``include`` and ``exclude`` are empty * If ``include`` and ``exclude`` have overlapping elements * If any kind of string dtype is passed in. Notes ----- * To select all *numeric* types, use ``np.number`` or ``'number'`` * To select strings you must use the ``object`` dtype, but note that this will return *all* object dtype columns * See the `numpy dtype hierarchy <http://docs.scipy.org/doc/numpy/reference/arrays.scalars.html>`__ * To select datetimes, use ``np.datetime64``, ``'datetime'`` or ``'datetime64'`` * To select timedeltas, use ``np.timedelta64``, ``'timedelta'`` or ``'timedelta64'`` * To select Pandas categorical dtypes, use ``'category'`` * To select Pandas datetimetz dtypes, use ``'datetimetz'`` (new in 0.20.0) or ``'datetime64[ns, tz]'`` Examples -------- >>> df = pd.DataFrame({'a': [1, 2] * 3, ... 'b': [True, False] * 3, ... 
'c': [1.0, 2.0] * 3}) >>> df a b c 0 1 True 1.0 1 2 False 2.0 2 1 True 1.0 3 2 False 2.0 4 1 True 1.0 5 2 False 2.0 >>> df.select_dtypes(include='bool') b 0 True 1 False 2 True 3 False 4 True 5 False >>> df.select_dtypes(include=['float64']) c 0 1.0 1 2.0 2 1.0 3 2.0 4 1.0 5 2.0 >>> df.select_dtypes(exclude=['int']) b c 0 True 1.0 1 False 2.0 2 True 1.0 3 False 2.0 4 True 1.0 5 False 2.0 """ def _get_info_slice(obj, indexer): """Slice the info axis of `obj` with `indexer`.""" if not hasattr(obj, '_info_axis_number'): msg = 'object of type {typ!r} has no info axis' raise TypeError(msg.format(typ=type(obj).__name__)) slices = [slice(None)] * obj.ndim slices[obj._info_axis_number] = indexer return tuple(slices) if not is_list_like(include): include = (include,) if include is not None else () if not is_list_like(exclude): exclude = (exclude,) if exclude is not None else () selection = tuple(map(frozenset, (include, exclude))) if not any(selection): raise ValueError('at least one of include or exclude must be ' 'nonempty') # convert the myriad valid dtypes object to a single representation include, exclude = map( lambda x: frozenset(map(infer_dtype_from_object, x)), selection) for dtypes in (include, exclude): invalidate_string_dtypes(dtypes) # can't both include AND exclude! if not include.isdisjoint(exclude): raise ValueError('include and exclude overlap on {inc_ex}'.format( inc_ex=(include & exclude))) # empty include/exclude -> defaults to True # three cases (we've already raised if both are empty) # case 1: empty include, nonempty exclude # we have True, True, ... True for include, same for exclude # in the loop below we get the excluded # and when we call '&' below we get only the excluded # case 2: nonempty include, empty exclude # same as case 1, but with include # case 3: both nonempty # the "union" of the logic of case 1 and case 2: # we get the included and excluded, and return their logical and include_these = Series(not bool(include), index=self.columns) exclude_these = Series(not bool(exclude), index=self.columns) def is_dtype_instance_mapper(idx, dtype): return idx, functools.partial(issubclass, dtype.type) for idx, f in itertools.starmap(is_dtype_instance_mapper, enumerate(self.dtypes)): if include: # checks for the case of empty include or exclude include_these.iloc[idx] = any(map(f, include)) if exclude: exclude_these.iloc[idx] = not any(map(f, exclude)) dtype_indexer = include_these & exclude_these return self.loc[_get_info_slice(self, dtype_indexer)] def _box_item_values(self, key, values): items = self.columns[self.columns.get_loc(key)] if values.ndim == 2: return self._constructor(values.T, columns=items, index=self.index) else: return self._box_col_values(values, items) def _box_col_values(self, values, items): """ Provide boxed values for a column. 
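Parameters ---------- values : ndarray or ExtensionArray The column's values as stored in the block manager. items : object The column label; becomes the ``name`` of the resulting Series. Returns ------- Series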
""" klass = self._constructor_sliced return klass(values, index=self.index, name=items, fastpath=True) def __setitem__(self, key, value): key = com.apply_if_callable(key, self) # see if we can slice the rows indexer = convert_to_index_sliceable(self, key) if indexer is not None: return self._setitem_slice(indexer, value) if isinstance(key, DataFrame) or getattr(key, 'ndim', None) == 2: self._setitem_frame(key, value) elif isinstance(key, (Series, np.ndarray, list, Index)): self._setitem_array(key, value) else: # set column self._set_item(key, value) def _setitem_slice(self, key, value): self._check_setitem_copy() self.loc._setitem_with_indexer(key, value) def _setitem_array(self, key, value): # also raises Exception if object array with NA values if com.is_bool_indexer(key): if len(key) != len(self.index): raise ValueError('Item wrong length %d instead of %d!' % (len(key), len(self.index))) key = check_bool_indexer(self.index, key) indexer = key.nonzero()[0] self._check_setitem_copy() self.loc._setitem_with_indexer(indexer, value) else: if isinstance(value, DataFrame): if len(value.columns) != len(key): raise ValueError('Columns must be same length as key') for k1, k2 in zip(key, value.columns): self[k1] = value[k2] else: indexer = self.loc._convert_to_indexer(key, axis=1) self._check_setitem_copy() self.loc._setitem_with_indexer((slice(None), indexer), value) def _setitem_frame(self, key, value): # support boolean setting with DataFrame input, e.g. # df[df > df2] = 0 if isinstance(key, np.ndarray): if key.shape != self.shape: raise ValueError( 'Array conditional must be same shape as self' ) key = self._constructor(key, **self._construct_axes_dict()) if key.values.size and not is_bool_dtype(key.values): raise TypeError( 'Must pass DataFrame or 2-d ndarray with boolean values only' ) self._check_inplace_setting(value) self._check_setitem_copy() self._where(-key, value, inplace=True) def _ensure_valid_index(self, value): """ Ensure that if we don't have an index, that we can create one from the passed value. """ # GH5632, make sure that we are a Series convertible if not len(self.index) and is_list_like(value): try: value = Series(value) except (ValueError, NotImplementedError, TypeError): raise ValueError('Cannot set a frame with no defined index ' 'and a value that cannot be converted to a ' 'Series') self._data = self._data.reindex_axis(value.index.copy(), axis=1, fill_value=np.nan) def _set_item(self, key, value): """ Add series to DataFrame in specified column. If series is a numpy-array (not a Series/TimeSeries), it must be the same length as the DataFrames index or an error will be thrown. Series/TimeSeries will be conformed to the DataFrames index to ensure homogeneity. """ self._ensure_valid_index(value) value = self._sanitize_column(key, value) NDFrame._set_item(self, key, value) # check if we are modifying a copy # try to set first as we want an invalid # value exception to occur first if len(self): self._check_setitem_copy() def insert(self, loc, column, value, allow_duplicates=False): """ Insert column into DataFrame at specified location. Raises a ValueError if `column` is already contained in the DataFrame, unless `allow_duplicates` is set to True. Parameters ---------- loc : int Insertion index. 
Must satisfy 0 <= loc <= len(columns). column : string, number, or hashable object Label of the inserted column. value : int, Series, or array-like allow_duplicates : bool, optional Whether to allow inserting a column whose label already exists. """ self._ensure_valid_index(value) value = self._sanitize_column(column, value, broadcast=False) self._data.insert(loc, column, value, allow_duplicates=allow_duplicates) def assign(self, **kwargs): r""" Assign new columns to a DataFrame. Returns a new object with all original columns in addition to new ones. Existing columns that are re-assigned will be overwritten. Parameters ---------- **kwargs : dict of {str: callable or Series} The column names are keywords. If the values are callable, they are computed on the DataFrame and assigned to the new columns. The callable must not change the input DataFrame (though pandas doesn't check it). If the values are not callable (e.g. a Series, scalar, or array), they are simply assigned. Returns ------- DataFrame A new DataFrame with the new columns in addition to all the existing columns. Notes ----- Assigning multiple columns within the same ``assign`` is possible. For Python 3.6 and above, later items in '\*\*kwargs' may refer to newly created or modified columns in 'df'; items are computed and assigned into 'df' in order. For Python 3.5 and below, the order of keyword arguments is not specified, so you cannot refer to newly created or modified columns. All items are computed first, and then assigned in alphabetical order. .. versionchanged:: 0.23.0 Keyword argument order is maintained for Python 3.6 and later. Examples -------- >>> df = pd.DataFrame({'temp_c': [17.0, 25.0]}, ... index=['Portland', 'Berkeley']) >>> df temp_c Portland 17.0 Berkeley 25.0 Where the value is a callable, evaluated on `df`: >>> df.assign(temp_f=lambda x: x.temp_c * 9 / 5 + 32) temp_c temp_f Portland 17.0 62.6 Berkeley 25.0 77.0 Alternatively, the same behavior can be achieved by directly referencing an existing Series or sequence: >>> df.assign(temp_f=df['temp_c'] * 9 / 5 + 32) temp_c temp_f Portland 17.0 62.6 Berkeley 25.0 77.0 In Python 3.6+, you can create multiple columns within the same assign where one of the columns depends on another one defined within the same assign: >>> df.assign(temp_f=lambda x: x['temp_c'] * 9 / 5 + 32, ... temp_k=lambda x: (x['temp_f'] + 459.67) * 5 / 9) temp_c temp_f temp_k Portland 17.0 62.6 290.15 Berkeley 25.0 77.0 298.15 """ data = self.copy() # >= 3.6 preserve order of kwargs if PY36: for k, v in kwargs.items(): data[k] = com.apply_if_callable(v, data) else: # <= 3.5: do all calculations first... results = OrderedDict() for k, v in kwargs.items(): results[k] = com.apply_if_callable(v, data) # <= 3.5: sort keys for a deterministic assignment order results = sorted(results.items()) # ... and then assign for k, v in results: data[k] = v return data def _sanitize_column(self, key, value, broadcast=True): """ Ensures new columns (which go into the BlockManager as new blocks) are always copied and converted into an array. Parameters ---------- key : object value : scalar, Series, or array-like broadcast : bool, default True If ``key`` matches multiple duplicate column names in the DataFrame, this parameter indicates whether ``value`` should be tiled so that the returned array contains a (duplicated) column for each occurrence of the key. If False, ``value`` will not be tiled. 
Returns ------- numpy.ndarray """ def reindexer(value): # reindex if necessary if value.index.equals(self.index) or not len(self.index): value = value._values.copy() else: # GH 4107 try: value = value.reindex(self.index)._values except Exception as e: # duplicate axis if not value.index.is_unique: raise e # other raise TypeError('incompatible index of inserted column ' 'with frame index') return value if isinstance(value, Series): value = reindexer(value) elif isinstance(value, DataFrame): # align right-hand-side columns if self.columns # is multi-index and self[key] is a sub-frame if isinstance(self.columns, MultiIndex) and key in self.columns: loc = self.columns.get_loc(key) if isinstance(loc, (slice, Series, np.ndarray, Index)): cols = maybe_droplevels(self.columns[loc], key) if len(cols) and not cols.equals(value.columns): value = value.reindex(cols, axis=1) # now align rows value = reindexer(value).T elif isinstance(value, ExtensionArray): # Explicitly copy here, instead of in sanitize_index, # as sanitize_index won't copy an EA, even with copy=True value = value.copy() value = sanitize_index(value, self.index, copy=False) elif isinstance(value, Index) or is_sequence(value): # turn me into an ndarray value = sanitize_index(value, self.index, copy=False) if not isinstance(value, (np.ndarray, Index)): if isinstance(value, list) and len(value) > 0: value = maybe_convert_platform(value) else: value = com.asarray_tuplesafe(value) elif value.ndim == 2: value = value.copy().T elif isinstance(value, Index): value = value.copy(deep=True) else: value = value.copy() # possibly infer to datetimelike if is_object_dtype(value.dtype): value = maybe_infer_to_datetimelike(value) else: # cast ignores pandas dtypes. so save the dtype first infer_dtype, _ = infer_dtype_from_scalar( value, pandas_dtype=True) # upcast value = cast_scalar_to_array(len(self.index), value) value = maybe_cast_to_datetime(value, infer_dtype) # return internal types directly if is_extension_type(value) or is_extension_array_dtype(value): return value # broadcast across multiple columns if necessary if broadcast and key in self.columns and value.ndim == 1: if (not self.columns.is_unique or isinstance(self.columns, MultiIndex)): existing_piece = self[key] if isinstance(existing_piece, DataFrame): value = np.tile(value, (len(existing_piece.columns), 1)) return np.atleast_2d(np.asarray(value)) @property def _series(self): return {item: Series(self._data.iget(idx), index=self.index, name=item) for idx, item in enumerate(self.columns)} def lookup(self, row_labels, col_labels): """ Label-based "fancy indexing" function for DataFrame. Given equal-length arrays of row and column labels, return an array of the values corresponding to each (row, col) pair. 
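For example, with ``row_labels=['a', 'b']`` and ``col_labels=['x', 'y']`` (illustrative labels), the result is ``[df.at['a', 'x'], df.at['b', 'y']]``.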
Parameters ---------- row_labels : sequence The row labels to use for lookup. col_labels : sequence The column labels to use for lookup. Returns ------- numpy.ndarray The found values. Notes ----- Akin to:: result = [df.get_value(row, col) for row, col in zip(row_labels, col_labels)] """ n = len(row_labels) if n != len(col_labels): raise ValueError('Row labels must have same size as column labels') thresh = 1000 if not self._is_mixed_type or n > thresh: values = self.values ridx = self.index.get_indexer(row_labels) cidx = self.columns.get_indexer(col_labels) if (ridx == -1).any(): raise KeyError('One or more row labels was not found') if (cidx == -1).any(): raise KeyError('One or more column labels was not found') flat_index = ridx * len(self.columns) + cidx result = values.flat[flat_index] else: result = np.empty(n, dtype='O') for i, (r, c) in enumerate(zip(row_labels, col_labels)): result[i] = self._get_value(r, c) if is_object_dtype(result): result = lib.maybe_convert_objects(result) return result # ---------------------------------------------------------------------- # Reindexing and alignment def _reindex_axes(self, axes, level, limit, tolerance, method, fill_value, copy): frame = self columns = axes['columns'] if columns is not None: frame = frame._reindex_columns(columns, method, copy, level, fill_value, limit, tolerance) index = axes['index'] if index is not None: frame = frame._reindex_index(index, method, copy, level, fill_value, limit, tolerance) return frame def _reindex_index(self, new_index, method, copy, level, fill_value=np.nan, limit=None, tolerance=None): new_index, indexer = self.index.reindex(new_index, method=method, level=level, limit=limit, tolerance=tolerance) return self._reindex_with_indexers({0: [new_index, indexer]}, copy=copy, fill_value=fill_value, allow_dups=False) def _reindex_columns(self, new_columns, method, copy, level, fill_value=None, limit=None, tolerance=None): new_columns, indexer = self.columns.reindex(new_columns, method=method, level=level, limit=limit, tolerance=tolerance) return self._reindex_with_indexers({1: [new_columns, indexer]}, copy=copy, fill_value=fill_value, allow_dups=False) def _reindex_multi(self, axes, copy, fill_value): """ We are guaranteed non-Nones in the axes. 
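Parameters ---------- axes : dict Mapping of the form ``{'index': Index, 'columns': Index}`` giving the reindex targets for both axes. copy : bool Forwarded to ``_reindex_with_indexers``. fill_value : scalar Value used for locations introduced by the reindex.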
""" new_index, row_indexer = self.index.reindex(axes['index']) new_columns, col_indexer = self.columns.reindex(axes['columns']) if row_indexer is not None and col_indexer is not None: indexer = row_indexer, col_indexer new_values = algorithms.take_2d_multi(self.values, indexer, fill_value=fill_value) return self._constructor(new_values, index=new_index, columns=new_columns) else: return self._reindex_with_indexers({0: [new_index, row_indexer], 1: [new_columns, col_indexer]}, copy=copy, fill_value=fill_value) @Appender(_shared_docs['align'] % _shared_doc_kwargs) def align(self, other, join='outer', axis=None, level=None, copy=True, fill_value=None, method=None, limit=None, fill_axis=0, broadcast_axis=None): return super().align(other, join=join, axis=axis, level=level, copy=copy, fill_value=fill_value, method=method, limit=limit, fill_axis=fill_axis, broadcast_axis=broadcast_axis) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.reindex.__doc__) @rewrite_axis_style_signature('labels', [('method', None), ('copy', True), ('level', None), ('fill_value', np.nan), ('limit', None), ('tolerance', None)]) def reindex(self, *args, **kwargs): axes = validate_axis_style_args(self, args, kwargs, 'labels', 'reindex') kwargs.update(axes) # Pop these, since the values are in `kwargs` under different names kwargs.pop('axis', None) kwargs.pop('labels', None) return super().reindex(**kwargs) @Appender(_shared_docs['reindex_axis'] % _shared_doc_kwargs) def reindex_axis(self, labels, axis=0, method=None, level=None, copy=True, limit=None, fill_value=np.nan): return super().reindex_axis(labels=labels, axis=axis, method=method, level=level, copy=copy, limit=limit, fill_value=fill_value) def drop(self, labels=None, axis=0, index=None, columns=None, level=None, inplace=False, errors='raise'): """ Drop specified labels from rows or columns. Remove rows or columns by specifying label names and corresponding axis, or by specifying directly index or column names. When using a multi-index, labels on different levels can be removed by specifying the level. Parameters ---------- labels : single label or list-like Index or column labels to drop. axis : {0 or 'index', 1 or 'columns'}, default 0 Whether to drop labels from the index (0 or 'index') or columns (1 or 'columns'). index : single label or list-like Alternative to specifying axis (``labels, axis=0`` is equivalent to ``index=labels``). .. versionadded:: 0.21.0 columns : single label or list-like Alternative to specifying axis (``labels, axis=1`` is equivalent to ``columns=labels``). .. versionadded:: 0.21.0 level : int or level name, optional For MultiIndex, level from which the labels will be removed. inplace : bool, default False If True, do operation inplace and return None. errors : {'ignore', 'raise'}, default 'raise' If 'ignore', suppress error and only existing labels are dropped. Returns ------- DataFrame DataFrame without the removed index or column labels. Raises ------ KeyError If any of the labels is not found in the selected axis. See Also -------- DataFrame.loc : Label-location based indexer for selection by label. DataFrame.dropna : Return DataFrame with labels on given axis omitted where (all or any) data are missing. DataFrame.drop_duplicates : Return DataFrame with duplicate rows removed, optionally only considering certain columns. Series.drop : Return Series with specified index labels removed. Examples -------- >>> df = pd.DataFrame(np.arange(12).reshape(3, 4), ... 
columns=['A', 'B', 'C', 'D']) >>> df A B C D 0 0 1 2 3 1 4 5 6 7 2 8 9 10 11 Drop columns >>> df.drop(['B', 'C'], axis=1) A D 0 0 3 1 4 7 2 8 11 >>> df.drop(columns=['B', 'C']) A D 0 0 3 1 4 7 2 8 11 Drop a row by index >>> df.drop([0, 1]) A B C D 2 8 9 10 11 Drop columns and/or rows of MultiIndex DataFrame >>> midx = pd.MultiIndex(levels=[['lama', 'cow', 'falcon'], ... ['speed', 'weight', 'length']], ... codes=[[0, 0, 0, 1, 1, 1, 2, 2, 2], ... [0, 1, 2, 0, 1, 2, 0, 1, 2]]) >>> df = pd.DataFrame(index=midx, columns=['big', 'small'], ... data=[[45, 30], [200, 100], [1.5, 1], [30, 20], ... [250, 150], [1.5, 0.8], [320, 250], ... [1, 0.8], [0.3, 0.2]]) >>> df big small lama speed 45.0 30.0 weight 200.0 100.0 length 1.5 1.0 cow speed 30.0 20.0 weight 250.0 150.0 length 1.5 0.8 falcon speed 320.0 250.0 weight 1.0 0.8 length 0.3 0.2 >>> df.drop(index='cow', columns='small') big lama speed 45.0 weight 200.0 length 1.5 falcon speed 320.0 weight 1.0 length 0.3 >>> df.drop(index='length', level=1) big small lama speed 45.0 30.0 weight 200.0 100.0 cow speed 30.0 20.0 weight 250.0 150.0 falcon speed 320.0 250.0 weight 1.0 0.8 """ return super().drop(labels=labels, axis=axis, index=index, columns=columns, level=level, inplace=inplace, errors=errors) @rewrite_axis_style_signature('mapper', [('copy', True), ('inplace', False), ('level', None), ('errors', 'ignore')]) def rename(self, *args, **kwargs): """ Alter axes labels. Function / dict values must be unique (1-to-1). Labels not contained in a dict / Series will be left as-is. Extra labels listed don't throw an error. See the :ref:`user guide <basics.rename>` for more. Parameters ---------- mapper : dict-like or function Dict-like or function transformations to apply to that axis' values. Use either ``mapper`` and ``axis`` to specify the axis to target with ``mapper``, or ``index`` and ``columns``. index : dict-like or function Alternative to specifying axis (``mapper, axis=0`` is equivalent to ``index=mapper``). columns : dict-like or function Alternative to specifying axis (``mapper, axis=1`` is equivalent to ``columns=mapper``). axis : int or str Axis to target with ``mapper``. Can be either the axis name ('index', 'columns') or number (0, 1). The default is 'index'. copy : bool, default True Also copy underlying data. inplace : bool, default False Whether to apply the renaming in place instead of returning a new DataFrame. If True then the value of copy is ignored. level : int or level name, default None In case of a MultiIndex, only rename labels in the specified level. errors : {'ignore', 'raise'}, default 'ignore' If 'raise', raise a `KeyError` when a dict-like `mapper`, `index`, or `columns` contains labels that are not present in the Index being transformed. If 'ignore', existing keys will be renamed and extra keys will be ignored. Returns ------- DataFrame DataFrame with the renamed axis labels. Raises ------ KeyError If any of the labels is not found in the selected axis and "errors='raise'". See Also -------- DataFrame.rename_axis : Set the name of the axis. Examples -------- ``DataFrame.rename`` supports two calling conventions: * ``(index=index_mapper, columns=columns_mapper, ...)`` * ``(mapper, axis={'index', 'columns'}, ...)`` We *highly* recommend using keyword arguments to clarify your intent. 
Rename columns using a mapping: >>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]}) >>> df.rename(columns={"A": "a", "B": "c"}) a c 0 1 4 1 2 5 2 3 6 Rename index using a mapping: >>> df.rename(index={0: "x", 1: "y", 2: "z"}) A B x 1 4 y 2 5 z 3 6 Cast index labels to a different type: >>> df.index RangeIndex(start=0, stop=3, step=1) >>> df.rename(index=str).index Index(['0', '1', '2'], dtype='object') >>> df.rename(columns={"A": "a", "B": "b", "C": "c"}, errors="raise") Traceback (most recent call last): KeyError: ['C'] not found in axis Using axis-style parameters >>> df.rename(str.lower, axis='columns') a b 0 1 4 1 2 5 2 3 6 >>> df.rename({1: 2, 2: 4}, axis='index') A B 0 1 4 2 2 5 4 3 6 """ axes = validate_axis_style_args(self, args, kwargs, 'mapper', 'rename') kwargs.update(axes) # Pop these, since the values are in `kwargs` under different names kwargs.pop('axis', None) kwargs.pop('mapper', None) return super().rename(**kwargs) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.fillna.__doc__) def fillna(self, value=None, method=None, axis=None, inplace=False, limit=None, downcast=None, **kwargs): return super().fillna(value=value, method=method, axis=axis, inplace=inplace, limit=limit, downcast=downcast, **kwargs) @Appender(_shared_docs['replace'] % _shared_doc_kwargs) def replace(self, to_replace=None, value=None, inplace=False, limit=None, regex=False, method='pad'): return super().replace(to_replace=to_replace, value=value, inplace=inplace, limit=limit, regex=regex, method=method) @Appender(_shared_docs['shift'] % _shared_doc_kwargs) def shift(self, periods=1, freq=None, axis=0, fill_value=None): return super().shift(periods=periods, freq=freq, axis=axis, fill_value=fill_value) def set_index(self, keys, drop=True, append=False, inplace=False, verify_integrity=False): """ Set the DataFrame index using existing columns. Set the DataFrame index (row labels) using one or more existing columns or arrays (of the correct length). The index can replace the existing index or expand on it. Parameters ---------- keys : label or array-like or list of labels/arrays This parameter can be either a single column key, a single array of the same length as the calling DataFrame, or a list containing an arbitrary combination of column keys and arrays. Here, "array" encompasses :class:`Series`, :class:`Index`, ``np.ndarray``, and instances of :class:`~collections.abc.Iterator`. drop : bool, default True Delete columns to be used as the new index. append : bool, default False Whether to append columns to existing index. inplace : bool, default False Modify the DataFrame in place (do not create a new object). verify_integrity : bool, default False Check the new index for duplicates. Otherwise defer the check until necessary. Setting to False will improve the performance of this method. Returns ------- DataFrame Changed row labels. See Also -------- DataFrame.reset_index : Opposite of set_index. DataFrame.reindex : Change to new indices or expand indices. DataFrame.reindex_like : Change to same indices as other DataFrame. Examples -------- >>> df = pd.DataFrame({'month': [1, 4, 7, 10], ... 'year': [2012, 2014, 2013, 2014], ... 
'sale': [55, 40, 84, 31]}) >>> df month year sale 0 1 2012 55 1 4 2014 40 2 7 2013 84 3 10 2014 31 Set the index to become the 'month' column: >>> df.set_index('month') year sale month 1 2012 55 4 2014 40 7 2013 84 10 2014 31 Create a MultiIndex using columns 'year' and 'month': >>> df.set_index(['year', 'month']) sale year month 2012 1 55 2014 4 40 2013 7 84 2014 10 31 Create a MultiIndex using an Index and a column: >>> df.set_index([pd.Index([1, 2, 3, 4]), 'year']) month sale year 1 2012 1 55 2 2014 4 40 3 2013 7 84 4 2014 10 31 Create a MultiIndex using two Series: >>> s = pd.Series([1, 2, 3, 4]) >>> df.set_index([s, s**2]) month year sale 1 1 1 2012 55 2 4 4 2014 40 3 9 7 2013 84 4 16 10 2014 31 """ inplace = validate_bool_kwarg(inplace, 'inplace') if not isinstance(keys, list): keys = [keys] err_msg = ('The parameter "keys" may be a column key, one-dimensional ' 'array, or a list containing only valid column keys and ' 'one-dimensional arrays.') missing = [] for col in keys: if isinstance(col, (ABCIndexClass, ABCSeries, np.ndarray, list, abc.Iterator)): # arrays are fine as long as they are one-dimensional # iterators get converted to list below if getattr(col, 'ndim', 1) != 1: raise ValueError(err_msg) else: # everything else gets tried as a key; see GH 24969 try: found = col in self.columns except TypeError: raise TypeError(err_msg + ' Received column of ' 'type {}'.format(type(col))) else: if not found: missing.append(col) if missing: raise KeyError('None of {} are in the columns'.format(missing)) if inplace: frame = self else: frame = self.copy() arrays = [] names = [] if append: names = [x for x in self.index.names] if isinstance(self.index, ABCMultiIndex): for i in range(self.index.nlevels): arrays.append(self.index._get_level_values(i)) else: arrays.append(self.index) to_remove = [] for col in keys: if isinstance(col, ABCMultiIndex): for n in range(col.nlevels): arrays.append(col._get_level_values(n)) names.extend(col.names) elif isinstance(col, (ABCIndexClass, ABCSeries)): # if Index then not MultiIndex (treated above) arrays.append(col) names.append(col.name) elif isinstance(col, (list, np.ndarray)): arrays.append(col) names.append(None) elif isinstance(col, abc.Iterator): arrays.append(list(col)) names.append(None) # from here, col can only be a column label else: arrays.append(frame[col]._values) names.append(col) if drop: to_remove.append(col) if len(arrays[-1]) != len(self): # check newest element against length of calling frame, since # ensure_index_from_sequences would not raise for append=False. raise ValueError('Length mismatch: Expected {len_self} rows, ' 'received array of length {len_col}'.format( len_self=len(self), len_col=len(arrays[-1]) )) index = ensure_index_from_sequences(arrays, names) if verify_integrity and not index.is_unique: duplicates = index[index.duplicated()].unique() raise ValueError('Index has duplicate keys: {dup}'.format( dup=duplicates)) # use set to handle duplicate column names gracefully in case of drop for c in set(to_remove): del frame[c] # clear up memory usage index._cleanup() frame.index = index if not inplace: return frame def reset_index(self, level=None, drop=False, inplace=False, col_level=0, col_fill=''): """ Reset the index, or a level of it. Reset the index of the DataFrame, and use the default one instead. If the DataFrame has a MultiIndex, this method can remove one or more levels. Parameters ---------- level : int, str, tuple, or list, default None Only remove the given levels from the index. Removes all levels by default. 
drop : bool, default False Do not try to insert index into dataframe columns. This resets the index to the default integer index. inplace : bool, default False Modify the DataFrame in place (do not create a new object). col_level : int or str, default 0 If the columns have multiple levels, determines which level the labels are inserted into. By default it is inserted into the first level. col_fill : object, default '' If the columns have multiple levels, determines how the other levels are named. If None then the index name is repeated. Returns ------- DataFrame DataFrame with the new index. See Also -------- DataFrame.set_index : Opposite of reset_index. DataFrame.reindex : Change to new indices or expand indices. DataFrame.reindex_like : Change to same indices as other DataFrame. Examples -------- >>> df = pd.DataFrame([('bird', 389.0), ... ('bird', 24.0), ... ('mammal', 80.5), ... ('mammal', np.nan)], ... index=['falcon', 'parrot', 'lion', 'monkey'], ... columns=('class', 'max_speed')) >>> df class max_speed falcon bird 389.0 parrot bird 24.0 lion mammal 80.5 monkey mammal NaN When we reset the index, the old index is added as a column, and a new sequential index is used: >>> df.reset_index() index class max_speed 0 falcon bird 389.0 1 parrot bird 24.0 2 lion mammal 80.5 3 monkey mammal NaN We can use the `drop` parameter to avoid the old index being added as a column: >>> df.reset_index(drop=True) class max_speed 0 bird 389.0 1 bird 24.0 2 mammal 80.5 3 mammal NaN You can also use `reset_index` with `MultiIndex`. >>> index = pd.MultiIndex.from_tuples([('bird', 'falcon'), ... ('bird', 'parrot'), ... ('mammal', 'lion'), ... ('mammal', 'monkey')], ... names=['class', 'name']) >>> columns = pd.MultiIndex.from_tuples([('speed', 'max'), ... ('species', 'type')]) >>> df = pd.DataFrame([(389.0, 'fly'), ... ( 24.0, 'fly'), ... ( 80.5, 'run'), ... (np.nan, 'jump')], ... index=index, ... columns=columns) >>> df speed species max type class name bird falcon 389.0 fly parrot 24.0 fly mammal lion 80.5 run monkey NaN jump If the index has multiple levels, we can reset a subset of them: >>> df.reset_index(level='class') class speed species max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump If we are not dropping the index, by default, it is placed in the top level. 
We can place it in another level: >>> df.reset_index(level='class', col_level=1) speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump When the index is inserted under another level, we can specify under which one with the parameter `col_fill`: >>> df.reset_index(level='class', col_level=1, col_fill='species') species speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump If we specify a nonexistent level for `col_fill`, it is created: >>> df.reset_index(level='class', col_level=1, col_fill='genus') genus speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump """ inplace = validate_bool_kwarg(inplace, 'inplace') if inplace: new_obj = self else: new_obj = self.copy() def _maybe_casted_values(index, labels=None): values = index._values if not isinstance(index, (PeriodIndex, DatetimeIndex)): if values.dtype == np.object_: values = lib.maybe_convert_objects(values) # if we have the labels, extract the values with a mask if labels is not None: mask = labels == -1 # we can have situations where the whole mask is -1, # meaning there is nothing found in labels, so make all nan's if mask.all(): values = np.empty(len(mask)) values.fill(np.nan) else: values = values.take(labels) # TODO(https://github.com/pandas-dev/pandas/issues/24206) # Push this into maybe_upcast_putmask? # We can't pass EAs there right now. Looks a bit # complicated. # So we unbox the ndarray_values, op, re-box. values_type = type(values) values_dtype = values.dtype if issubclass(values_type, DatetimeLikeArray): values = values._data if mask.any(): values, changed = maybe_upcast_putmask( values, mask, np.nan) if issubclass(values_type, DatetimeLikeArray): values = values_type(values, dtype=values_dtype) return values new_index = ibase.default_index(len(new_obj)) if level is not None: if not isinstance(level, (tuple, list)): level = [level] level = [self.index._get_level_number(lev) for lev in level] if len(level) < self.index.nlevels: new_index = self.index.droplevel(level) if not drop: if isinstance(self.index, MultiIndex): names = [n if n is not None else ('level_%d' % i) for (i, n) in enumerate(self.index.names)] to_insert = zip(self.index.levels, self.index.codes) else: default = 'index' if 'index' not in self else 'level_0' names = ([default] if self.index.name is None else [self.index.name]) to_insert = ((self.index, None),) multi_col = isinstance(self.columns, MultiIndex) for i, (lev, lab) in reversed(list(enumerate(to_insert))): if not (level is None or i in level): continue name = names[i] if multi_col: col_name = (list(name) if isinstance(name, tuple) else [name]) if col_fill is None: if len(col_name) not in (1, self.columns.nlevels): raise ValueError("col_fill=None is incompatible " "with incomplete column name " "{}".format(name)) col_fill = col_name[0] lev_num = self.columns._get_level_number(col_level) name_lst = [col_fill] * lev_num + col_name missing = self.columns.nlevels - len(name_lst) name_lst += [col_fill] * missing name = tuple(name_lst) # to ndarray and maybe infer different dtype level_values = _maybe_casted_values(lev, lab) new_obj.insert(0, name, level_values) new_obj.index = new_index if not inplace: return new_obj # ---------------------------------------------------------------------- # Reindex-based selection methods @Appender(_shared_docs['isna'] % _shared_doc_kwargs) def isna(self): return super().isna() 
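# Note: ``isnull`` below is an alias of ``isna``; both defer to the shared NDFrame implementation, so, for example (illustrative), ``pd.DataFrame({'a': [1, None]}).isna()`` and the equivalent ``isnull()`` call produce the same boolean mask.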
@Appender(_shared_docs['isna'] % _shared_doc_kwargs) def isnull(self): return super().isnull() @Appender(_shared_docs['notna'] % _shared_doc_kwargs) def notna(self): return super().notna() @Appender(_shared_docs['notna'] % _shared_doc_kwargs) def notnull(self): return super().notnull() def dropna(self, axis=0, how='any', thresh=None, subset=None, inplace=False): """ Remove missing values. See the :ref:`User Guide <missing_data>` for more on which values are considered missing, and how to work with missing data. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 Determine if rows or columns which contain missing values are removed. * 0, or 'index' : Drop rows which contain missing values. * 1, or 'columns' : Drop columns which contain missing value. .. deprecated:: 0.23.0 Pass tuple or list to drop on multiple axes. Only a single axis is allowed. how : {'any', 'all'}, default 'any' Determine if row or column is removed from DataFrame, when we have at least one NA or all NA. * 'any' : If any NA values are present, drop that row or column. * 'all' : If all values are NA, drop that row or column. thresh : int, optional Require that many non-NA values. subset : array-like, optional Labels along other axis to consider, e.g. if you are dropping rows these would be a list of columns to include. inplace : bool, default False If True, do operation inplace and return None. Returns ------- DataFrame DataFrame with NA entries dropped from it. See Also -------- DataFrame.isna: Indicate missing values. DataFrame.notna : Indicate existing (non-missing) values. DataFrame.fillna : Replace missing values. Series.dropna : Drop missing values. Index.dropna : Drop missing indices. Examples -------- >>> df = pd.DataFrame({"name": ['Alfred', 'Batman', 'Catwoman'], ... "toy": [np.nan, 'Batmobile', 'Bullwhip'], ... "born": [pd.NaT, pd.Timestamp("1940-04-25"), ... pd.NaT]}) >>> df name toy born 0 Alfred NaN NaT 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Drop the rows where at least one element is missing. >>> df.dropna() name toy born 1 Batman Batmobile 1940-04-25 Drop the columns where at least one element is missing. >>> df.dropna(axis='columns') name 0 Alfred 1 Batman 2 Catwoman Drop the rows where all elements are missing. >>> df.dropna(how='all') name toy born 0 Alfred NaN NaT 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Keep only the rows with at least 2 non-NA values. >>> df.dropna(thresh=2) name toy born 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Define in which columns to look for missing values. >>> df.dropna(subset=['name', 'born']) name toy born 1 Batman Batmobile 1940-04-25 Keep the DataFrame with valid entries in the same variable. 
>>> df.dropna(inplace=True) >>> df name toy born 1 Batman Batmobile 1940-04-25 """ inplace = validate_bool_kwarg(inplace, 'inplace') if isinstance(axis, (tuple, list)): # GH20987 msg = ("supplying multiple axes to axis is deprecated and " "will be removed in a future version.") warnings.warn(msg, FutureWarning, stacklevel=2) result = self for ax in axis: result = result.dropna(how=how, thresh=thresh, subset=subset, axis=ax) else: axis = self._get_axis_number(axis) agg_axis = 1 - axis agg_obj = self if subset is not None: ax = self._get_axis(agg_axis) indices = ax.get_indexer_for(subset) check = indices == -1 if check.any(): raise KeyError(list(np.compress(check, subset))) agg_obj = self.take(indices, axis=agg_axis) count = agg_obj.count(axis=agg_axis) if thresh is not None: mask = count >= thresh elif how == 'any': mask = count == len(agg_obj._get_axis(agg_axis)) elif how == 'all': mask = count > 0 else: if how is not None: raise ValueError('invalid how option: {h}'.format(h=how)) else: raise TypeError('must specify how or thresh') result = self.loc(axis=axis)[mask] if inplace: self._update_inplace(result) else: return result def drop_duplicates(self, subset=None, keep='first', inplace=False): """ Return DataFrame with duplicate rows removed, optionally only considering certain columns. Indexes, including time indexes are ignored. Parameters ---------- subset : column label or sequence of labels, optional Only consider certain columns for identifying duplicates, by default use all of the columns keep : {'first', 'last', False}, default 'first' - ``first`` : Drop duplicates except for the first occurrence. - ``last`` : Drop duplicates except for the last occurrence. - False : Drop all duplicates. inplace : boolean, default False Whether to drop duplicates in place or to return a copy Returns ------- DataFrame """ if self.empty: return self.copy() inplace = validate_bool_kwarg(inplace, 'inplace') duplicated = self.duplicated(subset, keep=keep) if inplace: inds, = (-duplicated)._ndarray_values.nonzero() new_data = self._data.take(inds) self._update_inplace(new_data) else: return self[-duplicated] def duplicated(self, subset=None, keep='first'): """ Return boolean Series denoting duplicate rows, optionally only considering certain columns. Parameters ---------- subset : column label or sequence of labels, optional Only consider certain columns for identifying duplicates, by default use all of the columns keep : {'first', 'last', False}, default 'first' - ``first`` : Mark duplicates as ``True`` except for the first occurrence. - ``last`` : Mark duplicates as ``True`` except for the last occurrence. - False : Mark all duplicates as ``True``. Returns ------- Series """ from pandas.core.sorting import get_group_index from pandas._libs.hashtable import duplicated_int64, _SIZE_HINT_LIMIT if self.empty: return Series(dtype=bool) def f(vals): labels, shape = algorithms.factorize( vals, size_hint=min(len(self), _SIZE_HINT_LIMIT)) return labels.astype('i8', copy=False), len(shape) if subset is None: subset = self.columns elif (not np.iterable(subset) or isinstance(subset, str) or isinstance(subset, tuple) and subset in self.columns): subset = subset, # Verify all columns in subset exist in the queried dataframe # Otherwise, raise a KeyError, same as if you try to __getitem__ with a # key that doesn't exist. 
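# For example (illustrative): ``df.duplicated(subset=['a', 'z'])`` where 'z' is not among ``df.columns`` raises ``KeyError`` listing the missing labels.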
diff = Index(subset).difference(self.columns) if not diff.empty: raise KeyError(diff) vals = (col.values for name, col in self.iteritems() if name in subset) labels, shape = map(list, zip(*map(f, vals))) ids = get_group_index(labels, shape, sort=False, xnull=False) return Series(duplicated_int64(ids, keep), index=self.index) # ---------------------------------------------------------------------- # Sorting @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.sort_values.__doc__) def sort_values(self, by, axis=0, ascending=True, inplace=False, kind='quicksort', na_position='last'): inplace = validate_bool_kwarg(inplace, 'inplace') axis = self._get_axis_number(axis) if not isinstance(by, list): by = [by] if is_sequence(ascending) and len(by) != len(ascending): raise ValueError('Length of ascending (%d) != length of by (%d)' % (len(ascending), len(by))) if len(by) > 1: from pandas.core.sorting import lexsort_indexer keys = [self._get_label_or_level_values(x, axis=axis) for x in by] indexer = lexsort_indexer(keys, orders=ascending, na_position=na_position) indexer = ensure_platform_int(indexer) else: from pandas.core.sorting import nargsort by = by[0] k = self._get_label_or_level_values(by, axis=axis) if isinstance(ascending, (tuple, list)): ascending = ascending[0] indexer = nargsort(k, kind=kind, ascending=ascending, na_position=na_position) new_data = self._data.take(indexer, axis=self._get_block_manager_axis(axis), verify=False) if inplace: return self._update_inplace(new_data) else: return self._constructor(new_data).__finalize__(self) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.sort_index.__doc__) def sort_index(self, axis=0, level=None, ascending=True, inplace=False, kind='quicksort', na_position='last', sort_remaining=True, by=None): # TODO: this can be combined with Series.sort_index impl as # almost identical inplace = validate_bool_kwarg(inplace, 'inplace') # 10726 if by is not None: warnings.warn("by argument to sort_index is deprecated, " "please use .sort_values(by=...)", FutureWarning, stacklevel=2) if level is not None: raise ValueError("unable to simultaneously sort by and level") return self.sort_values(by, axis=axis, ascending=ascending, inplace=inplace) axis = self._get_axis_number(axis) labels = self._get_axis(axis) # make sure that the axis is lexsorted to start # if not we need to reconstruct to get the correct indexer labels = labels._sort_levels_monotonic() if level is not None: new_axis, indexer = labels.sortlevel(level, ascending=ascending, sort_remaining=sort_remaining) elif isinstance(labels, MultiIndex): from pandas.core.sorting import lexsort_indexer indexer = lexsort_indexer(labels._get_codes_for_sorting(), orders=ascending, na_position=na_position) else: from pandas.core.sorting import nargsort # Check monotonic-ness before sort an index # GH11080 if ((ascending and labels.is_monotonic_increasing) or (not ascending and labels.is_monotonic_decreasing)): if inplace: return else: return self.copy() indexer = nargsort(labels, kind=kind, ascending=ascending, na_position=na_position) baxis = self._get_block_manager_axis(axis) new_data = self._data.take(indexer, axis=baxis, verify=False) # reconstruct axis if needed new_data.axes[baxis] = new_data.axes[baxis]._sort_levels_monotonic() if inplace: return self._update_inplace(new_data) else: return self._constructor(new_data).__finalize__(self) def nlargest(self, n, columns, keep='first'): """ Return the first `n` rows ordered by `columns` in descending order. 
Return the first `n` rows with the largest values in `columns`, in descending order. The columns that are not specified are returned as well, but not used for ordering. This method is equivalent to ``df.sort_values(columns, ascending=False).head(n)``, but more performant. Parameters ---------- n : int Number of rows to return. columns : label or list of labels Column label(s) to order by. keep : {'first', 'last', 'all'}, default 'first' Where there are duplicate values: - ``first`` : prioritize the first occurrence(s) - ``last`` : prioritize the last occurrence(s) - ``all`` : do not drop any duplicates, even if it means selecting more than `n` items. .. versionadded:: 0.24.0 Returns ------- DataFrame The first `n` rows ordered by the given columns in descending order. See Also -------- DataFrame.nsmallest : Return the first `n` rows ordered by `columns` in ascending order. DataFrame.sort_values : Sort DataFrame by the values. DataFrame.head : Return the first `n` rows without re-ordering. Notes ----- This function cannot be used with all column types. For example, when specifying columns with `object` or `category` dtypes, ``TypeError`` is raised. Examples -------- >>> df = pd.DataFrame({'population': [59000000, 65000000, 434000, ... 434000, 434000, 337000, 11300, ... 11300, 11300], ... 'GDP': [1937894, 2583560, 12011, 4520, 12128, ... 17036, 182, 38, 311], ... 'alpha-2': ["IT", "FR", "MT", "MV", "BN", ... "IS", "NR", "TV", "AI"]}, ... index=["Italy", "France", "Malta", ... "Maldives", "Brunei", "Iceland", ... "Nauru", "Tuvalu", "Anguilla"]) >>> df population GDP alpha-2 Italy 59000000 1937894 IT France 65000000 2583560 FR Malta 434000 12011 MT Maldives 434000 4520 MV Brunei 434000 12128 BN Iceland 337000 17036 IS Nauru 11300 182 NR Tuvalu 11300 38 TV Anguilla 11300 311 AI In the following example, we will use ``nlargest`` to select the three rows having the largest values in column "population". >>> df.nlargest(3, 'population') population GDP alpha-2 France 65000000 2583560 FR Italy 59000000 1937894 IT Malta 434000 12011 MT When using ``keep='last'``, ties are resolved in reverse order: >>> df.nlargest(3, 'population', keep='last') population GDP alpha-2 France 65000000 2583560 FR Italy 59000000 1937894 IT Brunei 434000 12128 BN When using ``keep='all'``, all duplicate items are maintained: >>> df.nlargest(3, 'population', keep='all') population GDP alpha-2 France 65000000 2583560 FR Italy 59000000 1937894 IT Malta 434000 12011 MT Maldives 434000 4520 MV Brunei 434000 12128 BN To order by the largest values in column "population" and then "GDP", we can specify multiple columns like in the next example. >>> df.nlargest(3, ['population', 'GDP']) population GDP alpha-2 France 65000000 2583560 FR Italy 59000000 1937894 IT Brunei 434000 12128 BN """ return algorithms.SelectNFrame(self, n=n, keep=keep, columns=columns).nlargest() def nsmallest(self, n, columns, keep='first'): """ Return the first `n` rows ordered by `columns` in ascending order. Return the first `n` rows with the smallest values in `columns`, in ascending order. The columns that are not specified are returned as well, but not used for ordering. This method is equivalent to ``df.sort_values(columns, ascending=True).head(n)``, but more performant. Parameters ---------- n : int Number of items to retrieve. columns : list or str Column name or names to order by. keep : {'first', 'last', 'all'}, default 'first' Where there are duplicate values: - ``first`` : take the first occurrence. - ``last`` : take the last occurrence.
- ``all`` : do not drop any duplicates, even if it means selecting more than `n` items. .. versionadded:: 0.24.0 Returns ------- DataFrame See Also -------- DataFrame.nlargest : Return the first `n` rows ordered by `columns` in descending order. DataFrame.sort_values : Sort DataFrame by the values. DataFrame.head : Return the first `n` rows without re-ordering. Examples -------- >>> df = pd.DataFrame({'population': [59000000, 65000000, 434000, ... 434000, 434000, 337000, 11300, ... 11300, 11300], ... 'GDP': [1937894, 2583560, 12011, 4520, 12128, ... 17036, 182, 38, 311], ... 'alpha-2': ["IT", "FR", "MT", "MV", "BN", ... "IS", "NR", "TV", "AI"]}, ... index=["Italy", "France", "Malta", ... "Maldives", "Brunei", "Iceland", ... "Nauru", "Tuvalu", "Anguilla"]) >>> df population GDP alpha-2 Italy 59000000 1937894 IT France 65000000 2583560 FR Malta 434000 12011 MT Maldives 434000 4520 MV Brunei 434000 12128 BN Iceland 337000 17036 IS Nauru 11300 182 NR Tuvalu 11300 38 TV Anguilla 11300 311 AI In the following example, we will use ``nsmallest`` to select the three rows having the smallest values in column "population". >>> df.nsmallest(3, 'population') population GDP alpha-2 Nauru 11300 182 NR Tuvalu 11300 38 TV Anguilla 11300 311 AI When using ``keep='last'``, ties are resolved in reverse order: >>> df.nsmallest(3, 'population', keep='last') population GDP alpha-2 Anguilla 11300 311 AI Tuvalu 11300 38 TV Nauru 11300 182 NR When using ``keep='all'``, all duplicate items are maintained: >>> df.nsmallest(3, 'population', keep='all') population GDP alpha-2 Nauru 11300 182 NR Tuvalu 11300 38 TV Anguilla 11300 311 AI To order by the smallest values in column "population" and then "GDP", we can specify multiple columns like in the next example. >>> df.nsmallest(3, ['population', 'GDP']) population GDP alpha-2 Tuvalu 11300 38 TV Nauru 11300 182 NR Anguilla 11300 311 AI """ return algorithms.SelectNFrame(self, n=n, keep=keep, columns=columns).nsmallest() def swaplevel(self, i=-2, j=-1, axis=0): """ Swap levels i and j in a MultiIndex on a particular axis. Parameters ---------- i, j : int, string (can be mixed) Level of index to be swapped. Can pass level name as string. Returns ------- DataFrame .. versionchanged:: 0.18.1 The indexes ``i`` and ``j`` are now optional, and default to the two innermost levels of the index. """ result = self.copy() axis = self._get_axis_number(axis) if axis == 0: result.index = result.index.swaplevel(i, j) else: result.columns = result.columns.swaplevel(i, j) return result def reorder_levels(self, order, axis=0): """ Rearrange index levels using input order. May not drop or duplicate levels. Parameters ---------- order : list of int or list of str List representing new level order. Reference level by number (position) or by key (label). axis : int Where to reorder levels.
Returns ------- type of caller (new object) """ axis = self._get_axis_number(axis) if not isinstance(self._get_axis(axis), MultiIndex): # pragma: no cover raise TypeError('Can only reorder levels on a hierarchical axis.') result = self.copy() if axis == 0: result.index = result.index.reorder_levels(order) else: result.columns = result.columns.reorder_levels(order) return result # ---------------------------------------------------------------------- # Arithmetic / combination related def _combine_frame(self, other, func, fill_value=None, level=None): this, other = self.align(other, join='outer', level=level, copy=False) new_index, new_columns = this.index, this.columns def _arith_op(left, right): # for the mixed_type case where we iterate over columns, # _arith_op(left, right) is equivalent to # left._binop(right, func, fill_value=fill_value) left, right = ops.fill_binop(left, right, fill_value) return func(left, right) if ops.should_series_dispatch(this, other, func): # iterate over columns return ops.dispatch_to_series(this, other, _arith_op) else: result = _arith_op(this.values, other.values) return self._constructor(result, index=new_index, columns=new_columns, copy=False) def _combine_match_index(self, other, func, level=None): left, right = self.align(other, join='outer', axis=0, level=level, copy=False) assert left.index.equals(right.index) if left._is_mixed_type or right._is_mixed_type: # operate column-wise; avoid costly object-casting in `.values` return ops.dispatch_to_series(left, right, func) else: # fastpath --> operate directly on values with np.errstate(all="ignore"): new_data = func(left.values.T, right.values).T return self._constructor(new_data, index=left.index, columns=self.columns, copy=False) def _combine_match_columns(self, other, func, level=None): assert isinstance(other, Series) left, right = self.align(other, join='outer', axis=1, level=level, copy=False) assert left.columns.equals(right.index) return ops.dispatch_to_series(left, right, func, axis="columns") def _combine_const(self, other, func): assert lib.is_scalar(other) or np.ndim(other) == 0 return ops.dispatch_to_series(self, other, func) def combine(self, other, func, fill_value=None, overwrite=True): """ Perform column-wise combine with another DataFrame. Combines a DataFrame with `other` DataFrame using `func` to element-wise combine columns. The row and column indexes of the resulting DataFrame will be the union of the two. Parameters ---------- other : DataFrame The DataFrame to merge column-wise. func : function Function that takes two series as inputs and returns a Series or a scalar. Used to merge the two dataframes column by column. fill_value : scalar value, default None The value to fill NaNs with prior to passing any column to the merge func. overwrite : bool, default True If True, columns in `self` that do not exist in `other` will be overwritten with NaNs. Returns ------- DataFrame Combination of the provided DataFrames. See Also -------- DataFrame.combine_first : Combine two DataFrame objects and default to non-null values in frame calling the method. Examples -------- Combine using a simple function that chooses the smaller column. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> take_smaller = lambda s1, s2: s1 if s1.sum() < s2.sum() else s2 >>> df1.combine(df2, take_smaller) A B 0 0 3 1 0 3 Example using a true element-wise combine function.
>>> df1 = pd.DataFrame({'A': [5, 0], 'B': [2, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> df1.combine(df2, np.minimum) A B 0 1 2 1 0 3 Using `fill_value` fills Nones prior to passing the column to the merge function. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> df1.combine(df2, take_smaller, fill_value=-5) A B 0 0 -5.0 1 0 4.0 However, if the same element in both dataframes is None, that None is preserved. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [None, 3]}) >>> df1.combine(df2, take_smaller, fill_value=-5) A B 0 0 -5.0 1 0 3.0 Example that demonstrates the use of `overwrite` and behavior when the axes differ between the dataframes. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]}) >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [-10, 1], }, index=[1, 2]) >>> df1.combine(df2, take_smaller) A B C 0 NaN NaN NaN 1 NaN 3.0 -10.0 2 NaN 3.0 1.0 >>> df1.combine(df2, take_smaller, overwrite=False) A B C 0 0.0 NaN NaN 1 0.0 3.0 -10.0 2 NaN 3.0 1.0 Demonstrating the preference of the passed in dataframe. >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1], }, index=[1, 2]) >>> df2.combine(df1, take_smaller) A B C 0 0.0 NaN NaN 1 0.0 3.0 NaN 2 NaN 3.0 NaN >>> df2.combine(df1, take_smaller, overwrite=False) A B C 0 0.0 NaN NaN 1 0.0 3.0 1.0 2 NaN 3.0 1.0 """ other_idxlen = len(other.index) # save for compare this, other = self.align(other, copy=False) new_index = this.index if other.empty and len(new_index) == len(self.index): return self.copy() if self.empty and len(other) == other_idxlen: return other.copy() # sorts if possible new_columns = this.columns.union(other.columns) do_fill = fill_value is not None result = {} for col in new_columns: series = this[col] otherSeries = other[col] this_dtype = series.dtype other_dtype = otherSeries.dtype this_mask = isna(series) other_mask = isna(otherSeries) # don't overwrite columns unnecessarily # DO propagate if this column is not in the intersection if not overwrite and other_mask.all(): result[col] = this[col].copy() continue if do_fill: series = series.copy() otherSeries = otherSeries.copy() series[this_mask] = fill_value otherSeries[other_mask] = fill_value if col not in self.columns: # If self DataFrame does not have col in other DataFrame, # try to promote the series, which is all NaN, to other_dtype. new_dtype = other_dtype try: series = series.astype(new_dtype, copy=False) except ValueError: # e.g. new_dtype is integer types pass else: # if we have different dtypes, possibly promote new_dtype = find_common_type([this_dtype, other_dtype]) if not is_dtype_equal(this_dtype, new_dtype): series = series.astype(new_dtype) if not is_dtype_equal(other_dtype, new_dtype): otherSeries = otherSeries.astype(new_dtype) arr = func(series, otherSeries) arr = maybe_downcast_to_dtype(arr, this_dtype) result[col] = arr # convert_objects just in case return self._constructor(result, index=new_index, columns=new_columns) def combine_first(self, other): """ Update null elements with value in the same location in `other`. Combine two DataFrame objects by filling null values in one DataFrame with non-null values from other DataFrame. The row and column indexes of the resulting DataFrame will be the union of the two. Parameters ---------- other : DataFrame Provided DataFrame to use to fill null values. Returns ------- DataFrame See Also -------- DataFrame.combine : Perform series-wise operation on two DataFrames using a given function.
Examples -------- >>> df1 = pd.DataFrame({'A': [None, 0], 'B': [None, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> df1.combine_first(df2) A B 0 1.0 3.0 1 0.0 4.0 Null values still persist if the location of that null value does not exist in `other`. >>> df1 = pd.DataFrame({'A': [None, 0], 'B': [4, None]}) >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1]}, index=[1, 2]) >>> df1.combine_first(df2) A B C 0 NaN 4.0 NaN 1 0.0 3.0 1.0 2 NaN 3.0 1.0 """ import pandas.core.computation.expressions as expressions def extract_values(arr): # Does two things: # 1. maybe gets the values from the Series / Index # 2. convert datelike to i8 if isinstance(arr, (ABCIndexClass, ABCSeries)): arr = arr._values if needs_i8_conversion(arr): if is_extension_array_dtype(arr.dtype): arr = arr.asi8 else: arr = arr.view('i8') return arr def combiner(x, y): mask = isna(x) if isinstance(mask, (ABCIndexClass, ABCSeries)): mask = mask._values x_values = extract_values(x) y_values = extract_values(y) # If the column y in other DataFrame is not in first DataFrame, # just return y_values. if y.name not in self.columns: return y_values return expressions.where(mask, y_values, x_values) return self.combine(other, combiner, overwrite=False) @deprecate_kwarg(old_arg_name='raise_conflict', new_arg_name='errors', mapping={False: 'ignore', True: 'raise'}) def update(self, other, join='left', overwrite=True, filter_func=None, errors='ignore'): """ Modify in place using non-NA values from another DataFrame. Aligns on indices. There is no return value. Parameters ---------- other : DataFrame, or object coercible into a DataFrame Should have at least one matching index/column label with the original DataFrame. If a Series is passed, its name attribute must be set, and that will be used as the column name to align with the original DataFrame. join : {'left'}, default 'left' Only left join is implemented, keeping the index and columns of the original object. overwrite : bool, default True How to handle non-NA values for overlapping keys: * True: overwrite original DataFrame's values with values from `other`. * False: only update values that are NA in the original DataFrame. filter_func : callable(1d-array) -> bool 1d-array, optional Can choose to replace values other than NA. Return True for values that should be updated. errors : {'raise', 'ignore'}, default 'ignore' If 'raise', will raise a ValueError if the DataFrame and `other` both contain non-NA data in the same place. .. versionchanged:: 0.24.0 Changed from `raise_conflict=False|True` to `errors='ignore'|'raise'`. Returns ------- None : method directly changes calling object Raises ------ ValueError * When `errors='raise'` and there's overlapping non-NA data. * When `errors` is neither `'ignore'` nor `'raise'` NotImplementedError * If `join != 'left'` See Also -------- dict.update : Similar method for dictionaries. DataFrame.merge : For column(s)-on-columns(s) operations. Examples -------- >>> df = pd.DataFrame({'A': [1, 2, 3], ... 'B': [400, 500, 600]}) >>> new_df = pd.DataFrame({'B': [4, 5, 6], ... 'C': [7, 8, 9]}) >>> df.update(new_df) >>> df A B 0 1 4 1 2 5 2 3 6 The DataFrame's length does not increase as a result of the update; only values at matching index/column labels are updated. >>> df = pd.DataFrame({'A': ['a', 'b', 'c'], ... 'B': ['x', 'y', 'z']}) >>> new_df = pd.DataFrame({'B': ['d', 'e', 'f', 'g', 'h', 'i']}) >>> df.update(new_df) >>> df A B 0 a d 1 b e 2 c f For Series, its name attribute must be set.
>>> df = pd.DataFrame({'A': ['a', 'b', 'c'], ... 'B': ['x', 'y', 'z']}) >>> new_column = pd.Series(['d', 'e'], name='B', index=[0, 2]) >>> df.update(new_column) >>> df A B 0 a d 1 b y 2 c e >>> df = pd.DataFrame({'A': ['a', 'b', 'c'], ... 'B': ['x', 'y', 'z']}) >>> new_df = pd.DataFrame({'B': ['d', 'e']}, index=[1, 2]) >>> df.update(new_df) >>> df A B 0 a x 1 b d 2 c e If `other` contains NaNs, the corresponding values are not updated in the original dataframe. >>> df = pd.DataFrame({'A': [1, 2, 3], ... 'B': [400, 500, 600]}) >>> new_df = pd.DataFrame({'B': [4, np.nan, 6]}) >>> df.update(new_df) >>> df A B 0 1 4.0 1 2 500.0 2 3 6.0 """ import pandas.core.computation.expressions as expressions # TODO: Support other joins if join != 'left': # pragma: no cover raise NotImplementedError("Only left join is supported") if errors not in ['ignore', 'raise']: raise ValueError("The parameter errors must be either " "'ignore' or 'raise'") if not isinstance(other, DataFrame): other = DataFrame(other) other = other.reindex_like(self) for col in self.columns: this = self[col]._values that = other[col]._values if filter_func is not None: with np.errstate(all='ignore'): mask = ~filter_func(this) | isna(that) else: if errors == 'raise': mask_this = notna(that) mask_that = notna(this) if any(mask_this & mask_that): raise ValueError("Data overlaps.") if overwrite: mask = isna(that) else: mask = notna(this) # don't overwrite columns unnecessarily if mask.all(): continue self[col] = expressions.where(mask, this, that) # ---------------------------------------------------------------------- # Data reshaping _shared_docs['pivot'] = """ Return reshaped DataFrame organized by given index / column values. Reshape data (produce a "pivot" table) based on column values. Uses unique values from specified `index` / `columns` to form axes of the resulting DataFrame. This function does not support data aggregation; multiple values will result in a MultiIndex in the columns. See the :ref:`User Guide <reshaping>` for more on reshaping. Parameters ----------%s index : string or object, optional Column to use to make new frame's index. If None, uses existing index. columns : string or object Column to use to make new frame's columns. values : string, object or a list of the previous, optional Column(s) to use for populating new frame's values. If not specified, all remaining columns will be used and the result will have hierarchically indexed columns. .. versionchanged:: 0.23.0 Also accept list of column names. Returns ------- DataFrame Returns reshaped DataFrame. Raises ------ ValueError: When there are any `index`, `columns` combinations with multiple values. Use `DataFrame.pivot_table` when you need to aggregate. See Also -------- DataFrame.pivot_table : Generalization of pivot that can handle duplicate values for one index/column pair. DataFrame.unstack : Pivot based on the index values instead of a column. Notes ----- For finer-tuned control, see hierarchical indexing documentation along with the related stack/unstack methods. Examples -------- >>> df = pd.DataFrame({'foo': ['one', 'one', 'one', 'two', 'two', ... 'two'], ... 'bar': ['A', 'B', 'C', 'A', 'B', 'C'], ... 'baz': [1, 2, 3, 4, 5, 6], ...
'zoo': ['x', 'y', 'z', 'q', 'w', 't']}) >>> df foo bar baz zoo 0 one A 1 x 1 one B 2 y 2 one C 3 z 3 two A 4 q 4 two B 5 w 5 two C 6 t >>> df.pivot(index='foo', columns='bar', values='baz') bar A B C foo one 1 2 3 two 4 5 6 >>> df.pivot(index='foo', columns='bar')['baz'] bar A B C foo one 1 2 3 two 4 5 6 >>> df.pivot(index='foo', columns='bar', values=['baz', 'zoo']) baz zoo bar A B C A B C foo one 1 2 3 x y z two 4 5 6 q w t A ValueError is raised if there are any duplicates. >>> df = pd.DataFrame({"foo": ['one', 'one', 'two', 'two'], ... "bar": ['A', 'A', 'B', 'C'], ... "baz": [1, 2, 3, 4]}) >>> df foo bar baz 0 one A 1 1 one A 2 2 two B 3 3 two C 4 Notice that the first two rows are the same for our `index` and `columns` arguments. >>> df.pivot(index='foo', columns='bar', values='baz') Traceback (most recent call last): ... ValueError: Index contains duplicate entries, cannot reshape """ @Substitution('') @Appender(_shared_docs['pivot']) def pivot(self, index=None, columns=None, values=None): from pandas.core.reshape.pivot import pivot return pivot(self, index=index, columns=columns, values=values) _shared_docs['pivot_table'] = """ Create a spreadsheet-style pivot table as a DataFrame. The levels in the pivot table will be stored in MultiIndex objects (hierarchical indexes) on the index and columns of the result DataFrame. Parameters ----------%s values : column to aggregate, optional index : column, Grouper, array, or list of the previous If an array is passed, it must be the same length as the data. The list can contain any of the other types (except list). Keys to group by on the pivot table index. If an array is passed, it is used in the same manner as column values. columns : column, Grouper, array, or list of the previous If an array is passed, it must be the same length as the data. The list can contain any of the other types (except list). Keys to group by on the pivot table column. If an array is passed, it is used in the same manner as column values. aggfunc : function, list of functions, dict, default numpy.mean If list of functions passed, the resulting pivot table will have hierarchical columns whose top level are the function names (inferred from the function objects themselves). If dict is passed, the key is column to aggregate and value is function or list of functions. fill_value : scalar, default None Value to replace missing values with. margins : boolean, default False Add all row / columns (e.g. for subtotal / grand totals). dropna : boolean, default True Do not include columns whose entries are all NaN. margins_name : string, default 'All' Name of the row / column that will contain the totals when margins is True. observed : boolean, default False This only applies if any of the groupers are Categoricals. If True: only show observed values for categorical groupers. If False: show all values for categorical groupers. .. versionchanged:: 0.25.0 Returns ------- DataFrame See Also -------- DataFrame.pivot : Pivot without aggregation that can handle non-numeric data. Examples -------- >>> df = pd.DataFrame({"A": ["foo", "foo", "foo", "foo", "foo", ... "bar", "bar", "bar", "bar"], ... "B": ["one", "one", "one", "two", "two", ... "one", "one", "two", "two"], ... "C": ["small", "large", "large", "small", ... "small", "large", "small", "small", ... "large"], ... "D": [1, 2, 2, 3, 3, 4, 5, 6, 7], ...
"E": [2, 4, 5, 5, 6, 6, 8, 9, 9]}) >>> df A B C D E 0 foo one small 1 2 1 foo one large 2 4 2 foo one large 2 5 3 foo two small 3 5 4 foo two small 3 6 5 bar one large 4 6 6 bar one small 5 8 7 bar two small 6 9 8 bar two large 7 9 This first example aggregates values by taking the sum. >>> table = pd.pivot_table(df, values='D', index=['A', 'B'], ... columns=['C'], aggfunc=np.sum) >>> table C large small A B bar one 4.0 5.0 two 7.0 6.0 foo one 4.0 1.0 two NaN 6.0 We can also fill missing values using the `fill_value` parameter. >>> table = pd.pivot_table(df, values='D', index=['A', 'B'], ... columns=['C'], aggfunc=np.sum, fill_value=0) >>> table C large small A B bar one 4 5 two 7 6 foo one 4 1 two 0 6 The next example aggregates by taking the mean across multiple columns. >>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'], ... aggfunc={'D': np.mean, ... 'E': np.mean}) >>> table D E A C bar large 5.500000 7.500000 small 5.500000 8.500000 foo large 2.000000 4.500000 small 2.333333 4.333333 We can also calculate multiple types of aggregations for any given value column. >>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'], ... aggfunc={'D': np.mean, ... 'E': [min, max, np.mean]}) >>> table D E mean max mean min A C bar large 5.500000 9.0 7.500000 6.0 small 5.500000 9.0 8.500000 8.0 foo large 2.000000 5.0 4.500000 4.0 small 2.333333 6.0 4.333333 2.0 """ @Substitution('') @Appender(_shared_docs['pivot_table']) def pivot_table(self, values=None, index=None, columns=None, aggfunc='mean', fill_value=None, margins=False, dropna=True, margins_name='All', observed=False): from pandas.core.reshape.pivot import pivot_table return pivot_table(self, values=values, index=index, columns=columns, aggfunc=aggfunc, fill_value=fill_value, margins=margins, dropna=dropna, margins_name=margins_name, observed=observed) def stack(self, level=-1, dropna=True): """ Stack the prescribed level(s) from columns to index. Return a reshaped DataFrame or Series having a multi-level index with one or more new inner-most levels compared to the current DataFrame. The new inner-most levels are created by pivoting the columns of the current dataframe: - if the columns have a single level, the output is a Series; - if the columns have multiple levels, the new index level(s) is (are) taken from the prescribed level(s) and the output is a DataFrame. The new index levels are sorted. Parameters ---------- level : int, str, list, default -1 Level(s) to stack from the column axis onto the index axis, defined as one index or label, or a list of indices or labels. dropna : bool, default True Whether to drop rows in the resulting Frame/Series with missing values. Stacking a column level onto the index axis can create combinations of index and column values that are missing from the original dataframe. See Examples section. Returns ------- DataFrame or Series Stacked dataframe or series. See Also -------- DataFrame.unstack : Unstack prescribed level(s) from index axis onto column axis. DataFrame.pivot : Reshape dataframe from long format to wide format. DataFrame.pivot_table : Create a spreadsheet-style pivot table as a DataFrame. Notes ----- The function is named by analogy with a collection of books being reorganized from being side by side on a horizontal position (the columns of the dataframe) to being stacked vertically on top of each other (in the index of the dataframe). Examples -------- **Single level columns** >>> df_single_level_cols = pd.DataFrame([[0, 1], [2, 3]], ... index=['cat', 'dog'], ... 
columns=['weight', 'height']) Stacking a dataframe with a single level column axis returns a Series: >>> df_single_level_cols weight height cat 0 1 dog 2 3 >>> df_single_level_cols.stack() cat weight 0 height 1 dog weight 2 height 3 dtype: int64 **Multi level columns: simple case** >>> multicol1 = pd.MultiIndex.from_tuples([('weight', 'kg'), ... ('weight', 'pounds')]) >>> df_multi_level_cols1 = pd.DataFrame([[1, 2], [2, 4]], ... index=['cat', 'dog'], ... columns=multicol1) Stacking a dataframe with a multi-level column axis: >>> df_multi_level_cols1 weight kg pounds cat 1 2 dog 2 4 >>> df_multi_level_cols1.stack() weight cat kg 1 pounds 2 dog kg 2 pounds 4 **Missing values** >>> multicol2 = pd.MultiIndex.from_tuples([('weight', 'kg'), ... ('height', 'm')]) >>> df_multi_level_cols2 = pd.DataFrame([[1.0, 2.0], [3.0, 4.0]], ... index=['cat', 'dog'], ... columns=multicol2) It is common to have missing values when stacking a dataframe with multi-level columns, as the stacked dataframe typically has more values than the original dataframe. Missing values are filled with NaNs: >>> df_multi_level_cols2 weight height kg m cat 1.0 2.0 dog 3.0 4.0 >>> df_multi_level_cols2.stack() height weight cat kg NaN 1.0 m 2.0 NaN dog kg NaN 3.0 m 4.0 NaN **Prescribing the level(s) to be stacked** The first parameter controls which level or levels are stacked: >>> df_multi_level_cols2.stack(0) kg m cat height NaN 2.0 weight 1.0 NaN dog height NaN 4.0 weight 3.0 NaN >>> df_multi_level_cols2.stack([0, 1]) cat height m 2.0 weight kg 1.0 dog height m 4.0 weight kg 3.0 dtype: float64 **Dropping missing values** >>> df_multi_level_cols3 = pd.DataFrame([[None, 1.0], [2.0, 3.0]], ... index=['cat', 'dog'], ... columns=multicol2) Note that rows where all values are missing are dropped by default but this behaviour can be controlled via the dropna keyword parameter: >>> df_multi_level_cols3 weight height kg m cat NaN 1.0 dog 2.0 3.0 >>> df_multi_level_cols3.stack(dropna=False) height weight cat kg NaN NaN m 1.0 NaN dog kg NaN 2.0 m 3.0 NaN >>> df_multi_level_cols3.stack(dropna=True) height weight cat m 1.0 NaN dog kg NaN 2.0 m 3.0 NaN """ from pandas.core.reshape.reshape import stack, stack_multiple if isinstance(level, (tuple, list)): return stack_multiple(self, level, dropna=dropna) else: return stack(self, level, dropna=dropna) def unstack(self, level=-1, fill_value=None): """ Pivot a level of the (necessarily hierarchical) index labels, returning a DataFrame having a new level of column labels whose inner-most level consists of the pivoted index labels. If the index is not a MultiIndex, the output will be a Series (the analogue of stack when the columns are not a MultiIndex). The level involved will automatically get sorted. Parameters ---------- level : int, string, or list of these, default -1 (last level) Level(s) of index to unstack, can pass level name fill_value : replace NaN with this value if the unstack produces missing values .. versionadded:: 0.18.0 Returns ------- Series or DataFrame See Also -------- DataFrame.pivot : Pivot a table based on column values. DataFrame.stack : Pivot a level of the column labels (inverse operation from `unstack`). Examples -------- >>> index = pd.MultiIndex.from_tuples([('one', 'a'), ('one', 'b'), ... 
('two', 'a'), ('two', 'b')]) >>> s = pd.Series(np.arange(1.0, 5.0), index=index) >>> s one a 1.0 b 2.0 two a 3.0 b 4.0 dtype: float64 >>> s.unstack(level=-1) a b one 1.0 2.0 two 3.0 4.0 >>> s.unstack(level=0) one two a 1.0 3.0 b 2.0 4.0 >>> df = s.unstack(level=0) >>> df.unstack() one a 1.0 b 2.0 two a 3.0 b 4.0 dtype: float64 """ from pandas.core.reshape.reshape import unstack return unstack(self, level, fill_value) _shared_docs['melt'] = (""" Unpivot a DataFrame from wide format to long format, optionally leaving identifier variables set. This function is useful to massage a DataFrame into a format where one or more columns are identifier variables (`id_vars`), while all other columns, considered measured variables (`value_vars`), are "unpivoted" to the row axis, leaving just two non-identifier columns, 'variable' and 'value'. %(versionadded)s Parameters ---------- frame : DataFrame id_vars : tuple, list, or ndarray, optional Column(s) to use as identifier variables. value_vars : tuple, list, or ndarray, optional Column(s) to unpivot. If not specified, uses all columns that are not set as `id_vars`. var_name : scalar Name to use for the 'variable' column. If None it uses ``frame.columns.name`` or 'variable'. value_name : scalar, default 'value' Name to use for the 'value' column. col_level : int or string, optional If columns are a MultiIndex then use this level to melt. Returns ------- DataFrame Unpivoted DataFrame. See Also -------- %(other)s pivot_table DataFrame.pivot Examples -------- >>> df = pd.DataFrame({'A': {0: 'a', 1: 'b', 2: 'c'}, ... 'B': {0: 1, 1: 3, 2: 5}, ... 'C': {0: 2, 1: 4, 2: 6}}) >>> df A B C 0 a 1 2 1 b 3 4 2 c 5 6 >>> %(caller)sid_vars=['A'], value_vars=['B']) A variable value 0 a B 1 1 b B 3 2 c B 5 >>> %(caller)sid_vars=['A'], value_vars=['B', 'C']) A variable value 0 a B 1 1 b B 3 2 c B 5 3 a C 2 4 b C 4 5 c C 6 The names of 'variable' and 'value' columns can be customized: >>> %(caller)sid_vars=['A'], value_vars=['B'], ... var_name='myVarname', value_name='myValname') A myVarname myValname 0 a B 1 1 b B 3 2 c B 5 If you have multi-index columns: >>> df.columns = [list('ABC'), list('DEF')] >>> df A B C D E F 0 a 1 2 1 b 3 4 2 c 5 6 >>> %(caller)scol_level=0, id_vars=['A'], value_vars=['B']) A variable value 0 a B 1 1 b B 3 2 c B 5 >>> %(caller)sid_vars=[('A', 'D')], value_vars=[('B', 'E')]) (A, D) variable_0 variable_1 value 0 a B E 1 1 b B E 3 2 c B E 5 """) @Appender(_shared_docs['melt'] % dict(caller='df.melt(', versionadded='.. versionadded:: 0.20.0\n', other='melt')) def melt(self, id_vars=None, value_vars=None, var_name=None, value_name='value', col_level=None): from pandas.core.reshape.melt import melt return melt(self, id_vars=id_vars, value_vars=value_vars, var_name=var_name, value_name=value_name, col_level=col_level) # ---------------------------------------------------------------------- # Time series-related def diff(self, periods=1, axis=0): """ First discrete difference of element. Calculates the difference of a DataFrame element compared with another element in the DataFrame (default is the element in the same column of the previous row). Parameters ---------- periods : int, default 1 Periods to shift for calculating difference, accepts negative values. axis : {0 or 'index', 1 or 'columns'}, default 0 Take difference over rows (0) or columns (1). .. versionadded:: 0.16.1. Returns ------- DataFrame See Also -------- Series.diff: First discrete difference for a Series. DataFrame.pct_change: Percent change over given number of periods. 
DataFrame.shift: Shift index by desired number of periods with an optional time freq. Examples -------- Difference with previous row >>> df = pd.DataFrame({'a': [1, 2, 3, 4, 5, 6], ... 'b': [1, 1, 2, 3, 5, 8], ... 'c': [1, 4, 9, 16, 25, 36]}) >>> df a b c 0 1 1 1 1 2 1 4 2 3 2 9 3 4 3 16 4 5 5 25 5 6 8 36 >>> df.diff() a b c 0 NaN NaN NaN 1 1.0 0.0 3.0 2 1.0 1.0 5.0 3 1.0 1.0 7.0 4 1.0 2.0 9.0 5 1.0 3.0 11.0 Difference with previous column >>> df.diff(axis=1) a b c 0 NaN 0.0 0.0 1 NaN -1.0 3.0 2 NaN -1.0 7.0 3 NaN -1.0 13.0 4 NaN 0.0 20.0 5 NaN 2.0 28.0 Difference with 3rd previous row >>> df.diff(periods=3) a b c 0 NaN NaN NaN 1 NaN NaN NaN 2 NaN NaN NaN 3 3.0 2.0 15.0 4 3.0 4.0 21.0 5 3.0 6.0 27.0 Difference with following row >>> df.diff(periods=-1) a b c 0 -1.0 0.0 -3.0 1 -1.0 -1.0 -5.0 2 -1.0 -1.0 -7.0 3 -1.0 -2.0 -9.0 4 -1.0 -3.0 -11.0 5 NaN NaN NaN """ bm_axis = self._get_block_manager_axis(axis) new_data = self._data.diff(n=periods, axis=bm_axis) return self._constructor(new_data) # ---------------------------------------------------------------------- # Function application def _gotitem(self, key: Union[str, List[str]], ndim: int, subset: Optional[Union[Series, ABCDataFrame]] = None, ) -> Union[Series, ABCDataFrame]: """ Sub-classes to define. Return a sliced object. Parameters ---------- key : string / list of selections ndim : 1,2 requested ndim of result subset : object, default None subset to act on """ if subset is None: subset = self elif subset.ndim == 1: # is Series return subset # TODO: _shallow_copy(subset)? return subset[key] _agg_summary_and_see_also_doc = dedent(""" The aggregation operations are always performed over an axis, either the index (default) or the column axis. This behavior is different from `numpy` aggregation functions (`mean`, `median`, `prod`, `sum`, `std`, `var`), where the default is to compute the aggregation of the flattened array, e.g., ``numpy.mean(arr_2d)`` as opposed to ``numpy.mean(arr_2d, axis=0)``. `agg` is an alias for `aggregate`. Use the alias. See Also -------- DataFrame.apply : Perform any type of operations. DataFrame.transform : Perform transformation type operations. core.groupby.GroupBy : Perform operations over groups. core.resample.Resampler : Perform operations over resampled bins. core.window.Rolling : Perform operations over rolling window. core.window.Expanding : Perform operations over expanding window. core.window.EWM : Perform operation over exponential weighted window. """) _agg_examples_doc = dedent(""" Examples -------- >>> df = pd.DataFrame([[1, 2, 3], ... [4, 5, 6], ... [7, 8, 9], ... [np.nan, np.nan, np.nan]], ... columns=['A', 'B', 'C']) Aggregate these functions over the rows. >>> df.agg(['sum', 'min']) A B C sum 12.0 15.0 18.0 min 1.0 2.0 3.0 Different aggregations per column. >>> df.agg({'A' : ['sum', 'min'], 'B' : ['min', 'max']}) A B max NaN 8.0 min 1.0 2.0 sum 12.0 NaN Aggregate over the columns. >>> df.agg("mean", axis="columns") 0 2.0 1 5.0 2 8.0 3 NaN dtype: float64 """) @Substitution(see_also=_agg_summary_and_see_also_doc, examples=_agg_examples_doc, versionadded='\n.. 
versionadded:: 0.20.0\n', **_shared_doc_kwargs) @Appender(_shared_docs['aggregate']) def aggregate(self, func, axis=0, *args, **kwargs): axis = self._get_axis_number(axis) result = None try: result, how = self._aggregate(func, axis=axis, *args, **kwargs) except TypeError: pass if result is None: return self.apply(func, axis=axis, args=args, **kwargs) return result def _aggregate(self, arg, axis=0, *args, **kwargs): if axis == 1: # NDFrame.aggregate returns a tuple, and we need to transpose # only result result, how = self.T._aggregate(arg, *args, **kwargs) result = result.T if result is not None else result return result, how return super()._aggregate(arg, *args, **kwargs) agg = aggregate @Appender(_shared_docs['transform'] % _shared_doc_kwargs) def transform(self, func, axis=0, *args, **kwargs): axis = self._get_axis_number(axis) if axis == 1: return self.T.transform(func, *args, **kwargs).T return super().transform(func, *args, **kwargs) def apply(self, func, axis=0, broadcast=None, raw=False, reduce=None, result_type=None, args=(), **kwds): """ Apply a function along an axis of the DataFrame. Objects passed to the function are Series objects whose index is either the DataFrame's index (``axis=0``) or the DataFrame's columns (``axis=1``). By default (``result_type=None``), the final return type is inferred from the return type of the applied function. Otherwise, it depends on the `result_type` argument. Parameters ---------- func : function Function to apply to each column or row. axis : {0 or 'index', 1 or 'columns'}, default 0 Axis along which the function is applied: * 0 or 'index': apply function to each column. * 1 or 'columns': apply function to each row. broadcast : bool, optional Only relevant for aggregation functions: * ``False`` or ``None`` : returns a Series whose length is the length of the index or the number of columns (based on the `axis` parameter) * ``True`` : results will be broadcast to the original shape of the frame, the original index and columns will be retained. .. deprecated:: 0.23.0 This argument will be removed in a future version, replaced by result_type='broadcast'. raw : bool, default False * ``False`` : passes each row or column as a Series to the function. * ``True`` : the passed function will receive ndarray objects instead. If you are just applying a NumPy reduction function this will achieve much better performance. reduce : bool or None, default None Try to apply reduction procedures. If the DataFrame is empty, `apply` will use `reduce` to determine whether the result should be a Series or a DataFrame. If ``reduce=None`` (the default), `apply`'s return value will be guessed by calling `func` on an empty Series (note: while guessing, exceptions raised by `func` will be ignored). If ``reduce=True`` a Series will always be returned, and if ``reduce=False`` a DataFrame will always be returned. .. deprecated:: 0.23.0 This argument will be removed in a future version, replaced by ``result_type='reduce'``. result_type : {'expand', 'reduce', 'broadcast', None}, default None These only act when ``axis=1`` (columns): * 'expand' : list-like results will be turned into columns. * 'reduce' : returns a Series if possible rather than expanding list-like results. This is the opposite of 'expand'. * 'broadcast' : results will be broadcast to the original shape of the DataFrame, the original index and columns will be retained. The default behaviour (None) depends on the return value of the applied function: list-like results will be returned as a Series of those. 
However if the apply function returns a Series these are expanded to columns. .. versionadded:: 0.23.0 args : tuple Positional arguments to pass to `func` in addition to the array/series. **kwds Additional keyword arguments to pass as keywords arguments to `func`. Returns ------- Series or DataFrame Result of applying ``func`` along the given axis of the DataFrame. See Also -------- DataFrame.applymap: For elementwise operations. DataFrame.aggregate: Only perform aggregating type operations. DataFrame.transform: Only perform transforming type operations. Notes ----- In the current implementation apply calls `func` twice on the first column/row to decide whether it can take a fast or slow code path. This can lead to unexpected behavior if `func` has side-effects, as they will take effect twice for the first column/row. Examples -------- >>> df = pd.DataFrame([[4, 9]] * 3, columns=['A', 'B']) >>> df A B 0 4 9 1 4 9 2 4 9 Using a numpy universal function (in this case the same as ``np.sqrt(df)``): >>> df.apply(np.sqrt) A B 0 2.0 3.0 1 2.0 3.0 2 2.0 3.0 Using a reducing function on either axis >>> df.apply(np.sum, axis=0) A 12 B 27 dtype: int64 >>> df.apply(np.sum, axis=1) 0 13 1 13 2 13 dtype: int64 Returning a list-like will result in a Series >>> df.apply(lambda x: [1, 2], axis=1) 0 [1, 2] 1 [1, 2] 2 [1, 2] dtype: object Passing result_type='expand' will expand list-like results to columns of a Dataframe >>> df.apply(lambda x: [1, 2], axis=1, result_type='expand') 0 1 0 1 2 1 1 2 2 1 2 Returning a Series inside the function is similar to passing ``result_type='expand'``. The resulting column names will be the Series index. >>> df.apply(lambda x: pd.Series([1, 2], index=['foo', 'bar']), axis=1) foo bar 0 1 2 1 1 2 2 1 2 Passing ``result_type='broadcast'`` will ensure the same shape result, whether list-like or scalar is returned by the function, and broadcast it along the axis. The resulting column names will be the originals. >>> df.apply(lambda x: [1, 2], axis=1, result_type='broadcast') A B 0 1 2 1 1 2 2 1 2 """ from pandas.core.apply import frame_apply op = frame_apply(self, func=func, axis=axis, broadcast=broadcast, raw=raw, reduce=reduce, result_type=result_type, args=args, kwds=kwds) return op.get_result() def applymap(self, func): """ Apply a function to a Dataframe elementwise. This method applies a function that accepts and returns a scalar to every element of a DataFrame. Parameters ---------- func : callable Python function, returns a single value from a single value. Returns ------- DataFrame Transformed DataFrame. See Also -------- DataFrame.apply : Apply a function along input axis of DataFrame. Notes ----- In the current implementation applymap calls `func` twice on the first column/row to decide whether it can take a fast or slow code path. This can lead to unexpected behavior if `func` has side-effects, as they will take effect twice for the first column/row. Examples -------- >>> df = pd.DataFrame([[1, 2.12], [3.356, 4.567]]) >>> df 0 1 0 1.000 2.120 1 3.356 4.567 >>> df.applymap(lambda x: len(str(x))) 0 1 0 3 4 1 5 5 Note that a vectorized version of `func` often exists, which will be much faster. You could square each number elementwise. >>> df.applymap(lambda x: x**2) 0 1 0 1.000000 4.494400 1 11.262736 20.857489 But it's better to avoid applymap in that case. 
>>> df ** 2 0 1 0 1.000000 4.494400 1 11.262736 20.857489 """ # if we have a dtype == 'M8[ns]', provide boxed values def infer(x): if x.empty: return lib.map_infer(x, func) return lib.map_infer(x.astype(object).values, func) return self.apply(infer) # ---------------------------------------------------------------------- # Merging / joining methods def append(self, other, ignore_index=False, verify_integrity=False, sort=None): """ Append rows of `other` to the end of caller, returning a new object. Columns in `other` that are not in the caller are added as new columns. Parameters ---------- other : DataFrame or Series/dict-like object, or list of these The data to append. ignore_index : boolean, default False If True, do not use the index labels. verify_integrity : boolean, default False If True, raise ValueError on creating index with duplicates. sort : boolean, default None Sort columns if the columns of `self` and `other` are not aligned. The default sorting is deprecated and will change to not-sorting in a future version of pandas. Explicitly pass ``sort=True`` to silence the warning and sort. Explicitly pass ``sort=False`` to silence the warning and not sort. .. versionadded:: 0.23.0 Returns ------- DataFrame See Also -------- concat : General function to concatenate DataFrame or Series objects. Notes ----- If a list of dict/series is passed and the keys are all contained in the DataFrame's index, the order of the columns in the resulting DataFrame will be unchanged. Iteratively appending rows to a DataFrame can be more computationally intensive than a single concatenate. A better solution is to append those rows to a list and then concatenate the list with the original DataFrame all at once. Examples -------- >>> df = pd.DataFrame([[1, 2], [3, 4]], columns=list('AB')) >>> df A B 0 1 2 1 3 4 >>> df2 = pd.DataFrame([[5, 6], [7, 8]], columns=list('AB')) >>> df.append(df2) A B 0 1 2 1 3 4 0 5 6 1 7 8 With `ignore_index` set to True: >>> df.append(df2, ignore_index=True) A B 0 1 2 1 3 4 2 5 6 3 7 8 The following, while not recommended methods for generating DataFrames, show two ways to generate a DataFrame from multiple data sources. Less efficient: >>> df = pd.DataFrame(columns=['A']) >>> for i in range(5): ... df = df.append({'A': i}, ignore_index=True) >>> df A 0 0 1 1 2 2 3 3 4 4 More efficient: >>> pd.concat([pd.DataFrame([i], columns=['A']) for i in range(5)], ... 
ignore_index=True) A 0 0 1 1 2 2 3 3 4 4 """ if isinstance(other, (Series, dict)): if isinstance(other, dict): other = Series(other) if other.name is None and not ignore_index: raise TypeError('Can only append a Series if ignore_index=True' ' or if the Series has a name') if other.name is None: index = None else: # other must have the same index name as self, otherwise # index name will be reset index = Index([other.name], name=self.index.name) idx_diff = other.index.difference(self.columns) try: combined_columns = self.columns.append(idx_diff) except TypeError: combined_columns = self.columns.astype(object).append(idx_diff) other = other.reindex(combined_columns, copy=False) other = DataFrame(other.values.reshape((1, len(other))), index=index, columns=combined_columns) other = other._convert(datetime=True, timedelta=True) if not self.columns.equals(combined_columns): self = self.reindex(columns=combined_columns) elif isinstance(other, list) and not isinstance(other[0], DataFrame): other = DataFrame(other) if (self.columns.get_indexer(other.columns) >= 0).all(): other = other.reindex(columns=self.columns) from pandas.core.reshape.concat import concat if isinstance(other, (list, tuple)): to_concat = [self] + other else: to_concat = [self, other] return concat(to_concat, ignore_index=ignore_index, verify_integrity=verify_integrity, sort=sort) def join(self, other, on=None, how='left', lsuffix='', rsuffix='', sort=False): """ Join columns of another DataFrame. Join columns with `other` DataFrame either on index or on a key column. Efficiently join multiple DataFrame objects by index at once by passing a list. Parameters ---------- other : DataFrame, Series, or list of DataFrame Index should be similar to one of the columns in this one. If a Series is passed, its name attribute must be set, and that will be used as the column name in the resulting joined DataFrame. on : str, list of str, or array-like, optional Column or index level name(s) in the caller to join on the index in `other`, otherwise joins index-on-index. If multiple values given, the `other` DataFrame must have a MultiIndex. Can pass an array as the join key if it is not already contained in the calling DataFrame. Like an Excel VLOOKUP operation. how : {'left', 'right', 'outer', 'inner'}, default 'left' How to handle the operation of the two objects. * left: use calling frame's index (or column if on is specified) * right: use `other`'s index. * outer: form union of calling frame's index (or column if on is specified) with `other`'s index, and sort it. lexicographically. * inner: form intersection of calling frame's index (or column if on is specified) with `other`'s index, preserving the order of the calling's one. lsuffix : str, default '' Suffix to use from left frame's overlapping columns. rsuffix : str, default '' Suffix to use from right frame's overlapping columns. sort : bool, default False Order result DataFrame lexicographically by the join key. If False, the order of the join key depends on the join type (how keyword). Returns ------- DataFrame A dataframe containing columns from both the caller and `other`. See Also -------- DataFrame.merge : For column(s)-on-columns(s) operations. Notes ----- Parameters `on`, `lsuffix`, and `rsuffix` are not supported when passing a list of `DataFrame` objects. Support for specifying index levels as the `on` parameter was added in version 0.23.0. Examples -------- >>> df = pd.DataFrame({'key': ['K0', 'K1', 'K2', 'K3', 'K4', 'K5'], ... 
'A': ['A0', 'A1', 'A2', 'A3', 'A4', 'A5']}) >>> df key A 0 K0 A0 1 K1 A1 2 K2 A2 3 K3 A3 4 K4 A4 5 K5 A5 >>> other = pd.DataFrame({'key': ['K0', 'K1', 'K2'], ... 'B': ['B0', 'B1', 'B2']}) >>> other key B 0 K0 B0 1 K1 B1 2 K2 B2 Join DataFrames using their indexes. >>> df.join(other, lsuffix='_caller', rsuffix='_other') key_caller A key_other B 0 K0 A0 K0 B0 1 K1 A1 K1 B1 2 K2 A2 K2 B2 3 K3 A3 NaN NaN 4 K4 A4 NaN NaN 5 K5 A5 NaN NaN If we want to join using the key columns, we need to set key to be the index in both `df` and `other`. The joined DataFrame will have key as its index. >>> df.set_index('key').join(other.set_index('key')) A B key K0 A0 B0 K1 A1 B1 K2 A2 B2 K3 A3 NaN K4 A4 NaN K5 A5 NaN Another option to join using the key columns is to use the `on` parameter. DataFrame.join always uses `other`'s index but we can use any column in `df`. This method preserves the original DataFrame's index in the result. >>> df.join(other.set_index('key'), on='key') key A B 0 K0 A0 B0 1 K1 A1 B1 2 K2 A2 B2 3 K3 A3 NaN 4 K4 A4 NaN 5 K5 A5 NaN """ # For SparseDataFrame's benefit return self._join_compat(other, on=on, how=how, lsuffix=lsuffix, rsuffix=rsuffix, sort=sort) def _join_compat(self, other, on=None, how='left', lsuffix='', rsuffix='', sort=False): from pandas.core.reshape.merge import merge from pandas.core.reshape.concat import concat if isinstance(other, Series): if other.name is None: raise ValueError('Other Series must have a name') other = DataFrame({other.name: other}) if isinstance(other, DataFrame): return merge(self, other, left_on=on, how=how, left_index=on is None, right_index=True, suffixes=(lsuffix, rsuffix), sort=sort) else: if on is not None: raise ValueError('Joining multiple DataFrames only supported' ' for joining on index') frames = [self] + list(other) can_concat = all(df.index.is_unique for df in frames) # join indexes only using concat if can_concat: if how == 'left': how = 'outer' join_axes = [self.index] else: join_axes = None return concat(frames, axis=1, join=how, join_axes=join_axes, verify_integrity=True) joined = frames[0] for frame in frames[1:]: joined = merge(joined, frame, how=how, left_index=True, right_index=True) return joined @Substitution('') @Appender(_merge_doc, indents=2) def merge(self, right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True, indicator=False, validate=None): from pandas.core.reshape.merge import merge return merge(self, right, how=how, on=on, left_on=left_on, right_on=right_on, left_index=left_index, right_index=right_index, sort=sort, suffixes=suffixes, copy=copy, indicator=indicator, validate=validate) def round(self, decimals=0, *args, **kwargs): """ Round a DataFrame to a variable number of decimal places. Parameters ---------- decimals : int, dict, Series Number of decimal places to round each column to. If an int is given, round each column to the same number of places. Otherwise dict and Series round to variable numbers of places. Column names should be in the keys if `decimals` is a dict-like, or in the index if `decimals` is a Series. Any columns not included in `decimals` will be left as is. Elements of `decimals` which are not columns of the input will be ignored. *args Additional keywords have no effect but might be accepted for compatibility with numpy. **kwargs Additional keywords have no effect but might be accepted for compatibility with numpy. 
Returns ------- DataFrame A DataFrame with the affected columns rounded to the specified number of decimal places. See Also -------- numpy.around : Round a numpy array to the given number of decimals. Series.round : Round a Series to the given number of decimals. Examples -------- >>> df = pd.DataFrame([(.21, .32), (.01, .67), (.66, .03), (.21, .18)], ... columns=['dogs', 'cats']) >>> df dogs cats 0 0.21 0.32 1 0.01 0.67 2 0.66 0.03 3 0.21 0.18 By providing an integer each column is rounded to the same number of decimal places >>> df.round(1) dogs cats 0 0.2 0.3 1 0.0 0.7 2 0.7 0.0 3 0.2 0.2 With a dict, the number of places for specific columns can be specified with the column names as key and the number of decimal places as value >>> df.round({'dogs': 1, 'cats': 0}) dogs cats 0 0.2 0.0 1 0.0 1.0 2 0.7 0.0 3 0.2 0.0 Using a Series, the number of places for specific columns can be specified with the column names as index and the number of decimal places as value >>> decimals = pd.Series([0, 1], index=['cats', 'dogs']) >>> df.round(decimals) dogs cats 0 0.2 0.0 1 0.0 1.0 2 0.7 0.0 3 0.2 0.0 """ from pandas.core.reshape.concat import concat def _dict_round(df, decimals): for col, vals in df.iteritems(): try: yield _series_round(vals, decimals[col]) except KeyError: yield vals def _series_round(s, decimals): if is_integer_dtype(s) or is_float_dtype(s): return s.round(decimals) return s nv.validate_round(args, kwargs) if isinstance(decimals, (dict, Series)): if isinstance(decimals, Series): if not decimals.index.is_unique: raise ValueError("Index of decimals must be unique") new_cols = [col for col in _dict_round(self, decimals)] elif is_integer(decimals): # Dispatch to Series.round new_cols = [_series_round(v, decimals) for _, v in self.iteritems()] else: raise TypeError("decimals must be an integer, a dict-like or a " "Series") if len(new_cols) > 0: return self._constructor(concat(new_cols, axis=1), index=self.index, columns=self.columns) else: return self # ---------------------------------------------------------------------- # Statistical methods, etc. def corr(self, method='pearson', min_periods=1): """ Compute pairwise correlation of columns, excluding NA/null values. Parameters ---------- method : {'pearson', 'kendall', 'spearman'} or callable * pearson : standard correlation coefficient * kendall : Kendall Tau correlation coefficient * spearman : Spearman rank correlation * callable: callable with input two 1d ndarrays and returning a float. Note that the returned matrix from corr will have 1 along the diagonals and will be symmetric regardless of the callable's behavior .. versionadded:: 0.24.0 min_periods : int, optional Minimum number of observations required per pair of columns to have a valid result. Currently only available for Pearson and Spearman correlation. Returns ------- DataFrame Correlation matrix. See Also -------- DataFrame.corrwith Series.corr Examples -------- >>> def histogram_intersection(a, b): ... v = np.minimum(a, b).sum().round(decimals=1) ... return v >>> df = pd.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], ... 
columns=['dogs', 'cats']) >>> df.corr(method=histogram_intersection) dogs cats dogs 1.0 0.3 cats 0.3 1.0 """ numeric_df = self._get_numeric_data() cols = numeric_df.columns idx = cols.copy() mat = numeric_df.values if method == 'pearson': correl = libalgos.nancorr(ensure_float64(mat), minp=min_periods) elif method == 'spearman': correl = libalgos.nancorr_spearman(ensure_float64(mat), minp=min_periods) elif method == 'kendall' or callable(method): if min_periods is None: min_periods = 1 mat = ensure_float64(mat).T corrf = nanops.get_corr_func(method) K = len(cols) correl = np.empty((K, K), dtype=float) mask = np.isfinite(mat) for i, ac in enumerate(mat): for j, bc in enumerate(mat): if i > j: continue valid = mask[i] & mask[j] if valid.sum() < min_periods: c = np.nan elif i == j: c = 1. elif not valid.all(): c = corrf(ac[valid], bc[valid]) else: c = corrf(ac, bc) correl[i, j] = c correl[j, i] = c else: raise ValueError("method must be either 'pearson', " "'spearman', 'kendall', or a callable, " "'{method}' was supplied".format(method=method)) return self._constructor(correl, index=idx, columns=cols) def cov(self, min_periods=None): """ Compute pairwise covariance of columns, excluding NA/null values. Compute the pairwise covariance among the series of a DataFrame. The returned data frame is the `covariance matrix <https://en.wikipedia.org/wiki/Covariance_matrix>`__ of the columns of the DataFrame. Both NA and null values are automatically excluded from the calculation. (See the note below about bias from missing values.) A threshold can be set for the minimum number of observations for each value created. Comparisons with observations below this threshold will be returned as ``NaN``. This method is generally used for the analysis of time series data to understand the relationship between different measures across time. Parameters ---------- min_periods : int, optional Minimum number of observations required per pair of columns to have a valid result. Returns ------- DataFrame The covariance matrix of the series of the DataFrame. See Also -------- Series.cov : Compute covariance with another Series. core.window.EWM.cov: Exponential weighted sample covariance. core.window.Expanding.cov : Expanding sample covariance. core.window.Rolling.cov : Rolling sample covariance. Notes ----- Returns the covariance matrix of the DataFrame's time series. The covariance is normalized by N-1. For DataFrames that have Series that are missing data (assuming that data is `missing at random <https://en.wikipedia.org/wiki/Missing_data#Missing_at_random>`__) the returned covariance matrix will be an unbiased estimate of the variance and covariance between the member Series. However, for many applications this estimate may not be acceptable because the estimate covariance matrix is not guaranteed to be positive semi-definite. This could lead to estimate correlations having absolute values which are greater than one, and/or a non-invertible covariance matrix. See `Estimation of covariance matrices <http://en.wikipedia.org/w/index.php?title=Estimation_of_covariance_ matrices>`__ for more details. Examples -------- >>> df = pd.DataFrame([(1, 2), (0, 3), (2, 0), (1, 1)], ... columns=['dogs', 'cats']) >>> df.cov() dogs cats dogs 0.666667 -1.000000 cats -1.000000 1.666667 >>> np.random.seed(42) >>> df = pd.DataFrame(np.random.randn(1000, 5), ... 
columns=['a', 'b', 'c', 'd', 'e']) >>> df.cov() a b c d e a 0.998438 -0.020161 0.059277 -0.008943 0.014144 b -0.020161 1.059352 -0.008543 -0.024738 0.009826 c 0.059277 -0.008543 1.010670 -0.001486 -0.000271 d -0.008943 -0.024738 -0.001486 0.921297 -0.013692 e 0.014144 0.009826 -0.000271 -0.013692 0.977795 **Minimum number of periods** This method also supports an optional ``min_periods`` keyword that specifies the required minimum number of non-NA observations for each column pair in order to have a valid result: >>> np.random.seed(42) >>> df = pd.DataFrame(np.random.randn(20, 3), ... columns=['a', 'b', 'c']) >>> df.loc[df.index[:5], 'a'] = np.nan >>> df.loc[df.index[5:10], 'b'] = np.nan >>> df.cov(min_periods=12) a b c a 0.316741 NaN -0.150812 b NaN 1.248003 0.191417 c -0.150812 0.191417 0.895202 """ numeric_df = self._get_numeric_data() cols = numeric_df.columns idx = cols.copy() mat = numeric_df.values if notna(mat).all(): if min_periods is not None and min_periods > len(mat): baseCov = np.empty((mat.shape[1], mat.shape[1])) baseCov.fill(np.nan) else: baseCov = np.cov(mat.T) baseCov = baseCov.reshape((len(cols), len(cols))) else: baseCov = libalgos.nancorr(ensure_float64(mat), cov=True, minp=min_periods) return self._constructor(baseCov, index=idx, columns=cols) def corrwith(self, other, axis=0, drop=False, method='pearson'): """ Compute pairwise correlation between rows or columns of DataFrame with rows or columns of Series or DataFrame. DataFrames are first aligned along both axes before computing the correlations. Parameters ---------- other : DataFrame, Series Object with which to compute correlations. axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' to compute column-wise, 1 or 'columns' for row-wise. drop : bool, default False Drop missing indices from result. method : {'pearson', 'kendall', 'spearman'} or callable * pearson : standard correlation coefficient * kendall : Kendall Tau correlation coefficient * spearman : Spearman rank correlation * callable: callable with input two 1d ndarrays and returning a float .. versionadded:: 0.24.0 Returns ------- Series Pairwise correlations. See Also -------- DataFrame.corr """ axis = self._get_axis_number(axis) this = self._get_numeric_data() if isinstance(other, Series): return this.apply(lambda x: other.corr(x, method=method), axis=axis) other = other._get_numeric_data() left, right = this.align(other, join='inner', copy=False) if axis == 1: left = left.T right = right.T if method == 'pearson': # mask missing values left = left + right * 0 right = right + left * 0 # demeaned data ldem = left - left.mean() rdem = right - right.mean() num = (ldem * rdem).sum() dom = (left.count() - 1) * left.std() * right.std() correl = num / dom elif method in ['kendall', 'spearman'] or callable(method): def c(x): return nanops.nancorr(x[0], x[1], method=method) correl = Series(map(c, zip(left.values.T, right.values.T)), index=left.columns) else: raise ValueError("Invalid method {method} was passed, " "valid methods are: 'pearson', 'kendall', " "'spearman', or callable". format(method=method)) if not drop: # Find non-matching labels along the given axis # and append missing correlations (GH 22375) raxis = 1 if axis == 0 else 0 result_index = (this._get_axis(raxis). 
union(other._get_axis(raxis))) idx_diff = result_index.difference(correl.index) if len(idx_diff) > 0: correl = correl.append(Series([np.nan] * len(idx_diff), index=idx_diff)) return correl # ---------------------------------------------------------------------- # ndarray-like stats methods def count(self, axis=0, level=None, numeric_only=False): """ Count non-NA cells for each column or row. The values `None`, `NaN`, `NaT`, and optionally `numpy.inf` (depending on `pandas.options.mode.use_inf_as_na`) are considered NA. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 If 0 or 'index' counts are generated for each column. If 1 or 'columns' counts are generated for each **row**. level : int or str, optional If the axis is a `MultiIndex` (hierarchical), count along a particular `level`, collapsing into a `DataFrame`. A `str` specifies the level name. numeric_only : bool, default False Include only `float`, `int` or `boolean` data. Returns ------- Series or DataFrame For each column/row the number of non-NA/null entries. If `level` is specified returns a `DataFrame`. See Also -------- Series.count: Number of non-NA elements in a Series. DataFrame.shape: Number of DataFrame rows and columns (including NA elements). DataFrame.isna: Boolean same-sized DataFrame showing places of NA elements. Examples -------- Constructing DataFrame from a dictionary: >>> df = pd.DataFrame({"Person": ... ["John", "Myla", "Lewis", "John", "Myla"], ... "Age": [24., np.nan, 21., 33, 26], ... "Single": [False, True, True, True, False]}) >>> df Person Age Single 0 John 24.0 False 1 Myla NaN True 2 Lewis 21.0 True 3 John 33.0 True 4 Myla 26.0 False Notice the uncounted NA values: >>> df.count() Person 5 Age 4 Single 5 dtype: int64 Counts for each **row**: >>> df.count(axis='columns') 0 3 1 2 2 3 3 3 4 3 dtype: int64 Counts for one level of a `MultiIndex`: >>> df.set_index(["Person", "Single"]).count(level="Person") Age Person John 2 Lewis 1 Myla 1 """ axis = self._get_axis_number(axis) if level is not None: return self._count_level(level, axis=axis, numeric_only=numeric_only) if numeric_only: frame = self._get_numeric_data() else: frame = self # GH #423 if len(frame._get_axis(axis)) == 0: result = Series(0, index=frame._get_agg_axis(axis)) else: if frame._is_mixed_type or frame._data.any_extension_types: # the or any_extension_types is really only hit for single- # column frames with an extension array result = notna(frame).sum(axis=axis) else: # GH13407 series_counts = notna(frame).sum(axis=axis) counts = series_counts.values result = Series(counts, index=frame._get_agg_axis(axis)) return result.astype('int64') def _count_level(self, level, axis=0, numeric_only=False): if numeric_only: frame = self._get_numeric_data() else: frame = self count_axis = frame._get_axis(axis) agg_axis = frame._get_agg_axis(axis) if not isinstance(count_axis, MultiIndex): raise TypeError("Can only count levels on hierarchical " "{ax}.".format(ax=self._get_axis_name(axis))) if frame._is_mixed_type: # Since we have mixed types, calling notna(frame.values) might # upcast everything to object mask = notna(frame).values else: # But use the speedup when we have homogeneous dtypes mask = notna(frame.values) if axis == 1: # We're transposing the mask rather than frame to avoid potential # upcasts to object, which induces a ~20x slowdown mask = mask.T if isinstance(level, str): level = count_axis._get_level_number(level) level_index = count_axis.levels[level] level_codes = ensure_int64(count_axis.codes[level]) counts = 
lib.count_level_2d(mask, level_codes, len(level_index), axis=0) result = DataFrame(counts, index=level_index, columns=agg_axis) if axis == 1: # Undo our earlier transpose return result.T else: return result def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None, filter_type=None, **kwds): if axis is None and filter_type == 'bool': labels = None constructor = None else: # TODO: Make other agg func handle axis=None properly axis = self._get_axis_number(axis) labels = self._get_agg_axis(axis) constructor = self._constructor def f(x): return op(x, axis=axis, skipna=skipna, **kwds) # exclude timedelta/datetime unless we are uniform types if (axis == 1 and self._is_datelike_mixed_type and (not self._is_homogeneous_type and not is_datetime64tz_dtype(self.dtypes[0]))): numeric_only = True if numeric_only is None: try: values = self.values result = f(values) if (filter_type == 'bool' and is_object_dtype(values) and axis is None): # work around https://github.com/numpy/numpy/issues/10489 # TODO: combine with hasattr(result, 'dtype') further down # hard since we don't have `values` down there. result = np.bool_(result) except Exception as e: # try by-column first if filter_type is None and axis == 0: try: # this can end up with a non-reduction # but not always. if the types are mixed # with datelike then need to make sure a series # we only end up here if we have not specified # numeric_only and yet we have tried a # column-by-column reduction, where we have mixed type. # So let's just do what we can from pandas.core.apply import frame_apply opa = frame_apply(self, func=f, result_type='expand', ignore_failures=True) result = opa.get_result() if result.ndim == self.ndim: result = result.iloc[0] return result except Exception: pass if filter_type is None or filter_type == 'numeric': data = self._get_numeric_data() elif filter_type == 'bool': data = self._get_bool_data() else: # pragma: no cover e = NotImplementedError( "Handling exception with filter_type {f} not" "implemented.".format(f=filter_type)) raise_with_traceback(e) with np.errstate(all='ignore'): result = f(data.values) labels = data._get_agg_axis(axis) else: if numeric_only: if filter_type is None or filter_type == 'numeric': data = self._get_numeric_data() elif filter_type == 'bool': # GH 25101, # GH 24434 data = self._get_bool_data() if axis == 0 else self else: # pragma: no cover msg = ("Generating numeric_only data with filter_type {f}" "not supported.".format(f=filter_type)) raise NotImplementedError(msg) values = data.values labels = data._get_agg_axis(axis) else: values = self.values result = f(values) if hasattr(result, 'dtype') and is_object_dtype(result.dtype): try: if filter_type is None or filter_type == 'numeric': result = result.astype(np.float64) elif filter_type == 'bool' and notna(result).all(): result = result.astype(np.bool_) except (ValueError, TypeError): # try to coerce to the original dtypes item by item if we can if axis == 0: result = coerce_to_dtypes(result, self.dtypes) if constructor is not None: result = Series(result, index=labels) return result def nunique(self, axis=0, dropna=True): """ Count distinct observations over requested axis. Return Series with number of distinct observations. Can ignore NaN values. .. versionadded:: 0.20.0 Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to use. 0 or 'index' for row-wise, 1 or 'columns' for column-wise. dropna : bool, default True Don't include NaN in the counts. 
Returns ------- Series See Also -------- Series.nunique: Method nunique for Series. DataFrame.count: Count non-NA cells for each column or row. Examples -------- >>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [1, 1, 1]}) >>> df.nunique() A 3 B 1 dtype: int64 >>> df.nunique(axis=1) 0 1 1 2 2 2 dtype: int64 """ return self.apply(Series.nunique, axis=axis, dropna=dropna) def idxmin(self, axis=0, skipna=True): """ Return index of first occurrence of minimum over requested axis. NA/null values are excluded. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' for row-wise, 1 or 'columns' for column-wise skipna : boolean, default True Exclude NA/null values. If an entire row/column is NA, the result will be NA. Returns ------- Series Indexes of minima along the specified axis. Raises ------ ValueError * If the row/column is empty See Also -------- Series.idxmin Notes ----- This method is the DataFrame version of ``ndarray.argmin``. """ axis = self._get_axis_number(axis) indices = nanops.nanargmin(self.values, axis=axis, skipna=skipna) index = self._get_axis(axis) result = [index[i] if i >= 0 else np.nan for i in indices] return Series(result, index=self._get_agg_axis(axis)) def idxmax(self, axis=0, skipna=True): """ Return index of first occurrence of maximum over requested axis. NA/null values are excluded. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' for row-wise, 1 or 'columns' for column-wise skipna : boolean, default True Exclude NA/null values. If an entire row/column is NA, the result will be NA. Returns ------- Series Indexes of maxima along the specified axis. Raises ------ ValueError * If the row/column is empty See Also -------- Series.idxmax Notes ----- This method is the DataFrame version of ``ndarray.argmax``. """ axis = self._get_axis_number(axis) indices = nanops.nanargmax(self.values, axis=axis, skipna=skipna) index = self._get_axis(axis) result = [index[i] if i >= 0 else np.nan for i in indices] return Series(result, index=self._get_agg_axis(axis)) def _get_agg_axis(self, axis_num): """ Let's be explicit about this. """ if axis_num == 0: return self.columns elif axis_num == 1: return self.index else: raise ValueError('Axis must be 0 or 1 (got %r)' % axis_num) def mode(self, axis=0, numeric_only=False, dropna=True): """ Get the mode(s) of each element along the selected axis. The mode of a set of values is the value that appears most often. It can be multiple values. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to iterate over while searching for the mode: * 0 or 'index' : get mode of each column * 1 or 'columns' : get mode of each row numeric_only : bool, default False If True, only apply to numeric columns. dropna : bool, default True Don't consider counts of NaN/NaT. .. versionadded:: 0.24.0 Returns ------- DataFrame The modes of each column or row. See Also -------- Series.mode : Return the highest frequency value in a Series. Series.value_counts : Return the counts of values in a Series. Examples -------- >>> df = pd.DataFrame([('bird', 2, 2), ... ('mammal', 4, np.nan), ... ('arthropod', 8, 0), ... ('bird', 2, np.nan)], ... index=('falcon', 'horse', 'spider', 'ostrich'), ... columns=('species', 'legs', 'wings')) >>> df species legs wings falcon bird 2 2.0 horse mammal 4 NaN spider arthropod 8 0.0 ostrich bird 2 NaN By default, missing values are not considered, and the mode of wings are both 0 and 2. 
The second row of species and legs contains ``NaN``, because they have only one mode, but the DataFrame has two rows. >>> df.mode() species legs wings 0 bird 2.0 0.0 1 NaN NaN 2.0 Setting ``dropna=False`` ``NaN`` values are considered and they can be the mode (like for wings). >>> df.mode(dropna=False) species legs wings 0 bird 2 NaN Setting ``numeric_only=True``, only the mode of numeric columns is computed, and columns of other types are ignored. >>> df.mode(numeric_only=True) legs wings 0 2.0 0.0 1 NaN 2.0 To compute the mode over columns and not rows, use the axis parameter: >>> df.mode(axis='columns', numeric_only=True) 0 1 falcon 2.0 NaN horse 4.0 NaN spider 0.0 8.0 ostrich 2.0 NaN """ data = self if not numeric_only else self._get_numeric_data() def f(s): return s.mode(dropna=dropna) return data.apply(f, axis=axis) def quantile(self, q=0.5, axis=0, numeric_only=True, interpolation='linear'): """ Return values at the given quantile over requested axis. Parameters ---------- q : float or array-like, default 0.5 (50% quantile) Value between 0 <= q <= 1, the quantile(s) to compute. axis : {0, 1, 'index', 'columns'} (default 0) Equals 0 or 'index' for row-wise, 1 or 'columns' for column-wise. numeric_only : bool, default True If False, the quantile of datetime and timedelta data will be computed as well. interpolation : {'linear', 'lower', 'higher', 'midpoint', 'nearest'} This optional parameter specifies the interpolation method to use, when the desired quantile lies between two data points `i` and `j`: * linear: `i + (j - i) * fraction`, where `fraction` is the fractional part of the index surrounded by `i` and `j`. * lower: `i`. * higher: `j`. * nearest: `i` or `j` whichever is nearest. * midpoint: (`i` + `j`) / 2. .. versionadded:: 0.18.0 Returns ------- Series or DataFrame If ``q`` is an array, a DataFrame will be returned where the index is ``q``, the columns are the columns of self, and the values are the quantiles. If ``q`` is a float, a Series will be returned where the index is the columns of self and the values are the quantiles. See Also -------- core.window.Rolling.quantile: Rolling quantile. numpy.percentile: Numpy function to compute the percentile. Examples -------- >>> df = pd.DataFrame(np.array([[1, 1], [2, 10], [3, 100], [4, 100]]), ... columns=['a', 'b']) >>> df.quantile(.1) a 1.3 b 3.7 Name: 0.1, dtype: float64 >>> df.quantile([.1, .5]) a b 0.1 1.3 3.7 0.5 2.5 55.0 Specifying `numeric_only=False` will also compute the quantile of datetime and timedelta data. >>> df = pd.DataFrame({'A': [1, 2], ... 'B': [pd.Timestamp('2010'), ... pd.Timestamp('2011')], ... 'C': [pd.Timedelta('1 days'), ... pd.Timedelta('2 days')]}) >>> df.quantile(0.5, numeric_only=False) A 1.5 B 2010-07-02 12:00:00 C 1 days 12:00:00 Name: 0.5, dtype: object """ self._check_percentile(q) data = self._get_numeric_data() if numeric_only else self axis = self._get_axis_number(axis) is_transposed = axis == 1 if is_transposed: data = data.T result = data._data.quantile(qs=q, axis=1, interpolation=interpolation, transposed=is_transposed) if result.ndim == 2: result = self._constructor(result) else: result = self._constructor_sliced(result, name=q) if is_transposed: result = result.T return result def to_timestamp(self, freq=None, how='start', axis=0, copy=True): """ Cast to DatetimeIndex of timestamps, at *beginning* of period. Parameters ---------- freq : str, default frequency of PeriodIndex Desired frequency. 
how : {'s', 'e', 'start', 'end'} Convention for converting period to timestamp; start of period vs. end. axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to convert (the index by default). copy : bool, default True If False then underlying input data is not copied. Returns ------- DataFrame with DatetimeIndex """ new_data = self._data if copy: new_data = new_data.copy() axis = self._get_axis_number(axis) if axis == 0: new_data.set_axis(1, self.index.to_timestamp(freq=freq, how=how)) elif axis == 1: new_data.set_axis(0, self.columns.to_timestamp(freq=freq, how=how)) else: # pragma: no cover raise AssertionError('Axis must be 0 or 1. Got {ax!s}'.format( ax=axis)) return self._constructor(new_data) def to_period(self, freq=None, axis=0, copy=True): """ Convert DataFrame from DatetimeIndex to PeriodIndex with desired frequency (inferred from index if not passed). Parameters ---------- freq : str, default Frequency of the PeriodIndex. axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to convert (the index by default). copy : bool, default True If False then underlying input data is not copied. Returns ------- TimeSeries with PeriodIndex """ new_data = self._data if copy: new_data = new_data.copy() axis = self._get_axis_number(axis) if axis == 0: new_data.set_axis(1, self.index.to_period(freq=freq)) elif axis == 1: new_data.set_axis(0, self.columns.to_period(freq=freq)) else: # pragma: no cover raise AssertionError('Axis must be 0 or 1. Got {ax!s}'.format( ax=axis)) return self._constructor(new_data) def isin(self, values): """ Whether each element in the DataFrame is contained in values. Parameters ---------- values : iterable, Series, DataFrame or dict The result will only be true at a location if all the labels match. If `values` is a Series, that's the index. If `values` is a dict, the keys must be the column names, which must match. If `values` is a DataFrame, then both the index and column labels must match. Returns ------- DataFrame DataFrame of booleans showing whether each element in the DataFrame is contained in values. See Also -------- DataFrame.eq: Equality test for DataFrame. Series.isin: Equivalent method on Series. Series.str.contains: Test if pattern or regex is contained within a string of a Series or Index. Examples -------- >>> df = pd.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]}, ... index=['falcon', 'dog']) >>> df num_legs num_wings falcon 2 2 dog 4 0 When ``values`` is a list check whether every value in the DataFrame is present in the list (which animals have 0 or 2 legs or wings) >>> df.isin([0, 2]) num_legs num_wings falcon True True dog False True When ``values`` is a dict, we can pass values to check for each column separately: >>> df.isin({'num_wings': [0, 3]}) num_legs num_wings falcon False False dog False True When ``values`` is a Series or DataFrame the index and column must match. Note that 'falcon' does not match based on the number of legs in df2. >>> other = pd.DataFrame({'num_legs': [8, 2], 'num_wings': [0, 2]}, ... 
index=['spider', 'falcon']) >>> df.isin(other) num_legs num_wings falcon True True dog False False """ if isinstance(values, dict): from pandas.core.reshape.concat import concat values = collections.defaultdict(list, values) return concat((self.iloc[:, [i]].isin(values[col]) for i, col in enumerate(self.columns)), axis=1) elif isinstance(values, Series): if not values.index.is_unique: raise ValueError("cannot compute isin with " "a duplicate axis.") return self.eq(values.reindex_like(self), axis='index') elif isinstance(values, DataFrame): if not (values.columns.is_unique and values.index.is_unique): raise ValueError("cannot compute isin with " "a duplicate axis.") return self.eq(values.reindex_like(self)) else: if not is_list_like(values): raise TypeError("only list-like or dict-like objects are " "allowed to be passed to DataFrame.isin(), " "you passed a " "{0!r}".format(type(values).__name__)) return DataFrame( algorithms.isin(self.values.ravel(), values).reshape(self.shape), self.index, self.columns) # ---------------------------------------------------------------------- # Add plotting methods to DataFrame plot = CachedAccessor("plot", pandas.plotting.FramePlotMethods) hist = pandas.plotting.hist_frame boxplot = pandas.plotting.boxplot_frame sparse = CachedAccessor("sparse", SparseFrameAccessor) DataFrame._setup_axes(['index', 'columns'], info_axis=1, stat_axis=0, axes_are_reversed=True, aliases={'rows': 0}, docs={ 'index': 'The index (row labels) of the DataFrame.', 'columns': 'The column labels of the DataFrame.'}) DataFrame._add_numeric_operations() DataFrame._add_series_or_dataframe_operations() ops.add_flex_arithmetic_methods(DataFrame) ops.add_special_arithmetic_methods(DataFrame) def _from_nested_dict(data): # TODO: this should be seriously cythonized new_data = OrderedDict() for index, s in data.items(): for col, v in s.items(): new_data[col] = new_data.get(col, OrderedDict()) new_data[col][index] = v return new_data def _put_str(s, space): return '{s}'.format(s=s)[:space].ljust(space)
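A usage note on the DataFrame.append docstring above: it recommends collecting rows first and building the frame in one pass rather than appending inside a loop. A minimal sketch of that pattern, with an invented column name and values:

import pandas as pd

# build the rows up front, then construct the frame in a single pass
rows = [{'A': i} for i in range(5)]
df = pd.DataFrame(rows)

# the discouraged equivalent reallocates a new frame on every iteration:
# slow = pd.DataFrame(columns=['A'])
# for i in range(5):
#     slow = slow.append({'A': i}, ignore_index=True)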
import numpy as np import pytest from pandas import DataFrame, MultiIndex, Series from pandas.core import common as com import pandas.util.testing as tm def test_detect_chained_assignment(): # Inplace ops, originally from: # http://stackoverflow.com/questions/20508968/series-fillna-in-a-multiindex-dataframe-does-not-fill-is-this-a-bug a = [12, 23] b = [123, None] c = [1234, 2345] d = [12345, 23456] tuples = [('eyes', 'left'), ('eyes', 'right'), ('ears', 'left'), ('ears', 'right')] events = {('eyes', 'left'): a, ('eyes', 'right'): b, ('ears', 'left'): c, ('ears', 'right'): d} multiind = MultiIndex.from_tuples(tuples, names=['part', 'side']) zed = DataFrame(events, index=['a', 'b'], columns=multiind) with pytest.raises(com.SettingWithCopyError): zed['eyes']['right'].fillna(value=555, inplace=True) def test_cache_updating(): # 5216 # make sure that we don't try to set a dead cache a = np.random.rand(10, 3) df = DataFrame(a, columns=['x', 'y', 'z']) tuples = [(i, j) for i in range(5) for j in range(2)] index = MultiIndex.from_tuples(tuples) df.index = index # setting via chained assignment # but actually works, since everything is a view df.loc[0]['z'].iloc[0] = 1. result = df.loc[(0, 0), 'z'] assert result == 1 # correct setting df.loc[(0, 0), 'z'] = 2 result = df.loc[(0, 0), 'z'] assert result == 2 def test_indexer_caching(): # GH5727 # make sure that indexers are in the _internal_names_set n = 1000001 arrays = (range(n), range(n)) index = MultiIndex.from_tuples(zip(*arrays)) s = Series(np.zeros(n), index=index) str(s) # setitem expected = Series(np.ones(n), index=index) s = Series(np.zeros(n), index=index) s[s == 0] = 1 tm.assert_series_equal(s, expected)
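For context on test_detect_chained_assignment above: chained indexing such as df['a']['b'] = value may write into a temporary copy, which is exactly what SettingWithCopyError flags. A hedged sketch of the single-call .loc assignment that avoids the problem, using made-up data:

import numpy as np
import pandas as pd

df = pd.DataFrame(np.zeros((3, 2)), columns=['x', 'y'])

# a chained form like df['x'][0] = 1.0 may modify a copy instead;
# one .loc call indexes the original frame directly
df.loc[0, 'x'] = 1.0
assert df.loc[0, 'x'] == 1.0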
cbertinato/pandas
pandas/tests/indexing/multiindex/test_chaining_and_caching.py
pandas/core/frame.py
""" manage PyTables query interface via Expressions """ import ast from functools import partial import numpy as np from pandas._libs.tslibs import Timedelta, Timestamp from pandas.compat.chainmap import DeepChainMap from pandas.core.dtypes.common import is_list_like import pandas as pd from pandas.core.base import StringMixin import pandas.core.common as com from pandas.core.computation import expr, ops from pandas.core.computation.common import _ensure_decoded from pandas.core.computation.expr import BaseExprVisitor from pandas.core.computation.ops import UndefinedVariableError, is_term from pandas.io.formats.printing import pprint_thing, pprint_thing_encoded class Scope(expr.Scope): __slots__ = 'queryables', def __init__(self, level, global_dict=None, local_dict=None, queryables=None): super().__init__(level + 1, global_dict=global_dict, local_dict=local_dict) self.queryables = queryables or dict() class Term(ops.Term): def __new__(cls, name, env, side=None, encoding=None): klass = Constant if not isinstance(name, str) else cls supr_new = StringMixin.__new__ return supr_new(klass) def __init__(self, name, env, side=None, encoding=None): super().__init__(name, env, side=side, encoding=encoding) def _resolve_name(self): # must be a queryables if self.side == 'left': if self.name not in self.env.queryables: raise NameError('name {name!r} is not defined' .format(name=self.name)) return self.name # resolve the rhs (and allow it to be None) try: return self.env.resolve(self.name, is_local=False) except UndefinedVariableError: return self.name # read-only property overwriting read/write property @property # type: ignore def value(self): return self._value class Constant(Term): def __init__(self, value, env, side=None, encoding=None): super().__init__(value, env, side=side, encoding=encoding) def _resolve_name(self): return self._name class BinOp(ops.BinOp): _max_selectors = 31 def __init__(self, op, lhs, rhs, queryables, encoding): super().__init__(op, lhs, rhs) self.queryables = queryables self.encoding = encoding self.filter = None self.condition = None def _disallow_scalar_only_bool_ops(self): pass def prune(self, klass): def pr(left, right): """ create and return a new specialized BinOp from myself """ if left is None: return right elif right is None: return left k = klass if isinstance(left, ConditionBinOp): if (isinstance(left, ConditionBinOp) and isinstance(right, ConditionBinOp)): k = JointConditionBinOp elif isinstance(left, k): return left elif isinstance(right, k): return right elif isinstance(left, FilterBinOp): if (isinstance(left, FilterBinOp) and isinstance(right, FilterBinOp)): k = JointFilterBinOp elif isinstance(left, k): return left elif isinstance(right, k): return right return k(self.op, left, right, queryables=self.queryables, encoding=self.encoding).evaluate() left, right = self.lhs, self.rhs if is_term(left) and is_term(right): res = pr(left.value, right.value) elif not is_term(left) and is_term(right): res = pr(left.prune(klass), right.value) elif is_term(left) and not is_term(right): res = pr(left.value, right.prune(klass)) elif not (is_term(left) or is_term(right)): res = pr(left.prune(klass), right.prune(klass)) return res def conform(self, rhs): """ inplace conform rhs """ if not is_list_like(rhs): rhs = [rhs] if isinstance(rhs, np.ndarray): rhs = rhs.ravel() return rhs @property def is_valid(self): """ return True if this is a valid field """ return self.lhs in self.queryables @property def is_in_table(self): """ return True if this is a valid column name for 
generation (e.g. an actual column in the table) """ return self.queryables.get(self.lhs) is not None @property def kind(self): """ the kind of my field """ return getattr(self.queryables.get(self.lhs), 'kind', None) @property def meta(self): """ the meta of my field """ return getattr(self.queryables.get(self.lhs), 'meta', None) @property def metadata(self): """ the metadata of my field """ return getattr(self.queryables.get(self.lhs), 'metadata', None) def generate(self, v): """ create and return the op string for this TermValue """ val = v.tostring(self.encoding) return "({lhs} {op} {val})".format(lhs=self.lhs, op=self.op, val=val) def convert_value(self, v): """ convert the expression that is in the term to something that is accepted by pytables """ def stringify(value): if self.encoding is not None: encoder = partial(pprint_thing_encoded, encoding=self.encoding) else: encoder = pprint_thing return encoder(value) kind = _ensure_decoded(self.kind) meta = _ensure_decoded(self.meta) if kind == 'datetime64' or kind == 'datetime': if isinstance(v, (int, float)): v = stringify(v) v = _ensure_decoded(v) v = Timestamp(v) if v.tz is not None: v = v.tz_convert('UTC') return TermValue(v, v.value, kind) elif kind == 'timedelta64' or kind == 'timedelta': v = Timedelta(v, unit='s').value return TermValue(int(v), v, kind) elif meta == 'category': metadata = com.values_from_object(self.metadata) result = metadata.searchsorted(v, side='left') # result returns 0 if v is first element or if v is not in metadata # check that metadata contains v if not result and v not in metadata: result = -1 return TermValue(result, result, 'integer') elif kind == 'integer': v = int(float(v)) return TermValue(v, v, kind) elif kind == 'float': v = float(v) return TermValue(v, v, kind) elif kind == 'bool': if isinstance(v, str): v = not v.strip().lower() in ['false', 'f', 'no', 'n', 'none', '0', '[]', '{}', ''] else: v = bool(v) return TermValue(v, v, kind) elif isinstance(v, str): # string quoting return TermValue(v, stringify(v), 'string') else: raise TypeError("Cannot compare {v} of type {typ} to {kind} column" .format(v=v, typ=type(v), kind=kind)) def convert_values(self): pass class FilterBinOp(BinOp): def __str__(self): return pprint_thing("[Filter : [{lhs}] -> [{op}]" .format(lhs=self.filter[0], op=self.filter[1])) def invert(self): """ invert the filter """ if self.filter is not None: f = list(self.filter) f[1] = self.generate_filter_op(invert=True) self.filter = tuple(f) return self def format(self): """ return the actual filter format """ return [self.filter] def evaluate(self): if not self.is_valid: raise ValueError("query term is not valid [{slf}]" .format(slf=self)) rhs = self.conform(self.rhs) values = [TermValue(v, v, self.kind).value for v in rhs] if self.is_in_table: # if too many values to create the expression, use a filter instead if self.op in ['==', '!='] and len(values) > self._max_selectors: filter_op = self.generate_filter_op() self.filter = ( self.lhs, filter_op, pd.Index(values)) return self return None # equality conditions if self.op in ['==', '!=']: filter_op = self.generate_filter_op() self.filter = ( self.lhs, filter_op, pd.Index(values)) else: raise TypeError("passing a filterable condition to a non-table " "indexer [{slf}]".format(slf=self)) return self def generate_filter_op(self, invert=False): if (self.op == '!=' and not invert) or (self.op == '==' and invert): return lambda axis, vals: ~axis.isin(vals) else: return lambda axis, vals: axis.isin(vals) class JointFilterBinOp(FilterBinOp): 
def format(self): raise NotImplementedError("unable to collapse Joint Filters") def evaluate(self): return self class ConditionBinOp(BinOp): def __str__(self): return pprint_thing("[Condition : [{cond}]]" .format(cond=self.condition)) def invert(self): """ invert the condition """ # if self.condition is not None: # self.condition = "~(%s)" % self.condition # return self raise NotImplementedError("cannot use an invert condition when " "passing to numexpr") def format(self): """ return the actual ne format """ return self.condition def evaluate(self): if not self.is_valid: raise ValueError("query term is not valid [{slf}]" .format(slf=self)) # convert values if we are in the table if not self.is_in_table: return None rhs = self.conform(self.rhs) values = [self.convert_value(v) for v in rhs] # equality conditions if self.op in ['==', '!=']: # too many values to create the expression? if len(values) <= self._max_selectors: vs = [self.generate(v) for v in values] self.condition = "({cond})".format(cond=' | '.join(vs)) # use a filter after reading else: return None else: self.condition = self.generate(values[0]) return self class JointConditionBinOp(ConditionBinOp): def evaluate(self): self.condition = "({lhs} {op} {rhs})".format(lhs=self.lhs.condition, op=self.op, rhs=self.rhs.condition) return self class UnaryOp(ops.UnaryOp): def prune(self, klass): if self.op != '~': raise NotImplementedError("UnaryOp only support invert type ops") operand = self.operand operand = operand.prune(klass) if operand is not None: if issubclass(klass, ConditionBinOp): if operand.condition is not None: return operand.invert() elif issubclass(klass, FilterBinOp): if operand.filter is not None: return operand.invert() return None _op_classes = {'unary': UnaryOp} class ExprVisitor(BaseExprVisitor): const_type = Constant term_type = Term def __init__(self, env, engine, parser, **kwargs): super().__init__(env, engine, parser) for bin_op in self.binary_ops: bin_node = self.binary_op_nodes_map[bin_op] setattr(self, 'visit_{node}'.format(node=bin_node), lambda node, bin_op=bin_op: partial(BinOp, bin_op, **kwargs)) def visit_UnaryOp(self, node, **kwargs): if isinstance(node.op, (ast.Not, ast.Invert)): return UnaryOp('~', self.visit(node.operand)) elif isinstance(node.op, ast.USub): return self.const_type(-self.visit(node.operand).value, self.env) elif isinstance(node.op, ast.UAdd): raise NotImplementedError('Unary addition not supported') def visit_Index(self, node, **kwargs): return self.visit(node.value).value def visit_Assign(self, node, **kwargs): cmpr = ast.Compare(ops=[ast.Eq()], left=node.targets[0], comparators=[node.value]) return self.visit(cmpr) def visit_Subscript(self, node, **kwargs): # only allow simple subscripts value = self.visit(node.value) slobj = self.visit(node.slice) try: value = value.value except AttributeError: pass try: return self.const_type(value[slobj], self.env) except TypeError: raise ValueError("cannot subscript {value!r} with " "{slobj!r}".format(value=value, slobj=slobj)) def visit_Attribute(self, node, **kwargs): attr = node.attr value = node.value ctx = node.ctx.__class__ if ctx == ast.Load: # resolve the value resolved = self.visit(value) # try to get the value to see if we are another expression try: resolved = resolved.value except (AttributeError): pass try: return self.term_type(getattr(resolved, attr), self.env) except AttributeError: # something like datetime.datetime where scope is overridden if isinstance(value, ast.Name) and value.id == attr: return resolved raise 
ValueError("Invalid Attribute context {name}" .format(name=ctx.__name__)) def translate_In(self, op): return ast.Eq() if isinstance(op, ast.In) else op def _rewrite_membership_op(self, node, left, right): return self.visit(node.op), node.op, left, right def _validate_where(w): """ Validate that the where statement is of the right type. The type may either be String, Expr, or list-like of Exprs. Parameters ---------- w : String term expression, Expr, or list-like of Exprs. Returns ------- where : The original where clause if the check was successful. Raises ------ TypeError : An invalid data type was passed in for w (e.g. dict). """ if not (isinstance(w, (Expr, str)) or is_list_like(w)): raise TypeError("where must be passed as a string, Expr, " "or list-like of Exprs") return w class Expr(expr.Expr): """ hold a pytables like expression, comprised of possibly multiple 'terms' Parameters ---------- where : string term expression, Expr, or list-like of Exprs queryables : a "kinds" map (dict of column name -> kind), or None if column is non-indexable encoding : an encoding that will encode the query terms Returns ------- an Expr object Examples -------- 'index>=date' "columns=['A', 'D']" 'columns=A' 'columns==A' "~(columns=['A','B'])" 'index>df.index[3] & string="bar"' '(index>df.index[3] & index<=df.index[6]) | string="bar"' "ts>=Timestamp('2012-02-01')" "major_axis>=20130101" """ def __init__(self, where, queryables=None, encoding=None, scope_level=0): where = _validate_where(where) self.encoding = encoding self.condition = None self.filter = None self.terms = None self._visitor = None # capture the environment if needed local_dict = DeepChainMap() if isinstance(where, Expr): local_dict = where.env.scope where = where.expr elif isinstance(where, (list, tuple)): for idx, w in enumerate(where): if isinstance(w, Expr): local_dict = w.env.scope else: w = _validate_where(w) where[idx] = w where = ' & '.join(map('({})'.format, com.flatten(where))) # noqa self.expr = where self.env = Scope(scope_level + 1, local_dict=local_dict) if queryables is not None and isinstance(self.expr, str): self.env.queryables.update(queryables) self._visitor = ExprVisitor(self.env, queryables=queryables, parser='pytables', engine='pytables', encoding=encoding) self.terms = self.parse() def __str__(self): if self.terms is not None: return pprint_thing(self.terms) return pprint_thing(self.expr) def evaluate(self): """ create and return the numexpr condition and filter """ try: self.condition = self.terms.prune(ConditionBinOp) except AttributeError: raise ValueError("cannot process expression [{expr}], [{slf}] " "is not a valid condition".format(expr=self.expr, slf=self)) try: self.filter = self.terms.prune(FilterBinOp) except AttributeError: raise ValueError("cannot process expression [{expr}], [{slf}] " "is not a valid filter".format(expr=self.expr, slf=self)) return self.condition, self.filter class TermValue: """ hold a term value the we use to construct a condition/filter """ def __init__(self, value, converted, kind): self.value = value self.converted = converted self.kind = kind def tostring(self, encoding): """ quote the string if not encoded else encode and return """ if self.kind == 'string': if encoding is not None: return self.converted return '"{converted}"'.format(converted=self.converted) elif self.kind == 'float': # python 2 str(float) is not always # round-trippable so use repr() return repr(self.converted) return self.converted def maybe_expression(s): """ loose checking if s is a pytables-acceptable 
expression """ if not isinstance(s, str): return False ops = ExprVisitor.binary_ops + ExprVisitor.unary_ops + ('=',) # make sure we have an op at least return any(op in s for op in ops)
import numpy as np import pytest from pandas import DataFrame, MultiIndex, Series from pandas.core import common as com import pandas.util.testing as tm def test_detect_chained_assignment(): # Inplace ops, originally from: # http://stackoverflow.com/questions/20508968/series-fillna-in-a-multiindex-dataframe-does-not-fill-is-this-a-bug a = [12, 23] b = [123, None] c = [1234, 2345] d = [12345, 23456] tuples = [('eyes', 'left'), ('eyes', 'right'), ('ears', 'left'), ('ears', 'right')] events = {('eyes', 'left'): a, ('eyes', 'right'): b, ('ears', 'left'): c, ('ears', 'right'): d} multiind = MultiIndex.from_tuples(tuples, names=['part', 'side']) zed = DataFrame(events, index=['a', 'b'], columns=multiind) with pytest.raises(com.SettingWithCopyError): zed['eyes']['right'].fillna(value=555, inplace=True) def test_cache_updating(): # 5216 # make sure that we don't try to set a dead cache a = np.random.rand(10, 3) df = DataFrame(a, columns=['x', 'y', 'z']) tuples = [(i, j) for i in range(5) for j in range(2)] index = MultiIndex.from_tuples(tuples) df.index = index # setting via chained assignment # but actually works, since everything is a view df.loc[0]['z'].iloc[0] = 1. result = df.loc[(0, 0), 'z'] assert result == 1 # correct setting df.loc[(0, 0), 'z'] = 2 result = df.loc[(0, 0), 'z'] assert result == 2 def test_indexer_caching(): # GH5727 # make sure that indexers are in the _internal_names_set n = 1000001 arrays = (range(n), range(n)) index = MultiIndex.from_tuples(zip(*arrays)) s = Series(np.zeros(n), index=index) str(s) # setitem expected = Series(np.ones(n), index=index) s = Series(np.zeros(n), index=index) s[s == 0] = 1 tm.assert_series_equal(s, expected)
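As a side note to the chained-assignment tests above, here is a hedged, self-contained illustration of the pitfall they guard against; the frame and values are made up for the example.

import pandas as pd

# 'warn' is the default mode; 'raise' mirrors the SettingWithCopyError the test expects.
pd.set_option('mode.chained_assignment', 'warn')

df = pd.DataFrame({'x': [1.0, None], 'y': [3.0, 4.0]})

# Chained assignment may act on a temporary copy and is flagged with
# SettingWithCopyWarning; whether the original frame changes is not guaranteed.
df['x'][1] = 0.0

# A single .loc call sets on the original frame unambiguously.
df.loc[1, 'x'] = 0.0
print(df)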
cbertinato/pandas
pandas/tests/indexing/multiindex/test_chaining_and_caching.py
pandas/core/computation/pytables.py
import numpy as np import pytest import pandas as pd from pandas import Index, MultiIndex @pytest.fixture def idx(): # a MultiIndex used to test the general functionality of the # general functionality of this object major_axis = Index(['foo', 'bar', 'baz', 'qux']) minor_axis = Index(['one', 'two']) major_codes = np.array([0, 0, 1, 2, 3, 3]) minor_codes = np.array([0, 1, 0, 1, 0, 1]) index_names = ['first', 'second'] mi = MultiIndex(levels=[major_axis, minor_axis], codes=[major_codes, minor_codes], names=index_names, verify_integrity=False) return mi @pytest.fixture def idx_dup(): # compare tests/indexes/multi/conftest.py major_axis = Index(['foo', 'bar', 'baz', 'qux']) minor_axis = Index(['one', 'two']) major_codes = np.array([0, 0, 1, 0, 1, 1]) minor_codes = np.array([0, 1, 0, 1, 0, 1]) index_names = ['first', 'second'] mi = MultiIndex(levels=[major_axis, minor_axis], codes=[major_codes, minor_codes], names=index_names, verify_integrity=False) return mi @pytest.fixture def index_names(): # names that match those in the idx fixture for testing equality of # names assigned to the idx return ['first', 'second'] @pytest.fixture def holder(): # the MultiIndex constructor used to base compatibility with pickle return MultiIndex @pytest.fixture def compat_props(): # a MultiIndex must have these properties associated with it return ['shape', 'ndim', 'size'] @pytest.fixture def narrow_multi_index(): """ Return a MultiIndex that is narrower than the display (<80 characters). """ n = 1000 ci = pd.CategoricalIndex(list('a' * n) + (['abc'] * n)) dti = pd.date_range('2000-01-01', freq='s', periods=n * 2) return pd.MultiIndex.from_arrays([ci, ci.codes + 9, dti], names=['a', 'b', 'dti']) @pytest.fixture def wide_multi_index(): """ Return a MultiIndex that is wider than the display (>80 characters). """ n = 1000 ci = pd.CategoricalIndex(list('a' * n) + (['abc'] * n)) dti = pd.date_range('2000-01-01', freq='s', periods=n * 2) levels = [ci, ci.codes + 9, dti, dti, dti] names = ['a', 'b', 'dti_1', 'dti_2', 'dti_3'] return pd.MultiIndex.from_arrays(levels, names=names)
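The fixtures above build MultiIndexes directly from levels and codes. A small sketch of how those pairs decode, mirroring the idx fixture's values: each code is an integer position into its level.

import numpy as np
from pandas import Index, MultiIndex

mi = MultiIndex(levels=[Index(['foo', 'bar', 'baz', 'qux']),
                        Index(['one', 'two'])],
                codes=[np.array([0, 0, 1, 2, 3, 3]),
                       np.array([0, 1, 0, 1, 0, 1])],
                names=['first', 'second'],
                verify_integrity=False)

print(list(mi))                      # [('foo', 'one'), ('foo', 'two'), ('bar', 'one'), ...]
print(mi.get_level_values('first'))  # codes mapped back through the first level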
cbertinato/pandas
pandas/tests/indexing/multiindex/test_chaining_and_caching.py
pandas/tests/indexes/multi/conftest.py
import numpy as np import pytest from pandas._libs.tslib import iNaT from pandas.core.dtypes.dtypes import CategoricalDtype import pandas as pd from pandas import ( CategoricalIndex, DatetimeIndex, Index, Int64Index, IntervalIndex, MultiIndex, PeriodIndex, RangeIndex, Series, TimedeltaIndex, UInt64Index, isna) from pandas.core.indexes.base import InvalidIndexError from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin import pandas.util.testing as tm class Base: """ base class for index sub-class tests """ _holder = None _compat_props = ['shape', 'ndim', 'size', 'nbytes'] def setup_indices(self): for name, idx in self.indices.items(): setattr(self, name, idx) def test_pickle_compat_construction(self): # need an object to create with msg = (r"Index\(\.\.\.\) must be called with a collection of some" r" kind, None was passed|" r"__new__\(\) missing 1 required positional argument: 'data'|" r"__new__\(\) takes at least 2 arguments \(1 given\)") with pytest.raises(TypeError, match=msg): self._holder() def test_to_series(self): # assert that we are creating a copy of the index idx = self.create_index() s = idx.to_series() assert s.values is not idx.values assert s.index is not idx assert s.name == idx.name def test_to_series_with_arguments(self): # GH18699 # index kwarg idx = self.create_index() s = idx.to_series(index=idx) assert s.values is not idx.values assert s.index is idx assert s.name == idx.name # name kwarg idx = self.create_index() s = idx.to_series(name='__test') assert s.values is not idx.values assert s.index is not idx assert s.name != idx.name @pytest.mark.parametrize("name", [None, "new_name"]) def test_to_frame(self, name): # see GH-15230, GH-22580 idx = self.create_index() if name: idx_name = name else: idx_name = idx.name or 0 df = idx.to_frame(name=idx_name) assert df.index is idx assert len(df.columns) == 1 assert df.columns[0] == idx_name assert df[idx_name].values is not idx.values df = idx.to_frame(index=False, name=idx_name) assert df.index is not idx def test_to_frame_datetime_tz(self): # GH 25809 idx = pd.date_range(start='2019-01-01', end='2019-01-30', freq='D') idx = idx.tz_localize('UTC') result = idx.to_frame() expected = pd.DataFrame(idx, index=idx) tm.assert_frame_equal(result, expected) def test_shift(self): # GH8083 test the base class for shift idx = self.create_index() msg = "Not supported for type {}".format(type(idx).__name__) with pytest.raises(NotImplementedError, match=msg): idx.shift(1) with pytest.raises(NotImplementedError, match=msg): idx.shift(1, 2) def test_create_index_existing_name(self): # GH11193, when an existing index is passed, and a new name is not # specified, the new index should inherit the previous object name expected = self.create_index() if not isinstance(expected, MultiIndex): expected.name = 'foo' result = pd.Index(expected) tm.assert_index_equal(result, expected) result = pd.Index(expected, name='bar') expected.name = 'bar' tm.assert_index_equal(result, expected) else: expected.names = ['foo', 'bar'] result = pd.Index(expected) tm.assert_index_equal( result, Index(Index([('foo', 'one'), ('foo', 'two'), ('bar', 'one'), ('baz', 'two'), ('qux', 'one'), ('qux', 'two')], dtype='object'), names=['foo', 'bar'])) result = pd.Index(expected, names=['A', 'B']) tm.assert_index_equal( result, Index(Index([('foo', 'one'), ('foo', 'two'), ('bar', 'one'), ('baz', 'two'), ('qux', 'one'), ('qux', 'two')], dtype='object'), names=['A', 'B'])) def test_numeric_compat(self): idx = self.create_index() with pytest.raises(TypeError, 
match="cannot perform __mul__"): idx * 1 with pytest.raises(TypeError, match="cannot perform __rmul__"): 1 * idx div_err = "cannot perform __truediv__" with pytest.raises(TypeError, match=div_err): idx / 1 div_err = div_err.replace(' __', ' __r') with pytest.raises(TypeError, match=div_err): 1 / idx with pytest.raises(TypeError, match="cannot perform __floordiv__"): idx // 1 with pytest.raises(TypeError, match="cannot perform __rfloordiv__"): 1 // idx def test_logical_compat(self): idx = self.create_index() with pytest.raises(TypeError, match='cannot perform all'): idx.all() with pytest.raises(TypeError, match='cannot perform any'): idx.any() def test_boolean_context_compat(self): # boolean context compat idx = self.create_index() with pytest.raises(ValueError, match='The truth value of a'): if idx: pass def test_reindex_base(self): idx = self.create_index() expected = np.arange(idx.size, dtype=np.intp) actual = idx.get_indexer(idx) tm.assert_numpy_array_equal(expected, actual) with pytest.raises(ValueError, match='Invalid fill method'): idx.get_indexer(idx, method='invalid') def test_get_indexer_consistency(self): # See GH 16819 for name, index in self.indices.items(): if isinstance(index, IntervalIndex): continue if index.is_unique or isinstance(index, CategoricalIndex): indexer = index.get_indexer(index[0:2]) assert isinstance(indexer, np.ndarray) assert indexer.dtype == np.intp else: e = "Reindexing only valid with uniquely valued Index objects" with pytest.raises(InvalidIndexError, match=e): index.get_indexer(index[0:2]) indexer, _ = index.get_indexer_non_unique(index[0:2]) assert isinstance(indexer, np.ndarray) assert indexer.dtype == np.intp def test_ndarray_compat_properties(self): idx = self.create_index() assert idx.T.equals(idx) assert idx.transpose().equals(idx) values = idx.values for prop in self._compat_props: assert getattr(idx, prop) == getattr(values, prop) # test for validity idx.nbytes idx.values.nbytes def test_repr_roundtrip(self): idx = self.create_index() tm.assert_index_equal(eval(repr(idx)), idx) def test_str(self): # test the string repr idx = self.create_index() idx.name = 'foo' assert "'foo'" in str(idx) assert idx.__class__.__name__ in str(idx) def test_repr_max_seq_item_setting(self): # GH10182 idx = self.create_index() idx = idx.repeat(50) with pd.option_context("display.max_seq_items", None): repr(idx) assert '...' not in str(idx) def test_copy_name(self): # gh-12309: Check that the "name" argument # passed at initialization is honored. for name, index in self.indices.items(): if isinstance(index, MultiIndex): continue first = index.__class__(index, copy=True, name='mario') second = first.__class__(first, copy=False) # Even though "copy=False", we want a new object. assert first is not second # Not using tm.assert_index_equal() since names differ. 
assert index.equals(first) assert first.name == 'mario' assert second.name == 'mario' s1 = Series(2, index=first) s2 = Series(3, index=second[:-1]) if not isinstance(index, CategoricalIndex): # See gh-13365 s3 = s1 * s2 assert s3.index.name == 'mario' def test_ensure_copied_data(self): # Check the "copy" argument of each Index.__new__ is honoured # GH12309 for name, index in self.indices.items(): init_kwargs = {} if isinstance(index, PeriodIndex): # Needs "freq" specification: init_kwargs['freq'] = index.freq elif isinstance(index, (RangeIndex, MultiIndex, CategoricalIndex)): # RangeIndex cannot be initialized from data # MultiIndex and CategoricalIndex are tested separately continue index_type = index.__class__ result = index_type(index.values, copy=True, **init_kwargs) tm.assert_index_equal(index, result) tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='copy') if isinstance(index, PeriodIndex): # .values an object array of Period, thus copied result = index_type(ordinal=index.asi8, copy=False, **init_kwargs) tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='same') elif isinstance(index, IntervalIndex): # checked in test_interval.py pass else: result = index_type(index.values, copy=False, **init_kwargs) tm.assert_numpy_array_equal(index.values, result.values, check_same='same') tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='same') def test_memory_usage(self): for name, index in self.indices.items(): result = index.memory_usage() if len(index): index.get_loc(index[0]) result2 = index.memory_usage() result3 = index.memory_usage(deep=True) # RangeIndex, IntervalIndex # don't have engines if not isinstance(index, (RangeIndex, IntervalIndex)): assert result2 > result if index.inferred_type == 'object': assert result3 > result2 else: # we report 0 for no-length assert result == 0 def test_argsort(self): for k, ind in self.indices.items(): # separately tested if k in ['catIndex']: continue result = ind.argsort() expected = np.array(ind).argsort() tm.assert_numpy_array_equal(result, expected, check_dtype=False) def test_numpy_argsort(self): for k, ind in self.indices.items(): result = np.argsort(ind) expected = ind.argsort() tm.assert_numpy_array_equal(result, expected) # these are the only two types that perform # pandas compatibility input validation - the # rest already perform separate (or no) such # validation via their 'values' attribute as # defined in pandas.core.indexes/base.py - they # cannot be changed at the moment due to # backwards compatibility concerns if isinstance(type(ind), (CategoricalIndex, RangeIndex)): msg = "the 'axis' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, axis=1) msg = "the 'kind' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, kind='mergesort') msg = "the 'order' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, order=('a', 'b')) def test_take(self): indexer = [4, 3, 0, 2] for k, ind in self.indices.items(): # separate if k in ['boolIndex', 'tuples', 'empty']: continue result = ind.take(indexer) expected = ind[indexer] assert result.equals(expected) if not isinstance(ind, (DatetimeIndex, PeriodIndex, TimedeltaIndex)): # GH 10791 with pytest.raises(AttributeError): ind.freq def test_take_invalid_kwargs(self): idx = self.create_index() indices = [1, 2] msg = r"take\(\) got an unexpected keyword argument 'foo'" with 
pytest.raises(TypeError, match=msg): idx.take(indices, foo=2) msg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=msg): idx.take(indices, out=indices) msg = "the 'mode' parameter is not supported" with pytest.raises(ValueError, match=msg): idx.take(indices, mode='clip') def test_repeat(self): rep = 2 i = self.create_index() expected = pd.Index(i.values.repeat(rep), name=i.name) tm.assert_index_equal(i.repeat(rep), expected) i = self.create_index() rep = np.arange(len(i)) expected = pd.Index(i.values.repeat(rep), name=i.name) tm.assert_index_equal(i.repeat(rep), expected) def test_numpy_repeat(self): rep = 2 i = self.create_index() expected = i.repeat(rep) tm.assert_index_equal(np.repeat(i, rep), expected) msg = "the 'axis' parameter is not supported" with pytest.raises(ValueError, match=msg): np.repeat(i, rep, axis=0) @pytest.mark.parametrize('klass', [list, tuple, np.array, Series]) def test_where(self, klass): i = self.create_index() cond = [True] * len(i) result = i.where(klass(cond)) expected = i tm.assert_index_equal(result, expected) cond = [False] + [True] * len(i[1:]) expected = pd.Index([i._na_value] + i[1:].tolist(), dtype=i.dtype) result = i.where(klass(cond)) tm.assert_index_equal(result, expected) @pytest.mark.parametrize("case", [0.5, "xxx"]) @pytest.mark.parametrize("method", ["intersection", "union", "difference", "symmetric_difference"]) def test_set_ops_error_cases(self, case, method): for name, idx in self.indices.items(): # non-iterable input msg = "Input must be Index or array-like" with pytest.raises(TypeError, match=msg): getattr(idx, method)(case) def test_intersection_base(self): for name, idx in self.indices.items(): first = idx[:5] second = idx[:3] intersect = first.intersection(second) if isinstance(idx, CategoricalIndex): pass else: assert tm.equalContents(intersect, second) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.intersection(case) assert tm.equalContents(result, second) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.intersection([1, 2, 3]) def test_union_base(self): for name, idx in self.indices.items(): first = idx[3:] second = idx[:5] everything = idx union = first.union(second) assert tm.equalContents(union, everything) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.union(case) assert tm.equalContents(result, everything) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.union([1, 2, 3]) @pytest.mark.parametrize("sort", [None, False]) def test_difference_base(self, sort): for name, idx in self.indices.items(): first = idx[2:] second = idx[:4] answer = idx[4:] result = first.difference(second, sort) if isinstance(idx, CategoricalIndex): pass else: assert tm.equalContents(result, answer) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass elif isinstance(idx, (DatetimeIndex, TimedeltaIndex)): assert result.__class__ == answer.__class__ tm.assert_numpy_array_equal(result.sort_values().asi8, answer.sort_values().asi8) else: result = first.difference(case, sort) assert tm.equalContents(result, answer) if isinstance(idx, MultiIndex): msg = 
"other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.difference([1, 2, 3], sort) def test_symmetric_difference(self): for name, idx in self.indices.items(): first = idx[1:] second = idx[:-1] if isinstance(idx, CategoricalIndex): pass else: answer = idx[[0, -1]] result = first.symmetric_difference(second) assert tm.equalContents(result, answer) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.symmetric_difference(case) assert tm.equalContents(result, answer) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.symmetric_difference([1, 2, 3]) def test_insert_base(self): for name, idx in self.indices.items(): result = idx[1:4] if not len(idx): continue # test 0th element assert idx[0:4].equals(result.insert(0, idx[0])) def test_delete_base(self): for name, idx in self.indices.items(): if not len(idx): continue if isinstance(idx, RangeIndex): # tested in class continue expected = idx[1:] result = idx.delete(0) assert result.equals(expected) assert result.name == expected.name expected = idx[:-1] result = idx.delete(-1) assert result.equals(expected) assert result.name == expected.name with pytest.raises((IndexError, ValueError)): # either depending on numpy version idx.delete(len(idx)) def test_equals(self): for name, idx in self.indices.items(): assert idx.equals(idx) assert idx.equals(idx.copy()) assert idx.equals(idx.astype(object)) assert not idx.equals(list(idx)) assert not idx.equals(np.array(idx)) # Cannot pass in non-int64 dtype to RangeIndex if not isinstance(idx, RangeIndex): same_values = Index(idx, dtype=object) assert idx.equals(same_values) assert same_values.equals(idx) if idx.nlevels == 1: # do not test MultiIndex assert not idx.equals(pd.Series(idx)) def test_equals_op(self): # GH9947, GH10637 index_a = self.create_index() if isinstance(index_a, PeriodIndex): pytest.skip('Skip check for PeriodIndex') n = len(index_a) index_b = index_a[0:-1] index_c = index_a[0:-1].append(index_a[-2:-1]) index_d = index_a[0:1] msg = "Lengths must match|could not be broadcast" with pytest.raises(ValueError, match=msg): index_a == index_b expected1 = np.array([True] * n) expected2 = np.array([True] * (n - 1) + [False]) tm.assert_numpy_array_equal(index_a == index_a, expected1) tm.assert_numpy_array_equal(index_a == index_c, expected2) # test comparisons with numpy arrays array_a = np.array(index_a) array_b = np.array(index_a[0:-1]) array_c = np.array(index_a[0:-1].append(index_a[-2:-1])) array_d = np.array(index_a[0:1]) with pytest.raises(ValueError, match=msg): index_a == array_b tm.assert_numpy_array_equal(index_a == array_a, expected1) tm.assert_numpy_array_equal(index_a == array_c, expected2) # test comparisons with Series series_a = Series(array_a) series_b = Series(array_b) series_c = Series(array_c) series_d = Series(array_d) with pytest.raises(ValueError, match=msg): index_a == series_b tm.assert_numpy_array_equal(index_a == series_a, expected1) tm.assert_numpy_array_equal(index_a == series_c, expected2) # cases where length is 1 for one of them with pytest.raises(ValueError, match="Lengths must match"): index_a == index_d with pytest.raises(ValueError, match="Lengths must match"): index_a == series_d with pytest.raises(ValueError, match="Lengths must match"): index_a == array_d msg = "Can only compare identically-labeled Series objects" with 
pytest.raises(ValueError, match=msg): series_a == series_d with pytest.raises(ValueError, match="Lengths must match"): series_a == array_d # comparing with a scalar should broadcast; note that we are excluding # MultiIndex because in this case each item in the index is a tuple of # length 2, and therefore is considered an array of length 2 in the # comparison instead of a scalar if not isinstance(index_a, MultiIndex): expected3 = np.array([False] * (len(index_a) - 2) + [True, False]) # assuming the 2nd to last item is unique in the data item = index_a[-2] tm.assert_numpy_array_equal(index_a == item, expected3) tm.assert_series_equal(series_a == item, Series(expected3)) def test_hasnans_isnans(self): # GH 11343, added tests for hasnans / isnans for name, index in self.indices.items(): if isinstance(index, MultiIndex): pass else: idx = index.copy() # cases in indices doesn't include NaN expected = np.array([False] * len(idx), dtype=bool) tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is False idx = index.copy() values = np.asarray(idx.values) if len(index) == 0: continue elif isinstance(index, DatetimeIndexOpsMixin): values[1] = iNaT elif isinstance(index, (Int64Index, UInt64Index)): continue else: values[1] = np.nan if isinstance(index, PeriodIndex): idx = index.__class__(values, freq=index.freq) else: idx = index.__class__(values) expected = np.array([False] * len(idx), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is True def test_fillna(self): # GH 11343 for name, index in self.indices.items(): if len(index) == 0: pass elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with pytest.raises(NotImplementedError, match=msg): idx.fillna(idx[0]) else: idx = index.copy() result = idx.fillna(idx[0]) tm.assert_index_equal(result, idx) assert result is not idx msg = "'value' must be a scalar, passed: " with pytest.raises(TypeError, match=msg): idx.fillna([idx[0]]) idx = index.copy() values = np.asarray(idx.values) if isinstance(index, DatetimeIndexOpsMixin): values[1] = iNaT elif isinstance(index, (Int64Index, UInt64Index)): continue else: values[1] = np.nan if isinstance(index, PeriodIndex): idx = index.__class__(values, freq=index.freq) else: idx = index.__class__(values) expected = np.array([False] * len(idx), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is True def test_nulls(self): # this is really a smoke test for the methods # as these are adequately tested for function elsewhere for name, index in self.indices.items(): if len(index) == 0: tm.assert_numpy_array_equal( index.isna(), np.array([], dtype=bool)) elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with pytest.raises(NotImplementedError, match=msg): idx.isna() else: if not index.hasnans: tm.assert_numpy_array_equal( index.isna(), np.zeros(len(index), dtype=bool)) tm.assert_numpy_array_equal( index.notna(), np.ones(len(index), dtype=bool)) else: result = isna(index) tm.assert_numpy_array_equal(index.isna(), result) tm.assert_numpy_array_equal(index.notna(), ~result) def test_empty(self): # GH 15270 index = self.create_index() assert not index.empty assert index[:0].empty def test_join_self_unique(self, join_type): index = self.create_index() if index.is_unique: joined = index.join(index, how=join_type) assert (index == joined).all() def test_map(self): # callable index = self.create_index() # we don't infer 
UInt64 if isinstance(index, pd.UInt64Index): expected = index.astype('int64') else: expected = index result = index.map(lambda x: x) tm.assert_index_equal(result, expected) @pytest.mark.parametrize( "mapper", [ lambda values, index: {i: e for e, i in zip(values, index)}, lambda values, index: pd.Series(values, index)]) def test_map_dictlike(self, mapper): index = self.create_index() if isinstance(index, (pd.CategoricalIndex, pd.IntervalIndex)): pytest.skip("skipping tests for {}".format(type(index))) identity = mapper(index.values, index) # we don't infer to UInt64 for a dict if isinstance(index, pd.UInt64Index) and isinstance(identity, dict): expected = index.astype('int64') else: expected = index result = index.map(identity) tm.assert_index_equal(result, expected) # empty mappable expected = pd.Index([np.nan] * len(index)) result = index.map(mapper(expected, index)) tm.assert_index_equal(result, expected) def test_putmask_with_wrong_mask(self): # GH18368 index = self.create_index() with pytest.raises(ValueError): index.putmask(np.ones(len(index) + 1, np.bool), 1) with pytest.raises(ValueError): index.putmask(np.ones(len(index) - 1, np.bool), 1) with pytest.raises(ValueError): index.putmask('foo', 1) @pytest.mark.parametrize('copy', [True, False]) @pytest.mark.parametrize('name', [None, 'foo']) @pytest.mark.parametrize('ordered', [True, False]) def test_astype_category(self, copy, name, ordered): # GH 18630 index = self.create_index() if name: index = index.rename(name) # standard categories dtype = CategoricalDtype(ordered=ordered) result = index.astype(dtype, copy=copy) expected = CategoricalIndex(index.values, name=name, ordered=ordered) tm.assert_index_equal(result, expected) # non-standard categories dtype = CategoricalDtype(index.unique().tolist()[:-1], ordered) result = index.astype(dtype, copy=copy) expected = CategoricalIndex(index.values, name=name, dtype=dtype) tm.assert_index_equal(result, expected) if ordered is False: # dtype='category' defaults to ordered=False, so only test once result = index.astype('category', copy=copy) expected = CategoricalIndex(index.values, name=name) tm.assert_index_equal(result, expected) def test_is_unique(self): # initialize a unique index index = self.create_index().drop_duplicates() assert index.is_unique is True # empty index should be unique index_empty = index[:0] assert index_empty.is_unique is True # test basic dupes index_dup = index.insert(0, index[0]) assert index_dup.is_unique is False # single NA should be unique index_na = index.insert(0, np.nan) assert index_na.is_unique is True # multiple NA should not be unique index_na_dup = index_na.insert(0, np.nan) assert index_na_dup.is_unique is False
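A quick, hedged demonstration of the NA-uniqueness semantics that test_is_unique above asserts, using a plain float Index:

import numpy as np
import pandas as pd

idx = pd.Index([1.0, 2.0, 3.0])
print(idx.is_unique)                    # True
print(idx.insert(0, np.nan).is_unique)  # True: a single NA is still unique

two_na = idx.insert(0, np.nan).insert(0, np.nan)
print(two_na.is_unique)                 # False: repeated NA breaks uniqueness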
cbertinato/pandas
pandas/tests/indexing/multiindex/test_chaining_and_caching.py
pandas/tests/indexes/common.py
import numpy as np from pandas._libs import algos as libalgos, index as libindex import pandas.util.testing as tm class TestNumericEngine: def test_is_monotonic(self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype num = 1000 arr = np.array([1] * num + [2] * num + [3] * num, dtype=dtype) # monotonic increasing engine = engine_type(lambda: arr, len(arr)) assert engine.is_monotonic_increasing is True assert engine.is_monotonic_decreasing is False # monotonic decreasing engine = engine_type(lambda: arr[::-1], len(arr)) assert engine.is_monotonic_increasing is False assert engine.is_monotonic_decreasing is True # neither monotonic increasing or decreasing arr = np.array([1] * num + [2] * num + [1] * num, dtype=dtype) engine = engine_type(lambda: arr[::-1], len(arr)) assert engine.is_monotonic_increasing is False assert engine.is_monotonic_decreasing is False def test_is_unique(self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype # unique arr = np.array([1, 3, 2], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) assert engine.is_unique is True # not unique arr = np.array([1, 2, 1], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) assert engine.is_unique is False def test_get_loc(self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype # unique arr = np.array([1, 2, 3], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) assert engine.get_loc(2) == 1 # monotonic num = 1000 arr = np.array([1] * num + [2] * num + [3] * num, dtype=dtype) engine = engine_type(lambda: arr, len(arr)) assert engine.get_loc(2) == slice(1000, 2000) # not monotonic arr = np.array([1, 2, 3] * num, dtype=dtype) engine = engine_type(lambda: arr, len(arr)) expected = np.array([False, True, False] * num, dtype=bool) result = engine.get_loc(2) assert (result == expected).all() def test_get_backfill_indexer( self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype arr = np.array([1, 5, 10], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) new = np.arange(12, dtype=dtype) result = engine.get_backfill_indexer(new) expected = libalgos.backfill(arr, new) tm.assert_numpy_array_equal(result, expected) def test_get_pad_indexer( self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype arr = np.array([1, 5, 10], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) new = np.arange(12, dtype=dtype) result = engine.get_pad_indexer(new) expected = libalgos.pad(arr, new) tm.assert_numpy_array_equal(result, expected) class TestObjectEngine: engine_type = libindex.ObjectEngine dtype = np.object_ values = list('abc') def test_is_monotonic(self): num = 1000 arr = np.array(['a'] * num + ['a'] * num + ['c'] * num, dtype=self.dtype) # monotonic increasing engine = self.engine_type(lambda: arr, len(arr)) assert engine.is_monotonic_increasing is True assert engine.is_monotonic_decreasing is False # monotonic decreasing engine = self.engine_type(lambda: arr[::-1], len(arr)) assert engine.is_monotonic_increasing is False assert engine.is_monotonic_decreasing is True # neither monotonic increasing or decreasing arr = np.array(['a'] * num + ['b'] * num + ['a'] * num, dtype=self.dtype) engine = self.engine_type(lambda: arr[::-1], len(arr)) assert engine.is_monotonic_increasing is False assert engine.is_monotonic_decreasing is False def test_is_unique(self): # unique arr = 
np.array(self.values, dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) assert engine.is_unique is True # not unique arr = np.array(['a', 'b', 'a'], dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) assert engine.is_unique is False def test_get_loc(self): # unique arr = np.array(self.values, dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) assert engine.get_loc('b') == 1 # monotonic num = 1000 arr = np.array(['a'] * num + ['b'] * num + ['c'] * num, dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) assert engine.get_loc('b') == slice(1000, 2000) # not monotonic arr = np.array(self.values * num, dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) expected = np.array([False, True, False] * num, dtype=bool) result = engine.get_loc('b') assert (result == expected).all() def test_get_backfill_indexer(self): arr = np.array(['a', 'e', 'j'], dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) new = np.array(list('abcdefghij'), dtype=self.dtype) result = engine.get_backfill_indexer(new) expected = libalgos.backfill["object"](arr, new) tm.assert_numpy_array_equal(result, expected) def test_get_pad_indexer(self): arr = np.array(['a', 'e', 'j'], dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) new = np.array(list('abcdefghij'), dtype=self.dtype) result = engine.get_pad_indexer(new) expected = libalgos.pad["object"](arr, new) tm.assert_numpy_array_equal(result, expected)
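The engine behavior tested above is also visible through the public Index.get_loc, which makes the three return shapes easy to check: an integer for unique labels, a slice for monotonic duplicates, and a boolean mask otherwise. A minimal sketch:

import pandas as pd

print(pd.Index([1, 2, 3]).get_loc(2))     # 1 (unique -> integer position)
print(pd.Index([1, 2, 2, 3]).get_loc(2))  # slice(1, 3, None) (monotonic duplicates)
print(pd.Index([1, 2, 3, 2]).get_loc(2))  # boolean mask (non-monotonic duplicates)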
cbertinato/pandas
pandas/tests/indexing/multiindex/test_chaining_and_caching.py
pandas/tests/indexing/test_indexing_engines.py
""" Arithmetic operations for PandasObjects This is not a public API. """ import datetime import operator import textwrap from typing import Dict, Optional import warnings import numpy as np from pandas._libs import algos as libalgos, lib, ops as libops from pandas.errors import NullFrequencyError from pandas.util._decorators import Appender from pandas.core.dtypes.cast import ( construct_1d_object_array_from_listlike, find_common_type, maybe_upcast_putmask) from pandas.core.dtypes.common import ( ensure_object, is_bool_dtype, is_categorical_dtype, is_datetime64_dtype, is_datetime64tz_dtype, is_datetimelike_v_numeric, is_extension_array_dtype, is_integer_dtype, is_list_like, is_object_dtype, is_period_dtype, is_scalar, is_timedelta64_dtype, needs_i8_conversion) from pandas.core.dtypes.generic import ( ABCDataFrame, ABCIndex, ABCIndexClass, ABCSeries, ABCSparseArray, ABCSparseSeries) from pandas.core.dtypes.missing import isna, notna import pandas as pd import pandas.core.common as com import pandas.core.missing as missing # ----------------------------------------------------------------------------- # Ops Wrapping Utilities def get_op_result_name(left, right): """ Find the appropriate name to pin to an operation result. This result should always be either an Index or a Series. Parameters ---------- left : {Series, Index} right : object Returns ------- name : object Usually a string """ # `left` is always a pd.Series when called from within ops if isinstance(right, (ABCSeries, pd.Index)): name = _maybe_match_name(left, right) else: name = left.name return name def _maybe_match_name(a, b): """ Try to find a name to attach to the result of an operation between a and b. If only one of these has a `name` attribute, return that name. Otherwise return a consensus name if they match of None if they have different names. Parameters ---------- a : object b : object Returns ------- name : str or None See Also -------- pandas.core.common.consensus_name_attr """ a_has = hasattr(a, 'name') b_has = hasattr(b, 'name') if a_has and b_has: if a.name == b.name: return a.name else: # TODO: what if they both have np.nan for their names? return None elif a_has: return a.name elif b_has: return b.name return None def maybe_upcast_for_op(obj): """ Cast non-pandas objects to pandas types to unify behavior of arithmetic and comparison operations. Parameters ---------- obj: object Returns ------- out : object Notes ----- Be careful to call this *after* determining the `name` attribute to be attached to the result of the arithmetic operation. """ if type(obj) is datetime.timedelta: # GH#22390 cast up to Timedelta to rely on Timedelta # implementation; otherwise operation against numeric-dtype # raises TypeError return pd.Timedelta(obj) elif isinstance(obj, np.timedelta64) and not isna(obj): # In particular non-nanosecond timedelta64 needs to be cast to # nanoseconds, or else we get undesired behavior like # np.timedelta64(3, 'D') / 2 == np.timedelta64(1, 'D') # The isna check is to avoid casting timedelta64("NaT"), which would # return NaT and incorrectly be treated as a datetime-NaT. 
return pd.Timedelta(obj) elif isinstance(obj, np.ndarray) and is_timedelta64_dtype(obj): # GH#22390 Unfortunately we need to special-case right-hand # timedelta64 dtypes because numpy casts integer dtypes to # timedelta64 when operating with timedelta64 return pd.TimedeltaIndex(obj) return obj # ----------------------------------------------------------------------------- # Reversed Operations not available in the stdlib operator module. # Defining these instead of using lambdas allows us to reference them by name. def radd(left, right): return right + left def rsub(left, right): return right - left def rmul(left, right): return right * left def rdiv(left, right): return right / left def rtruediv(left, right): return right / left def rfloordiv(left, right): return right // left def rmod(left, right): # check if right is a string as % is the string # formatting operation; this is a TypeError # otherwise perform the op if isinstance(right, str): raise TypeError("{typ} cannot perform the operation mod".format( typ=type(left).__name__)) return right % left def rdivmod(left, right): return divmod(right, left) def rpow(left, right): return right ** left def rand_(left, right): return operator.and_(right, left) def ror_(left, right): return operator.or_(right, left) def rxor(left, right): return operator.xor(right, left) # ----------------------------------------------------------------------------- def make_invalid_op(name): """ Return a binary method that always raises a TypeError. Parameters ---------- name : str Returns ------- invalid_op : function """ def invalid_op(self, other=None): raise TypeError("cannot perform {name} with this index type: " "{typ}".format(name=name, typ=type(self).__name__)) invalid_op.__name__ = name return invalid_op def _gen_eval_kwargs(name): """ Find the keyword arguments to pass to numexpr for the given operation. Parameters ---------- name : str Returns ------- eval_kwargs : dict Examples -------- >>> _gen_eval_kwargs("__add__") {} >>> _gen_eval_kwargs("rtruediv") {'reversed': True, 'truediv': True} """ kwargs = {} # Series appear to only pass __add__, __radd__, ... # but DataFrame gets both these dunder names _and_ non-dunder names # add, radd, ... name = name.replace('__', '') if name.startswith('r'): if name not in ['radd', 'rand', 'ror', 'rxor']: # Exclude commutative operations kwargs['reversed'] = True if name in ['truediv', 'rtruediv']: kwargs['truediv'] = True if name in ['ne']: kwargs['masker'] = True return kwargs def _gen_fill_zeros(name): """ Find the appropriate fill value to use when filling in undefined values in the results of the given operation caused by operating on (generally dividing by) zero. Parameters ---------- name : str Returns ------- fill_value : {None, np.nan, np.inf} """ name = name.strip('__') if 'div' in name: # truediv, floordiv, div, and reversed variants fill_value = np.inf elif 'mod' in name: # mod, rmod fill_value = np.nan else: fill_value = None return fill_value def _get_frame_op_default_axis(name): """ Only DataFrame cares about default_axis, specifically: special methods have default_axis=None and flex methods have default_axis='columns'. Parameters ---------- name : str Returns ------- default_axis: str or None """ if name.replace('__r', '__') in ['__and__', '__or__', '__xor__']: # bool methods return 'columns' elif name.startswith('__'): # __add__, __mul__, ... return None else: # add, mul, ... return 'columns' def _get_opstr(op, cls): """ Find the operation string, if any, to pass to numexpr for this operation. 
Parameters ---------- op : binary operator cls : class Returns ------- op_str : string or None """ # numexpr is available for non-sparse classes subtyp = getattr(cls, '_subtyp', '') use_numexpr = 'sparse' not in subtyp if not use_numexpr: # if we're not using numexpr, then don't pass a str_rep return None return {operator.add: '+', radd: '+', operator.mul: '*', rmul: '*', operator.sub: '-', rsub: '-', operator.truediv: '/', rtruediv: '/', operator.floordiv: '//', rfloordiv: '//', operator.mod: None, # TODO: Why None for mod but '%' for rmod? rmod: '%', operator.pow: '**', rpow: '**', operator.eq: '==', operator.ne: '!=', operator.le: '<=', operator.lt: '<', operator.ge: '>=', operator.gt: '>', operator.and_: '&', rand_: '&', operator.or_: '|', ror_: '|', operator.xor: '^', rxor: '^', divmod: None, rdivmod: None}[op] def _get_op_name(op, special): """ Find the name to attach to this method according to conventions for special and non-special methods. Parameters ---------- op : binary operator special : bool Returns ------- op_name : str """ opname = op.__name__.strip('_') if special: opname = '__{opname}__'.format(opname=opname) return opname # ----------------------------------------------------------------------------- # Docstring Generation and Templates _add_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.add(b, fill_value=0) a 2.0 b 1.0 c 1.0 d 1.0 e NaN dtype: float64 """ _sub_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.subtract(b, fill_value=0) a 0.0 b 1.0 c 1.0 d -1.0 e NaN dtype: float64 """ _mul_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.multiply(b, fill_value=0) a 1.0 b 0.0 c 0.0 d 0.0 e NaN dtype: float64 """ _div_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.divide(b, fill_value=0) a 1.0 b inf c inf d 0.0 e NaN dtype: float64 """ _floordiv_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.floordiv(b, fill_value=0) a 1.0 b NaN c NaN d 0.0 e NaN dtype: float64 """ _mod_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.mod(b, fill_value=0) a 0.0 b NaN c NaN d 0.0 e NaN dtype: float64 """ _pow_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 
1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.pow(b, fill_value=0) a 1.0 b 1.0 c 1.0 d 0.0 e NaN dtype: float64 """ _op_descriptions = { # Arithmetic Operators 'add': {'op': '+', 'desc': 'Addition', 'reverse': 'radd', 'series_examples': _add_example_SERIES}, 'sub': {'op': '-', 'desc': 'Subtraction', 'reverse': 'rsub', 'series_examples': _sub_example_SERIES}, 'mul': {'op': '*', 'desc': 'Multiplication', 'reverse': 'rmul', 'series_examples': _mul_example_SERIES, 'df_examples': None}, 'mod': {'op': '%', 'desc': 'Modulo', 'reverse': 'rmod', 'series_examples': _mod_example_SERIES}, 'pow': {'op': '**', 'desc': 'Exponential power', 'reverse': 'rpow', 'series_examples': _pow_example_SERIES, 'df_examples': None}, 'truediv': {'op': '/', 'desc': 'Floating division', 'reverse': 'rtruediv', 'series_examples': _div_example_SERIES, 'df_examples': None}, 'floordiv': {'op': '//', 'desc': 'Integer division', 'reverse': 'rfloordiv', 'series_examples': _floordiv_example_SERIES, 'df_examples': None}, 'divmod': {'op': 'divmod', 'desc': 'Integer division and modulo', 'reverse': 'rdivmod', 'series_examples': None, 'df_examples': None}, # Comparison Operators 'eq': {'op': '==', 'desc': 'Equal to', 'reverse': None, 'series_examples': None}, 'ne': {'op': '!=', 'desc': 'Not equal to', 'reverse': None, 'series_examples': None}, 'lt': {'op': '<', 'desc': 'Less than', 'reverse': None, 'series_examples': None}, 'le': {'op': '<=', 'desc': 'Less than or equal to', 'reverse': None, 'series_examples': None}, 'gt': {'op': '>', 'desc': 'Greater than', 'reverse': None, 'series_examples': None}, 'ge': {'op': '>=', 'desc': 'Greater than or equal to', 'reverse': None, 'series_examples': None} } # type: Dict[str, Dict[str, Optional[str]]] _op_names = list(_op_descriptions.keys()) for key in _op_names: reverse_op = _op_descriptions[key]['reverse'] if reverse_op is not None: _op_descriptions[reverse_op] = _op_descriptions[key].copy() _op_descriptions[reverse_op]['reverse'] = key _flex_doc_SERIES = """ Return {desc} of series and other, element-wise (binary operator `{op_name}`). Equivalent to ``{equiv}``, but with support to substitute a fill_value for missing data in one of the inputs. Parameters ---------- other : Series or scalar value fill_value : None or float value, default None (NaN) Fill existing missing (NaN) values, and any new element needed for successful Series alignment, with this value before computation. If data in both corresponding Series locations is missing the result will be missing. level : int or name Broadcast across a level, matching Index values on the passed MultiIndex level. Returns ------- Series The result of the operation. See Also -------- Series.{reverse} """ _arith_doc_FRAME = """ Binary operator %s with support to substitute a fill_value for missing data in one of the inputs Parameters ---------- other : Series, DataFrame, or constant axis : {0, 1, 'index', 'columns'} For Series input, axis to match Series index on fill_value : None or float value, default None Fill existing missing (NaN) values, and any new element needed for successful DataFrame alignment, with this value before computation. 
    If data in both corresponding DataFrame locations is missing
    the result will be missing
level : int or name
    Broadcast across a level, matching Index values on the
    passed MultiIndex level

Returns
-------
result : DataFrame

Notes
-----
Mismatched indices will be unioned together
"""

_flex_doc_FRAME = """
Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`).

Equivalent to ``{equiv}``, but with support to substitute a fill_value
for missing data in one of the inputs. With reverse version, `{reverse}`.

Among flexible wrappers (`add`, `sub`, `mul`, `div`, `mod`, `pow`) to
arithmetic operators: `+`, `-`, `*`, `/`, `//`, `%`, `**`.

Parameters
----------
other : scalar, sequence, Series, or DataFrame
    Any single or multiple element data structure, or list-like object.
axis : {{0 or 'index', 1 or 'columns'}}
    Whether to compare by the index (0 or 'index') or columns
    (1 or 'columns'). For Series input, axis to match Series index on.
level : int or label
    Broadcast across a level, matching Index values on the
    passed MultiIndex level.
fill_value : float or None, default None
    Fill existing missing (NaN) values, and any new element needed for
    successful DataFrame alignment, with this value before computation.
    If data in both corresponding DataFrame locations is missing
    the result will be missing.

Returns
-------
DataFrame
    Result of the arithmetic operation.

See Also
--------
DataFrame.add : Add DataFrames.
DataFrame.sub : Subtract DataFrames.
DataFrame.mul : Multiply DataFrames.
DataFrame.div : Divide DataFrames (float division).
DataFrame.truediv : Divide DataFrames (float division).
DataFrame.floordiv : Divide DataFrames (integer division).
DataFrame.mod : Calculate modulo (remainder after division).
DataFrame.pow : Calculate exponential power.

Notes
-----
Mismatched indices will be unioned together.

Examples
--------
>>> df = pd.DataFrame({{'angles': [0, 3, 4],
...                    'degrees': [360, 180, 360]}},
...                   index=['circle', 'triangle', 'rectangle'])
>>> df
           angles  degrees
circle          0      360
triangle        3      180
rectangle       4      360

Add a scalar with operator version which returns the same
results.

>>> df + 1
           angles  degrees
circle          1      361
triangle        4      181
rectangle       5      361

>>> df.add(1)
           angles  degrees
circle          1      361
triangle        4      181
rectangle       5      361

Divide by constant with reverse version.

>>> df.div(10)
           angles  degrees
circle        0.0     36.0
triangle      0.3     18.0
rectangle     0.4     36.0

>>> df.rdiv(10)
             angles   degrees
circle          inf  0.027778
triangle   3.333333  0.055556
rectangle  2.500000  0.027778

Subtract a list and Series by axis with operator version.

>>> df - [1, 2]
           angles  degrees
circle         -1      358
triangle        2      178
rectangle       3      358

>>> df.sub([1, 2], axis='columns')
           angles  degrees
circle         -1      358
triangle        2      178
rectangle       3      358

>>> df.sub(pd.Series([1, 1, 1], index=['circle', 'triangle', 'rectangle']),
...        axis='index')
           angles  degrees
circle         -1      359
triangle        2      179
rectangle       3      359

Multiply a DataFrame of different shape with operator version.

>>> other = pd.DataFrame({{'angles': [0, 3, 4]}},
...                      index=['circle', 'triangle', 'rectangle'])
>>> other
           angles
circle          0
triangle        3
rectangle       4

>>> df * other
           angles  degrees
circle          0      NaN
triangle        9      NaN
rectangle      16      NaN

>>> df.mul(other, fill_value=0)
           angles  degrees
circle          0      0.0
triangle        9      0.0
rectangle      16      0.0

Divide by a MultiIndex by level.

>>> df_multindex = pd.DataFrame({{'angles': [0, 3, 4, 4, 5, 6],
...                              'degrees': [360, 180, 360, 360, 540, 720]}},
...                             index=[['A', 'A', 'A', 'B', 'B', 'B'],
...                                    ['circle', 'triangle', 'rectangle',
...                                     'square', 'pentagon', 'hexagon']])
>>> df_multindex
             angles  degrees
A circle          0      360
  triangle        3      180
  rectangle       4      360
B square          4      360
  pentagon        5      540
  hexagon         6      720

>>> df.div(df_multindex, level=1, fill_value=0)
             angles  degrees
A circle        NaN      1.0
  triangle      1.0      1.0
  rectangle     1.0      1.0
B square        0.0      0.0
  pentagon      0.0      0.0
  hexagon       0.0      0.0
"""

_flex_comp_doc_FRAME = """
Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`).

Among flexible wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) to comparison
operators.

Equivalent to `==`, `!=`, `<=`, `<`, `>=`, `>` with support to choose axis
(rows or columns) and level for comparison.

Parameters
----------
other : scalar, sequence, Series, or DataFrame
    Any single or multiple element data structure, or list-like object.
axis : {{0 or 'index', 1 or 'columns'}}, default 'columns'
    Whether to compare by the index (0 or 'index') or columns
    (1 or 'columns').
level : int or label
    Broadcast across a level, matching Index values on the
    passed MultiIndex level.

Returns
-------
DataFrame of bool
    Result of the comparison.

See Also
--------
DataFrame.eq : Compare DataFrames for equality elementwise.
DataFrame.ne : Compare DataFrames for inequality elementwise.
DataFrame.le : Compare DataFrames for less than inequality
    or equality elementwise.
DataFrame.lt : Compare DataFrames for strictly less than
    inequality elementwise.
DataFrame.ge : Compare DataFrames for greater than inequality
    or equality elementwise.
DataFrame.gt : Compare DataFrames for strictly greater than
    inequality elementwise.

Notes
-----
Mismatched indices will be unioned together.
`NaN` values are considered different (i.e. `NaN` != `NaN`).

Examples
--------
>>> df = pd.DataFrame({{'cost': [250, 150, 100],
...                    'revenue': [100, 250, 300]}},
...                   index=['A', 'B', 'C'])
>>> df
   cost  revenue
A   250      100
B   150      250
C   100      300

Comparison with a scalar, using either the operator or method:

>>> df == 100
    cost  revenue
A  False     True
B  False    False
C   True    False

>>> df.eq(100)
    cost  revenue
A  False     True
B  False    False
C   True    False

When `other` is a :class:`Series`, the columns of a DataFrame are aligned
with the index of `other` and broadcast:

>>> df != pd.Series([100, 250], index=["cost", "revenue"])
    cost  revenue
A   True     True
B   True    False
C  False     True

Use the method to control the broadcast axis:

>>> df.ne(pd.Series([100, 300], index=["A", "D"]), axis='index')
   cost  revenue
A  True    False
B  True     True
C  True     True
D  True     True

When comparing to an arbitrary sequence, the number of columns must
match the number of elements in `other`:

>>> df == [250, 100]
    cost  revenue
A   True     True
B  False    False
C  False    False

Use the method to control the axis:

>>> df.eq([250, 250, 100], axis='index')
    cost  revenue
A   True    False
B  False     True
C   True    False

Compare to a DataFrame of different shape.

>>> other = pd.DataFrame({{'revenue': [300, 250, 100, 150]}},
...                      index=['A', 'B', 'C', 'D'])
>>> other
   revenue
A      300
B      250
C      100
D      150

>>> df.gt(other)
    cost  revenue
A  False    False
B  False    False
C  False     True
D  False    False

Compare to a MultiIndex by level.

>>> df_multindex = pd.DataFrame({{'cost': [250, 150, 100, 150, 300, 220],
...                              'revenue': [100, 250, 300, 200, 175, 225]}},
...                             index=[['Q1', 'Q1', 'Q1', 'Q2', 'Q2', 'Q2'],
...                                    ['A', 'B', 'C', 'A', 'B', 'C']])
>>> df_multindex
      cost  revenue
Q1 A   250      100
   B   150      250
   C   100      300
Q2 A   150      200
   B   300      175
   C   220      225

>>> df.le(df_multindex, level=1)
       cost  revenue
Q1 A   True     True
   B   True     True
   C   True     True
Q2 A  False     True
   B   True    False
   C   True    False
"""


def _make_flex_doc(op_name, typ):
    """
    Make the appropriate substitutions for the given operation and class-typ
    into either _flex_doc_SERIES or _flex_doc_FRAME to return the docstring
    to attach to a generated method.

    Parameters
    ----------
    op_name : str {'__add__', '__sub__', ... '__eq__', '__ne__', ...}
    typ : str {'series', 'dataframe'}

    Returns
    -------
    doc : str
    """
    op_name = op_name.replace('__', '')
    op_desc = _op_descriptions[op_name]

    if op_name.startswith('r'):
        equiv = 'other ' + op_desc['op'] + ' ' + typ
    else:
        equiv = typ + ' ' + op_desc['op'] + ' other'

    if typ == 'series':
        base_doc = _flex_doc_SERIES
        doc_no_examples = base_doc.format(
            desc=op_desc['desc'],
            op_name=op_name,
            equiv=equiv,
            reverse=op_desc['reverse']
        )
        if op_desc['series_examples']:
            doc = doc_no_examples + op_desc['series_examples']
        else:
            doc = doc_no_examples
    elif typ == 'dataframe':
        base_doc = _flex_doc_FRAME
        doc = base_doc.format(
            desc=op_desc['desc'],
            op_name=op_name,
            equiv=equiv,
            reverse=op_desc['reverse']
        )
    else:
        raise AssertionError('Invalid typ argument.')

    return doc


# -----------------------------------------------------------------------------
# Masking NA values and fallbacks for operations numpy does not support


def fill_binop(left, right, fill_value):
    """
    If a non-None fill_value is given, replace null entries in left and right
    with this value, but only in positions where _one_ of left/right is null,
    not both.

    Parameters
    ----------
    left : array-like
    right : array-like
    fill_value : object

    Returns
    -------
    left : array-like
    right : array-like

    Notes
    -----
    Makes copies if fill_value is not None
    """
    # TODO: can we make a no-copy implementation?
    if fill_value is not None:
        left_mask = isna(left)
        right_mask = isna(right)
        left = left.copy()
        right = right.copy()

        # one but not both
        mask = left_mask ^ right_mask
        left[left_mask & mask] = fill_value
        right[right_mask & mask] = fill_value
    return left, right


def mask_cmp_op(x, y, op):
    """
    Apply the function `op` to only non-null points in x and y.

    Parameters
    ----------
    x : array-like
    y : array-like
    op : binary operation

    Returns
    -------
    result : ndarray[bool]
    """
    xrav = x.ravel()
    result = np.empty(x.size, dtype=bool)
    if isinstance(y, (np.ndarray, ABCSeries)):
        yrav = y.ravel()
        mask = notna(xrav) & notna(yrav)
        result[mask] = op(np.array(list(xrav[mask])),
                          np.array(list(yrav[mask])))
    else:
        mask = notna(xrav)
        result[mask] = op(np.array(list(xrav[mask])), y)

    if op == operator.ne:  # pragma: no cover
        np.putmask(result, ~mask, True)
    else:
        np.putmask(result, ~mask, False)
    result = result.reshape(x.shape)
    return result


def masked_arith_op(x, y, op):
    """
    If the given arithmetic operation fails, attempt it again on
    only the non-null elements of the input array(s).

    Parameters
    ----------
    x : np.ndarray
    y : np.ndarray, Series, Index
    op : binary operator
    """
    # For Series `x` is 1D so ravel() is a no-op; calling it anyway makes
    # the logic valid for both Series and DataFrame ops.
    xrav = x.ravel()
    assert isinstance(x, (np.ndarray, ABCSeries)), type(x)
    if isinstance(y, (np.ndarray, ABCSeries, ABCIndexClass)):
        dtype = find_common_type([x.dtype, y.dtype])
        result = np.empty(x.size, dtype=dtype)

        # PeriodIndex.ravel() returns int64 dtype, so we have
        # to work around that case.  See GH#19956
        yrav = y if is_period_dtype(y) else y.ravel()
        mask = notna(xrav) & notna(yrav)

        if yrav.shape != mask.shape:
            # FIXME: GH#5284, GH#5035, GH#19448
            # Without specifically raising here we get mismatched
            # errors in Py3 (TypeError) vs Py2 (ValueError)
            # Note: Only an issue in DataFrame case
            raise ValueError('Cannot broadcast operands together.')

        if mask.any():
            with np.errstate(all='ignore'):
                result[mask] = op(xrav[mask],
                                  com.values_from_object(yrav[mask]))

    else:
        assert is_scalar(y), type(y)
        assert isinstance(x, np.ndarray), type(x)
        # mask is only meaningful for x
        result = np.empty(x.size, dtype=x.dtype)
        mask = notna(xrav)

        # 1 ** np.nan is 1. So we have to unmask those.
        if op == pow:
            mask = np.where(x == 1, False, mask)
        elif op == rpow:
            mask = np.where(y == 1, False, mask)

        if mask.any():
            with np.errstate(all='ignore'):
                result[mask] = op(xrav[mask], y)

    result, changed = maybe_upcast_putmask(result, ~mask, np.nan)
    result = result.reshape(x.shape)  # 2D compat
    return result


def invalid_comparison(left, right, op):
    """
    If a comparison has mismatched types and is not necessarily meaningful,
    follow python3 conventions by:

        - returning all-False for equality
        - returning all-True for inequality
        - raising TypeError otherwise

    Parameters
    ----------
    left : array-like
    right : scalar, array-like
    op : operator.{eq, ne, lt, le, gt}

    Raises
    ------
    TypeError : on inequality comparisons
    """
    if op is operator.eq:
        res_values = np.zeros(left.shape, dtype=bool)
    elif op is operator.ne:
        res_values = np.ones(left.shape, dtype=bool)
    else:
        raise TypeError("Invalid comparison between dtype={dtype} and {typ}"
                        .format(dtype=left.dtype, typ=type(right).__name__))
    return res_values


# -----------------------------------------------------------------------------
# Dispatch logic


def should_series_dispatch(left, right, op):
    """
    Identify cases where a DataFrame operation should dispatch to its
    Series counterpart.

    Parameters
    ----------
    left : DataFrame
    right : DataFrame
    op : binary operator

    Returns
    -------
    override : bool
    """
    if left._is_mixed_type or right._is_mixed_type:
        return True

    if not len(left.columns) or not len(right.columns):
        # ensure obj.dtypes[0] exists for each obj
        return False

    ldtype = left.dtypes.iloc[0]
    rdtype = right.dtypes.iloc[0]

    if ((is_timedelta64_dtype(ldtype) and is_integer_dtype(rdtype)) or
            (is_timedelta64_dtype(rdtype) and is_integer_dtype(ldtype))):
        # numpy integer dtypes as timedelta64 dtypes in this scenario
        return True

    if is_datetime64_dtype(ldtype) and is_object_dtype(rdtype):
        # in particular case where right is an array of DateOffsets
        return True

    return False


def dispatch_to_series(left, right, func, str_rep=None, axis=None):
    """
    Evaluate the frame operation func(left, right) by evaluating
    column-by-column, dispatching to the Series implementation.

    Parameters
    ----------
    left : DataFrame
    right : scalar or DataFrame
    func : arithmetic or comparison operator
    str_rep : str or None, default None
    axis : {None, 0, 1, "index", "columns"}

    Returns
    -------
    DataFrame
    """
    # Note: we use iloc to access columns for compat with cases
    #       with non-unique columns.
import pandas.core.computation.expressions as expressions right = lib.item_from_zerodim(right) if lib.is_scalar(right) or np.ndim(right) == 0: def column_op(a, b): return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))} elif isinstance(right, ABCDataFrame): assert right._indexed_same(left) def column_op(a, b): return {i: func(a.iloc[:, i], b.iloc[:, i]) for i in range(len(a.columns))} elif isinstance(right, ABCSeries) and axis == "columns": # We only get here if called via left._combine_match_columns, # in which case we specifically want to operate row-by-row assert right.index.equals(left.columns) def column_op(a, b): return {i: func(a.iloc[:, i], b.iloc[i]) for i in range(len(a.columns))} elif isinstance(right, ABCSeries): assert right.index.equals(left.index) # Handle other cases later def column_op(a, b): return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))} else: # Remaining cases have less-obvious dispatch rules raise NotImplementedError(right) new_data = expressions.evaluate(column_op, str_rep, left, right) result = left._constructor(new_data, index=left.index, copy=False) # Pin columns instead of passing to constructor for compat with # non-unique columns case result.columns = left.columns return result def dispatch_to_index_op(op, left, right, index_class): """ Wrap Series left in the given index_class to delegate the operation op to the index implementation. DatetimeIndex and TimedeltaIndex perform type checking, timezone handling, overflow checks, etc. Parameters ---------- op : binary operator (operator.add, operator.sub, ...) left : Series right : object index_class : DatetimeIndex or TimedeltaIndex Returns ------- result : object, usually DatetimeIndex, TimedeltaIndex, or Series """ left_idx = index_class(left) # avoid accidentally allowing integer add/sub. For datetime64[tz] dtypes, # left_idx may inherit a freq from a cached DatetimeIndex. # See discussion in GH#19147. if getattr(left_idx, 'freq', None) is not None: left_idx = left_idx._shallow_copy(freq=None) try: result = op(left_idx, right) except NullFrequencyError: # DatetimeIndex and TimedeltaIndex with freq == None raise ValueError # on add/sub of integers (or int-like). We re-raise as a TypeError. raise TypeError('incompatible type for a datetime/timedelta ' 'operation [{name}]'.format(name=op.__name__)) return result def dispatch_to_extension_op(op, left, right): """ Assume that left or right is a Series backed by an ExtensionArray, apply the operator defined by op. """ # The op calls will raise TypeError if the op is not defined # on the ExtensionArray # unbox Series and Index to arrays if isinstance(left, (ABCSeries, ABCIndexClass)): new_left = left._values else: new_left = left if isinstance(right, (ABCSeries, ABCIndexClass)): new_right = right._values else: new_right = right res_values = op(new_left, new_right) res_name = get_op_result_name(left, right) if op.__name__ in ['divmod', 'rdivmod']: return _construct_divmod_result( left, res_values, left.index, res_name) return _construct_result(left, res_values, left.index, res_name) # ----------------------------------------------------------------------------- # Functions that add arithmetic methods to objects, given arithmetic factory # methods def _get_method_wrappers(cls): """ Find the appropriate operation-wrappers to use when defining flex/special arithmetic, boolean, and comparison operations with the given class. 
Parameters ---------- cls : class Returns ------- arith_flex : function or None comp_flex : function or None arith_special : function comp_special : function bool_special : function Notes ----- None is only returned for SparseArray """ if issubclass(cls, ABCSparseSeries): # Be sure to catch this before ABCSeries and ABCSparseArray, # as they will both come see SparseSeries as a subclass arith_flex = _flex_method_SERIES comp_flex = _flex_method_SERIES arith_special = _arith_method_SPARSE_SERIES comp_special = _arith_method_SPARSE_SERIES bool_special = _bool_method_SERIES # TODO: I don't think the functions defined by bool_method are tested elif issubclass(cls, ABCSeries): # Just Series; SparseSeries is caught above arith_flex = _flex_method_SERIES comp_flex = _flex_method_SERIES arith_special = _arith_method_SERIES comp_special = _comp_method_SERIES bool_special = _bool_method_SERIES elif issubclass(cls, ABCSparseArray): arith_flex = None comp_flex = None arith_special = _arith_method_SPARSE_ARRAY comp_special = _arith_method_SPARSE_ARRAY bool_special = _arith_method_SPARSE_ARRAY elif issubclass(cls, ABCDataFrame): # Same for DataFrame and SparseDataFrame arith_flex = _arith_method_FRAME comp_flex = _flex_comp_method_FRAME arith_special = _arith_method_FRAME comp_special = _comp_method_FRAME bool_special = _arith_method_FRAME return arith_flex, comp_flex, arith_special, comp_special, bool_special def _create_methods(cls, arith_method, comp_method, bool_method, special): # creates actual methods based upon arithmetic, comp and bool method # constructors. have_divmod = issubclass(cls, ABCSeries) # divmod is available for Series and SparseSeries # yapf: disable new_methods = dict( add=arith_method(cls, operator.add, special), radd=arith_method(cls, radd, special), sub=arith_method(cls, operator.sub, special), mul=arith_method(cls, operator.mul, special), truediv=arith_method(cls, operator.truediv, special), floordiv=arith_method(cls, operator.floordiv, special), # Causes a floating point exception in the tests when numexpr enabled, # so for now no speedup mod=arith_method(cls, operator.mod, special), pow=arith_method(cls, operator.pow, special), # not entirely sure why this is necessary, but previously was included # so it's here to maintain compatibility rmul=arith_method(cls, rmul, special), rsub=arith_method(cls, rsub, special), rtruediv=arith_method(cls, rtruediv, special), rfloordiv=arith_method(cls, rfloordiv, special), rpow=arith_method(cls, rpow, special), rmod=arith_method(cls, rmod, special)) # yapf: enable new_methods['div'] = new_methods['truediv'] new_methods['rdiv'] = new_methods['rtruediv'] if have_divmod: # divmod doesn't have an op that is supported by numexpr new_methods['divmod'] = arith_method(cls, divmod, special) new_methods['rdivmod'] = arith_method(cls, rdivmod, special) new_methods.update(dict( eq=comp_method(cls, operator.eq, special), ne=comp_method(cls, operator.ne, special), lt=comp_method(cls, operator.lt, special), gt=comp_method(cls, operator.gt, special), le=comp_method(cls, operator.le, special), ge=comp_method(cls, operator.ge, special))) if bool_method: new_methods.update( dict(and_=bool_method(cls, operator.and_, special), or_=bool_method(cls, operator.or_, special), # For some reason ``^`` wasn't used in original. 
                 xor=bool_method(cls, operator.xor, special),
                 rand_=bool_method(cls, rand_, special),
                 ror_=bool_method(cls, ror_, special),
                 rxor=bool_method(cls, rxor, special)))

    if special:
        dunderize = lambda x: '__{name}__'.format(name=x.strip('_'))
    else:
        dunderize = lambda x: x
    new_methods = {dunderize(k): v for k, v in new_methods.items()}
    return new_methods


def add_methods(cls, new_methods):
    for name, method in new_methods.items():
        # For most methods, if we find that the class already has a method
        # of the same name, it is OK to over-write it.  The exception is
        # inplace methods (__iadd__, __isub__, ...) for SparseArray, which
        # retain the np.ndarray versions.
        force = not (issubclass(cls, ABCSparseArray) and
                     name.startswith('__i'))
        if force or name not in cls.__dict__:
            setattr(cls, name, method)


# ----------------------------------------------------------------------
# Arithmetic

def add_special_arithmetic_methods(cls):
    """
    Adds the full suite of special arithmetic methods (``__add__``,
    ``__sub__``, etc.) to the class.

    Parameters
    ----------
    cls : class
        special methods will be defined and pinned to this class
    """
    _, _, arith_method, comp_method, bool_method = _get_method_wrappers(cls)
    new_methods = _create_methods(cls, arith_method, comp_method, bool_method,
                                  special=True)
    # inplace operators (I feel like these should get passed an `inplace=True`
    # or just be removed)

    def _wrap_inplace_method(method):
        """
        return an inplace wrapper for this method
        """

        def f(self, other):
            result = method(self, other)

            # this makes sure that we are aligned like the input
            # we are updating inplace so we want to ignore is_copy
            self._update_inplace(result.reindex_like(self, copy=False)._data,
                                 verify_is_copy=False)

            return self

        f.__name__ = "__i{name}__".format(name=method.__name__.strip("__"))
        return f

    new_methods.update(
        dict(__iadd__=_wrap_inplace_method(new_methods["__add__"]),
             __isub__=_wrap_inplace_method(new_methods["__sub__"]),
             __imul__=_wrap_inplace_method(new_methods["__mul__"]),
             __itruediv__=_wrap_inplace_method(new_methods["__truediv__"]),
             __ifloordiv__=_wrap_inplace_method(new_methods["__floordiv__"]),
             __imod__=_wrap_inplace_method(new_methods["__mod__"]),
             __ipow__=_wrap_inplace_method(new_methods["__pow__"])))

    new_methods.update(
        dict(__iand__=_wrap_inplace_method(new_methods["__and__"]),
             __ior__=_wrap_inplace_method(new_methods["__or__"]),
             __ixor__=_wrap_inplace_method(new_methods["__xor__"])))

    add_methods(cls, new_methods=new_methods)


def add_flex_arithmetic_methods(cls):
    """
    Adds the full suite of flex arithmetic methods (``pow``, ``mul``, ``add``)
    to the class.

    Parameters
    ----------
    cls : class
        flex methods will be defined and pinned to this class
    """
    flex_arith_method, flex_comp_method, _, _, _ = _get_method_wrappers(cls)
    new_methods = _create_methods(cls, flex_arith_method,
                                  flex_comp_method, bool_method=None,
                                  special=False)
    new_methods.update(dict(multiply=new_methods['mul'],
                            subtract=new_methods['sub'],
                            divide=new_methods['div']))
    # opt out of bool flex methods for now
    assert not any(kname in new_methods for kname in ('ror_', 'rxor', 'rand_'))

    add_methods(cls, new_methods=new_methods)


# -----------------------------------------------------------------------------
# Series


def _align_method_SERIES(left, right, align_asobject=False):
    """ align lhs and rhs Series """

    # ToDo: Different from _align_method_FRAME, list, tuple and ndarray
    # are not coerced here
    # because Series has inconsistencies described in #13637

    if isinstance(right, ABCSeries):
        # avoid repeated alignment
        if not left.index.equals(right.index):

            if align_asobject:
                # to keep original value's dtype for bool ops
                left = left.astype(object)
                right = right.astype(object)

            left, right = left.align(right, copy=False)

    return left, right


def _construct_result(left, result, index, name, dtype=None):
    """
    If the raw op result has a non-None name (e.g. it is an Index object) and
    the name argument is None, then passing name to the constructor will
    not be enough; we still need to override the name attribute.
    """
    out = left._constructor(result, index=index, dtype=dtype)
    out = out.__finalize__(left)
    out.name = name
    return out


def _construct_divmod_result(left, result, index, name, dtype=None):
    """divmod returns a tuple of like indexed series instead of a single series.
    """
    return (
        _construct_result(left, result[0], index=index, name=name,
                          dtype=dtype),
        _construct_result(left, result[1], index=index, name=name,
                          dtype=dtype),
    )


def _arith_method_SERIES(cls, op, special):
    """
    Wrapper function for Series arithmetic operations, to avoid
    code duplication.
    """
    str_rep = _get_opstr(op, cls)
    op_name = _get_op_name(op, special)
    eval_kwargs = _gen_eval_kwargs(op_name)
    fill_zeros = _gen_fill_zeros(op_name)
    construct_result = (_construct_divmod_result
                        if op in [divmod, rdivmod] else _construct_result)

    def na_op(x, y):
        """
        Return the result of evaluating op on the passed in values.

        If native types are not compatible, try coercion to object dtype.

        Parameters
        ----------
        x : array-like
        y : array-like or scalar

        Returns
        -------
        array-like

        Raises
        ------
        TypeError : invalid operation
        """
        import pandas.core.computation.expressions as expressions
        try:
            result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs)
        except TypeError:
            result = masked_arith_op(x, y, op)
        except Exception:  # TODO: more specific?
            if is_object_dtype(x):
                return libalgos.arrmap_object(x,
                                              lambda val: op(val, y))
            raise

        result = missing.fill_zeros(result, x, y, op_name, fill_zeros)
        return result

    def wrapper(left, right):
        if isinstance(right, ABCDataFrame):
            return NotImplemented

        left, right = _align_method_SERIES(left, right)
        res_name = get_op_result_name(left, right)
        right = maybe_upcast_for_op(right)

        if is_categorical_dtype(left):
            raise TypeError("{typ} cannot perform the operation "
                            "{op}".format(typ=type(left).__name__,
                                          op=str_rep))

        elif is_datetime64_dtype(left) or is_datetime64tz_dtype(left):
            # Give dispatch_to_index_op a chance for tests like
            # test_dt64_series_add_intlike, which the index dispatching handles
            # specifically.
            result = dispatch_to_index_op(op, left, right, pd.DatetimeIndex)
            return construct_result(left, result,
                                    index=left.index, name=res_name,
                                    dtype=result.dtype)

        elif (is_extension_array_dtype(left) or
                (is_extension_array_dtype(right) and not is_scalar(right))):
            # GH#22378 disallow scalar to exclude e.g. "category", "Int64"
            return dispatch_to_extension_op(op, left, right)

        elif is_timedelta64_dtype(left):
            result = dispatch_to_index_op(op, left, right, pd.TimedeltaIndex)
            return construct_result(left, result,
                                    index=left.index, name=res_name)

        elif is_timedelta64_dtype(right):
            # We should only get here with non-scalar or timedelta64('NaT')
            # values for right
            # Note: we cannot use dispatch_to_index_op because
            # that may incorrectly raise TypeError when we
            # should get NullFrequencyError
            result = op(pd.Index(left), right)
            return construct_result(left, result,
                                    index=left.index, name=res_name,
                                    dtype=result.dtype)

        lvalues = left.values
        rvalues = right
        if isinstance(rvalues, ABCSeries):
            rvalues = rvalues.values

        with np.errstate(all='ignore'):
            result = na_op(lvalues, rvalues)
        return construct_result(left, result,
                                index=left.index, name=res_name, dtype=None)

    wrapper.__name__ = op_name
    return wrapper


def _comp_method_OBJECT_ARRAY(op, x, y):
    if isinstance(y, list):
        y = construct_1d_object_array_from_listlike(y)
    if isinstance(y, (np.ndarray, ABCSeries, ABCIndex)):
        if not is_object_dtype(y.dtype):
            y = y.astype(np.object_)

        if isinstance(y, (ABCSeries, ABCIndex)):
            y = y.values

        result = libops.vec_compare(x, y, op)
    else:
        result = libops.scalar_compare(x, y, op)
    return result


def _comp_method_SERIES(cls, op, special):
    """
    Wrapper function for Series arithmetic operations, to avoid
    code duplication.
    """
    op_name = _get_op_name(op, special)
    masker = _gen_eval_kwargs(op_name).get('masker', False)

    def na_op(x, y):
        # TODO:
        # should have guarantees on what x, y can be type-wise
        # Extension Dtypes are not called here

        # Checking that cases that were once handled here are no longer
        # reachable.
        assert not (is_categorical_dtype(y) and not is_scalar(y))

        if is_object_dtype(x.dtype):
            result = _comp_method_OBJECT_ARRAY(op, x, y)

        elif is_datetimelike_v_numeric(x, y):
            return invalid_comparison(x, y, op)

        else:

            # we want to compare like types
            # we only want to convert to integer like if
            # we are not NotImplemented, otherwise
            # we would allow datetime64 (but viewed as i8) against
            # integer comparisons

            # we have a datetime/timedelta and may need to convert
            assert not needs_i8_conversion(x)
            mask = None
            if not is_scalar(y) and needs_i8_conversion(y):
                mask = isna(x) | isna(y)
                y = y.view('i8')
                x = x.view('i8')

            method = getattr(x, op_name, None)
            if method is not None:
                with np.errstate(all='ignore'):
                    result = method(y)
                if result is NotImplemented:
                    return invalid_comparison(x, y, op)
            else:
                result = op(x, y)

            if mask is not None and mask.any():
                result[mask] = masker

        return result

    def wrapper(self, other, axis=None):
        # Validate the axis parameter
        if axis is not None:
            self._get_axis_number(axis)

        res_name = get_op_result_name(self, other)

        if isinstance(other, list):
            # TODO: same for tuples?
other = np.asarray(other) if isinstance(other, ABCDataFrame): # pragma: no cover # Defer to DataFrame implementation; fail early return NotImplemented elif isinstance(other, ABCSeries) and not self._indexed_same(other): raise ValueError("Can only compare identically-labeled " "Series objects") elif is_categorical_dtype(self): # Dispatch to Categorical implementation; pd.CategoricalIndex # behavior is non-canonical GH#19513 res_values = dispatch_to_index_op(op, self, other, pd.Categorical) return self._constructor(res_values, index=self.index, name=res_name) elif is_datetime64_dtype(self) or is_datetime64tz_dtype(self): # Dispatch to DatetimeIndex to ensure identical # Series/Index behavior if (isinstance(other, datetime.date) and not isinstance(other, datetime.datetime)): # https://github.com/pandas-dev/pandas/issues/21152 # Compatibility for difference between Series comparison w/ # datetime and date msg = ( "Comparing Series of datetimes with 'datetime.date'. " "Currently, the 'datetime.date' is coerced to a " "datetime. In the future pandas will not coerce, " "and {future}. " "To retain the current behavior, " "convert the 'datetime.date' to a datetime with " "'pd.Timestamp'." ) if op in {operator.lt, operator.le, operator.gt, operator.ge}: future = "a TypeError will be raised" else: future = ( "'the values will not compare equal to the " "'datetime.date'" ) msg = '\n'.join(textwrap.wrap(msg.format(future=future))) warnings.warn(msg, FutureWarning, stacklevel=2) other = pd.Timestamp(other) res_values = dispatch_to_index_op(op, self, other, pd.DatetimeIndex) return self._constructor(res_values, index=self.index, name=res_name) elif is_timedelta64_dtype(self): res_values = dispatch_to_index_op(op, self, other, pd.TimedeltaIndex) return self._constructor(res_values, index=self.index, name=res_name) elif (is_extension_array_dtype(self) or (is_extension_array_dtype(other) and not is_scalar(other))): # Note: the `not is_scalar(other)` condition rules out # e.g. other == "category" return dispatch_to_extension_op(op, self, other) elif isinstance(other, ABCSeries): # By this point we have checked that self._indexed_same(other) res_values = na_op(self.values, other.values) # rename is needed in case res_name is None and res_values.name # is not. return self._constructor(res_values, index=self.index, name=res_name).rename(res_name) elif isinstance(other, (np.ndarray, pd.Index)): # do not check length of zerodim array # as it will broadcast if other.ndim != 0 and len(self) != len(other): raise ValueError('Lengths must match to compare') res_values = na_op(self.values, np.asarray(other)) result = self._constructor(res_values, index=self.index) # rename is needed in case res_name is None and self.name # is not. 
return result.__finalize__(self).rename(res_name) elif is_scalar(other) and isna(other): # numpy does not like comparisons vs None if op is operator.ne: res_values = np.ones(len(self), dtype=bool) else: res_values = np.zeros(len(self), dtype=bool) return self._constructor(res_values, index=self.index, name=res_name, dtype='bool') else: values = self.get_values() with np.errstate(all='ignore'): res = na_op(values, other) if is_scalar(res): raise TypeError('Could not compare {typ} type with Series' .format(typ=type(other))) # always return a full value series here res_values = com.values_from_object(res) return self._constructor(res_values, index=self.index, name=res_name, dtype='bool') wrapper.__name__ = op_name return wrapper def _bool_method_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def na_op(x, y): try: result = op(x, y) except TypeError: assert not isinstance(y, (list, ABCSeries, ABCIndexClass)) if isinstance(y, np.ndarray): # bool-bool dtype operations should be OK, should not get here assert not (is_bool_dtype(x) and is_bool_dtype(y)) x = ensure_object(x) y = ensure_object(y) result = libops.vec_binop(x, y, op) else: # let null fall thru assert lib.is_scalar(y) if not isna(y): y = bool(y) try: result = libops.scalar_binop(x, y, op) except (TypeError, ValueError, AttributeError, OverflowError, NotImplementedError): raise TypeError("cannot compare a dtyped [{dtype}] array " "with a scalar of type [{typ}]" .format(dtype=x.dtype, typ=type(y).__name__)) return result fill_int = lambda x: x.fillna(0) fill_bool = lambda x: x.fillna(False).astype(bool) def wrapper(self, other): is_self_int_dtype = is_integer_dtype(self.dtype) self, other = _align_method_SERIES(self, other, align_asobject=True) res_name = get_op_result_name(self, other) if isinstance(other, ABCDataFrame): # Defer to DataFrame implementation; fail early return NotImplemented elif isinstance(other, (ABCSeries, ABCIndexClass)): is_other_int_dtype = is_integer_dtype(other.dtype) other = fill_int(other) if is_other_int_dtype else fill_bool(other) ovalues = other.values finalizer = lambda x: x else: # scalars, list, tuple, np.array is_other_int_dtype = is_integer_dtype(np.asarray(other)) if is_list_like(other) and not isinstance(other, np.ndarray): # TODO: Can we do this before the is_integer_dtype check? # could the is_integer_dtype check be checking the wrong # thing? e.g. other = [[0, 1], [2, 3], [4, 5]]? other = construct_1d_object_array_from_listlike(other) ovalues = other finalizer = lambda x: x.__finalize__(self) # For int vs int `^`, `|`, `&` are bitwise operators and return # integer dtypes. 
Otherwise these are boolean ops filler = (fill_int if is_self_int_dtype and is_other_int_dtype else fill_bool) res_values = na_op(self.values, ovalues) unfilled = self._constructor(res_values, index=self.index, name=res_name) filled = filler(unfilled) return finalizer(filled) wrapper.__name__ = op_name return wrapper def _flex_method_SERIES(cls, op, special): name = _get_op_name(op, special) doc = _make_flex_doc(name, 'series') @Appender(doc) def flex_wrapper(self, other, level=None, fill_value=None, axis=0): # validate axis if axis is not None: self._get_axis_number(axis) if isinstance(other, ABCSeries): return self._binop(other, op, level=level, fill_value=fill_value) elif isinstance(other, (np.ndarray, list, tuple)): if len(other) != len(self): raise ValueError('Lengths must be equal') other = self._constructor(other, self.index) return self._binop(other, op, level=level, fill_value=fill_value) else: if fill_value is not None: self = self.fillna(fill_value) return self._constructor(op(self, other), self.index).__finalize__(self) flex_wrapper.__name__ = name return flex_wrapper # ----------------------------------------------------------------------------- # DataFrame def _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None): """ Apply binary operator `func` to self, other using alignment and fill conventions determined by the fill_value, axis, and level kwargs. Parameters ---------- self : DataFrame other : Series func : binary operator fill_value : object, default None axis : {0, 1, 'columns', 'index', None}, default None level : int or None, default None Returns ------- result : DataFrame """ if fill_value is not None: raise NotImplementedError("fill_value {fill} not supported." .format(fill=fill_value)) if axis is not None: axis = self._get_axis_number(axis) if axis == 0: return self._combine_match_index(other, func, level=level) else: return self._combine_match_columns(other, func, level=level) else: if not len(other): return self * np.nan if not len(self): # Ambiguous case, use _series so works with DataFrame return self._constructor(data=self._series, index=self.index, columns=self.columns) # default axis is columns return self._combine_match_columns(other, func, level=level) def _align_method_FRAME(left, right, axis): """ convert rhs to meet lhs dims if input is list, tuple or np.ndarray """ def to_series(right): msg = ('Unable to coerce to Series, length must be {req_len}: ' 'given {given_len}') if axis is not None and left._get_axis_name(axis) == 'index': if len(left.index) != len(right): raise ValueError(msg.format(req_len=len(left.index), given_len=len(right))) right = left._constructor_sliced(right, index=left.index) else: if len(left.columns) != len(right): raise ValueError(msg.format(req_len=len(left.columns), given_len=len(right))) right = left._constructor_sliced(right, index=left.columns) return right if isinstance(right, np.ndarray): if right.ndim == 1: right = to_series(right) elif right.ndim == 2: if right.shape == left.shape: right = left._constructor(right, index=left.index, columns=left.columns) elif right.shape[0] == left.shape[0] and right.shape[1] == 1: # Broadcast across columns right = np.broadcast_to(right, left.shape) right = left._constructor(right, index=left.index, columns=left.columns) elif right.shape[1] == left.shape[1] and right.shape[0] == 1: # Broadcast along rows right = to_series(right[0, :]) else: raise ValueError("Unable to coerce to DataFrame, shape " "must be {req_shape}: given {given_shape}" 
.format(req_shape=left.shape, given_shape=right.shape)) elif right.ndim > 2: raise ValueError('Unable to coerce to Series/DataFrame, dim ' 'must be <= 2: {dim}'.format(dim=right.shape)) elif (is_list_like(right) and not isinstance(right, (ABCSeries, ABCDataFrame))): # GH17901 right = to_series(right) return right def _arith_method_FRAME(cls, op, special): str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) eval_kwargs = _gen_eval_kwargs(op_name) fill_zeros = _gen_fill_zeros(op_name) default_axis = _get_frame_op_default_axis(op_name) def na_op(x, y): import pandas.core.computation.expressions as expressions try: result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs) except TypeError: result = masked_arith_op(x, y, op) result = missing.fill_zeros(result, x, y, op_name, fill_zeros) return result if op_name in _op_descriptions: # i.e. include "add" but not "__add__" doc = _make_flex_doc(op_name, 'dataframe') else: doc = _arith_doc_FRAME % op_name @Appender(doc) def f(self, other, axis=default_axis, level=None, fill_value=None): other = _align_method_FRAME(self, other, axis) if isinstance(other, ABCDataFrame): # Another DataFrame pass_op = op if should_series_dispatch(self, other, op) else na_op return self._combine_frame(other, pass_op, fill_value, level) elif isinstance(other, ABCSeries): # For these values of `axis`, we end up dispatching to Series op, # so do not want the masked op. pass_op = op if axis in [0, "columns", None] else na_op return _combine_series_frame(self, other, pass_op, fill_value=fill_value, axis=axis, level=level) else: if fill_value is not None: self = self.fillna(fill_value) assert np.ndim(other) == 0 return self._combine_const(other, op) f.__name__ = op_name return f def _flex_comp_method_FRAME(cls, op, special): str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) default_axis = _get_frame_op_default_axis(op_name) def na_op(x, y): try: with np.errstate(invalid='ignore'): result = op(x, y) except TypeError: result = mask_cmp_op(x, y, op) return result doc = _flex_comp_doc_FRAME.format(op_name=op_name, desc=_op_descriptions[op_name]['desc']) @Appender(doc) def f(self, other, axis=default_axis, level=None): other = _align_method_FRAME(self, other, axis) if isinstance(other, ABCDataFrame): # Another DataFrame if not self._indexed_same(other): self, other = self.align(other, 'outer', level=level, copy=False) return dispatch_to_series(self, other, na_op, str_rep) elif isinstance(other, ABCSeries): return _combine_series_frame(self, other, na_op, fill_value=None, axis=axis, level=level) else: assert np.ndim(other) == 0, other return self._combine_const(other, na_op) f.__name__ = op_name return f def _comp_method_FRAME(cls, func, special): str_rep = _get_opstr(func, cls) op_name = _get_op_name(func, special) @Appender('Wrapper for comparison method {name}'.format(name=op_name)) def f(self, other): other = _align_method_FRAME(self, other, axis=None) if isinstance(other, ABCDataFrame): # Another DataFrame if not self._indexed_same(other): raise ValueError('Can only compare identically-labeled ' 'DataFrame objects') return dispatch_to_series(self, other, func, str_rep) elif isinstance(other, ABCSeries): return _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None) else: # straight boolean comparisons we want to allow all columns # (regardless of dtype to pass thru) See #4537 for discussion. 
res = self._combine_const(other, func) return res.fillna(True).astype(bool) f.__name__ = op_name return f # ----------------------------------------------------------------------------- # Sparse def _cast_sparse_series_op(left, right, opname): """ For SparseSeries operation, coerce to float64 if the result is expected to have NaN or inf values Parameters ---------- left : SparseArray right : SparseArray opname : str Returns ------- left : SparseArray right : SparseArray """ from pandas.core.sparse.api import SparseDtype opname = opname.strip('_') # TODO: This should be moved to the array? if is_integer_dtype(left) and is_integer_dtype(right): # series coerces to float64 if result should have NaN/inf if opname in ('floordiv', 'mod') and (right.to_dense() == 0).any(): left = left.astype(SparseDtype(np.float64, left.fill_value)) right = right.astype(SparseDtype(np.float64, right.fill_value)) elif opname in ('rfloordiv', 'rmod') and (left.to_dense() == 0).any(): left = left.astype(SparseDtype(np.float64, left.fill_value)) right = right.astype(SparseDtype(np.float64, right.fill_value)) return left, right def _arith_method_SPARSE_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def wrapper(self, other): if isinstance(other, ABCDataFrame): return NotImplemented elif isinstance(other, ABCSeries): if not isinstance(other, ABCSparseSeries): other = other.to_sparse(fill_value=self.fill_value) return _sparse_series_op(self, other, op, op_name) elif is_scalar(other): with np.errstate(all='ignore'): new_values = op(self.values, other) return self._constructor(new_values, index=self.index, name=self.name) else: # pragma: no cover raise TypeError('operation with {other} not supported' .format(other=type(other))) wrapper.__name__ = op_name return wrapper def _sparse_series_op(left, right, op, name): left, right = left.align(right, join='outer', copy=False) new_index = left.index new_name = get_op_result_name(left, right) from pandas.core.arrays.sparse import _sparse_array_op lvalues, rvalues = _cast_sparse_series_op(left.values, right.values, name) result = _sparse_array_op(lvalues, rvalues, op, name) return left._constructor(result, index=new_index, name=new_name) def _arith_method_SPARSE_ARRAY(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def wrapper(self, other): from pandas.core.arrays.sparse.array import ( SparseArray, _sparse_array_op, _wrap_result, _get_fill) if isinstance(other, np.ndarray): if len(self) != len(other): raise AssertionError("length mismatch: {self} vs. {other}" .format(self=len(self), other=len(other))) if not isinstance(other, SparseArray): dtype = getattr(other, 'dtype', None) other = SparseArray(other, fill_value=self.fill_value, dtype=dtype) return _sparse_array_op(self, other, op, op_name) elif is_scalar(other): with np.errstate(all='ignore'): fill = op(_get_fill(self), np.asarray(other)) result = op(self.sp_values, other) return _wrap_result(op_name, result, self.sp_index, fill) else: # pragma: no cover raise TypeError('operation with {other} not supported' .format(other=type(other))) wrapper.__name__ = op_name return wrapper
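# Illustrative usage sketch (added for this document; not part of pandas
# itself).  It exercises the public flex API that the factories above
# generate, assuming the module-level `import pandas as pd` used elsewhere
# in this file.  Guarded so that merely importing the module is unaffected.
if __name__ == '__main__':
    df = pd.DataFrame({'angles': [0, 3, 4]},
                      index=['circle', 'triangle', 'rectangle'])
    other = pd.DataFrame({'angles': [1, 2]}, index=['circle', 'triangle'])

    # fill_value substitutes for positions missing from exactly one operand;
    # 'rectangle' is absent from `other`, so it is treated as 0 here.
    print(df.add(other, fill_value=0))

    # Flex comparisons accept an axis for broadcasting a sequence.
    print(df.eq([0, 3, 4], axis='index'))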
import numpy as np
import pytest

from pandas import DataFrame, MultiIndex, Series
from pandas.core import common as com
import pandas.util.testing as tm


def test_detect_chained_assignment():
    # Inplace ops, originally from:
    # http://stackoverflow.com/questions/20508968/series-fillna-in-a-multiindex-dataframe-does-not-fill-is-this-a-bug
    a = [12, 23]
    b = [123, None]
    c = [1234, 2345]
    d = [12345, 23456]
    tuples = [('eyes', 'left'), ('eyes', 'right'), ('ears', 'left'),
              ('ears', 'right')]
    events = {('eyes', 'left'): a, ('eyes', 'right'): b,
              ('ears', 'left'): c, ('ears', 'right'): d}
    multiind = MultiIndex.from_tuples(tuples, names=['part', 'side'])
    zed = DataFrame(events, index=['a', 'b'], columns=multiind)

    with pytest.raises(com.SettingWithCopyError):
        zed['eyes']['right'].fillna(value=555, inplace=True)


def test_cache_updating():
    # 5216
    # make sure that we don't try to set a dead cache
    a = np.random.rand(10, 3)
    df = DataFrame(a, columns=['x', 'y', 'z'])
    tuples = [(i, j) for i in range(5) for j in range(2)]
    index = MultiIndex.from_tuples(tuples)
    df.index = index

    # setting via chained assignment
    # but actually works, since everything is a view
    df.loc[0]['z'].iloc[0] = 1.
    result = df.loc[(0, 0), 'z']
    assert result == 1

    # correct setting
    df.loc[(0, 0), 'z'] = 2
    result = df.loc[(0, 0), 'z']
    assert result == 2


def test_indexer_caching():
    # GH5727
    # make sure that indexers are in the _internal_names_set
    n = 1000001
    arrays = (range(n), range(n))
    index = MultiIndex.from_tuples(zip(*arrays))
    s = Series(np.zeros(n), index=index)
    str(s)

    # setitem
    expected = Series(np.ones(n), index=index)
    s = Series(np.zeros(n), index=index)
    s[s == 0] = 1
    tm.assert_series_equal(s, expected)
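# Illustrative note (added): the SettingWithCopyError asserted above is
# pandas' guard against chained assignment.  A hedged sketch of the
# supported spelling, which writes through a single .loc indexer instead of
# an intermediate object:
#
#     zed.loc['b', ('eyes', 'right')] = 555   # hypothetical direct write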
cbertinato/pandas
pandas/tests/indexing/multiindex/test_chaining_and_caching.py
pandas/core/ops.py
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file './pyqtgraph/graphicsItems/ViewBox/axisCtrlTemplate.ui' # # Created: Mon Dec 23 10:10:51 2013 # by: pyside-uic 0.2.14 running on PySide 1.1.2 # # WARNING! All changes made in this file will be lost! from PySide import QtCore, QtGui class Ui_Form(object): def setupUi(self, Form): Form.setObjectName("Form") Form.resize(186, 154) Form.setMaximumSize(QtCore.QSize(200, 16777215)) self.gridLayout = QtGui.QGridLayout(Form) self.gridLayout.setContentsMargins(0, 0, 0, 0) self.gridLayout.setSpacing(0) self.gridLayout.setObjectName("gridLayout") self.label = QtGui.QLabel(Form) self.label.setObjectName("label") self.gridLayout.addWidget(self.label, 7, 0, 1, 2) self.linkCombo = QtGui.QComboBox(Form) self.linkCombo.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContents) self.linkCombo.setObjectName("linkCombo") self.gridLayout.addWidget(self.linkCombo, 7, 2, 1, 2) self.autoPercentSpin = QtGui.QSpinBox(Form) self.autoPercentSpin.setEnabled(True) self.autoPercentSpin.setMinimum(1) self.autoPercentSpin.setMaximum(100) self.autoPercentSpin.setSingleStep(1) self.autoPercentSpin.setProperty("value", 100) self.autoPercentSpin.setObjectName("autoPercentSpin") self.gridLayout.addWidget(self.autoPercentSpin, 2, 2, 1, 2) self.autoRadio = QtGui.QRadioButton(Form) self.autoRadio.setChecked(True) self.autoRadio.setObjectName("autoRadio") self.gridLayout.addWidget(self.autoRadio, 2, 0, 1, 2) self.manualRadio = QtGui.QRadioButton(Form) self.manualRadio.setObjectName("manualRadio") self.gridLayout.addWidget(self.manualRadio, 1, 0, 1, 2) self.minText = QtGui.QLineEdit(Form) self.minText.setObjectName("minText") self.gridLayout.addWidget(self.minText, 1, 2, 1, 1) self.maxText = QtGui.QLineEdit(Form) self.maxText.setObjectName("maxText") self.gridLayout.addWidget(self.maxText, 1, 3, 1, 1) self.invertCheck = QtGui.QCheckBox(Form) self.invertCheck.setObjectName("invertCheck") self.gridLayout.addWidget(self.invertCheck, 5, 0, 1, 4) self.mouseCheck = QtGui.QCheckBox(Form) self.mouseCheck.setChecked(True) self.mouseCheck.setObjectName("mouseCheck") self.gridLayout.addWidget(self.mouseCheck, 6, 0, 1, 4) self.visibleOnlyCheck = QtGui.QCheckBox(Form) self.visibleOnlyCheck.setObjectName("visibleOnlyCheck") self.gridLayout.addWidget(self.visibleOnlyCheck, 3, 2, 1, 2) self.autoPanCheck = QtGui.QCheckBox(Form) self.autoPanCheck.setObjectName("autoPanCheck") self.gridLayout.addWidget(self.autoPanCheck, 4, 2, 1, 2) self.retranslateUi(Form) QtCore.QMetaObject.connectSlotsByName(Form) def retranslateUi(self, Form): Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8)) self.label.setText(QtGui.QApplication.translate("Form", "Link Axis:", None, QtGui.QApplication.UnicodeUTF8)) self.linkCombo.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Links this axis with another view. When linked, both views will display the same data range.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.autoPercentSpin.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Percent of data to be visible when auto-scaling. 
It may be useful to decrease this value for data with spiky noise.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.autoPercentSpin.setSuffix(QtGui.QApplication.translate("Form", "%", None, QtGui.QApplication.UnicodeUTF8)) self.autoRadio.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Automatically resize this axis whenever the displayed data is changed.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.autoRadio.setText(QtGui.QApplication.translate("Form", "Auto", None, QtGui.QApplication.UnicodeUTF8)) self.manualRadio.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Set the range for this axis manually. This disables automatic scaling. </p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.manualRadio.setText(QtGui.QApplication.translate("Form", "Manual", None, QtGui.QApplication.UnicodeUTF8)) self.minText.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Minimum value to display for this axis.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.minText.setText(QtGui.QApplication.translate("Form", "0", None, QtGui.QApplication.UnicodeUTF8)) self.maxText.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Maximum value to display for this axis.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.maxText.setText(QtGui.QApplication.translate("Form", "0", None, QtGui.QApplication.UnicodeUTF8)) self.invertCheck.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Inverts the display of this axis. (+y points downward instead of upward)</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.invertCheck.setText(QtGui.QApplication.translate("Form", "Invert Axis", None, QtGui.QApplication.UnicodeUTF8)) self.mouseCheck.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Enables mouse interaction (panning, scaling) for this axis.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.mouseCheck.setText(QtGui.QApplication.translate("Form", "Mouse Enabled", None, QtGui.QApplication.UnicodeUTF8)) self.visibleOnlyCheck.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>When checked, the axis will only auto-scale to data that is visible along the orthogonal axis.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.visibleOnlyCheck.setText(QtGui.QApplication.translate("Form", "Visible Data Only", None, QtGui.QApplication.UnicodeUTF8)) self.autoPanCheck.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>When checked, the axis will automatically pan to center on the current data, but the scale along this axis will not change.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8)) self.autoPanCheck.setText(QtGui.QApplication.translate("Form", "Auto Pan Only", None, QtGui.QApplication.UnicodeUTF8))
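# Illustrative usage sketch (added; this pyside-uic generated module is
# normally not edited by hand).  A Ui_Form is applied to a plain widget in
# the standard uic pattern:
#
#     from PySide import QtGui
#     app = QtGui.QApplication([])
#     widget = QtGui.QWidget()
#     ui = Ui_Form()
#     ui.setupUi(widget)   # builds the axis controls onto `widget`
#     widget.show()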
import gc
import weakref
import pytest
# try:
#     import faulthandler
#     faulthandler.enable()
# except ImportError:
#     pass

from pyqtgraph.Qt import QtCore, QtGui, QtTest
import numpy as np
import pyqtgraph as pg
app = pg.mkQApp()


@pytest.mark.skipif(pg.Qt.USE_PYSIDE, reason="pyside does not have qWait")
def test_dividebyzero():
    import pyqtgraph as pg
    im = pg.image(pg.np.random.normal(size=(100,100)))
    im.imageItem.setAutoDownsample(True)
    im.view.setRange(xRange=[-5+25, 5e+25], yRange=[-5e+25, 5e+25])
    app.processEvents()
    QtTest.QTest.qWait(1000)
    # must manually call im.imageItem.render here or the exception
    # will only exist on the Qt event loop
    im.imageItem.render()
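# Illustrative note (added): a hedged sketch of the same reproduction
# outside the test harness; the enormous view range drives the
# auto-downsampling path in ImageItem toward a divide-by-zero:
#
#     import numpy as np
#     import pyqtgraph as pg
#     app = pg.mkQApp()
#     win = pg.image(np.random.normal(size=(100, 100)))
#     win.imageItem.setAutoDownsample(True)
#     win.view.setRange(xRange=[-5e+25, 5e+25], yRange=[-5e+25, 5e+25])
#     win.imageItem.render()  # render outside the event loop to surface errors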
ddamiani/pyqtgraph
pyqtgraph/graphicsItems/tests/test_ImageItem.py
pyqtgraph/graphicsItems/ViewBox/axisCtrlTemplate_pyside.py
from insights.client.auto_config import set_auto_configuration
from mock.mock import Mock, patch


@patch("insights.client.auto_config.InsightsConnection")
def test_sat_branch_info_called(connection):
    '''
    When is_satellite is True, we're on Satellite, so get_branch_info
    should be called.
    '''
    config = Mock(base_url=None, upload_url=None, legacy_upload=False)
    set_auto_configuration(config, 'test.com:443/redhat_access', 'some_cert', None, True, False)
    connection.return_value.get_branch_info.assert_called_once()


@patch("insights.client.auto_config.InsightsConnection")
def test_rhsm_branch_info_not_called(connection):
    '''
    When is_satellite is False, we're on direct RHSM, so get_branch_info
    should not be called.
    '''
    config = Mock(base_url=None, upload_url=None, legacy_upload=False)
    set_auto_configuration(config, 'cert-api.access.redhat.com', None, None, False, False)
    connection.return_value.get_branch_info.assert_not_called()
from insights.plugins.ps_rule_fakes import psaux_no_filter, psauxww_ds_filter, psalxwww_parser_filter
from insights.specs import Specs

from . import InputData, run_test

import pytest


def test_run_test_missing_filters_exception():
    """
    The rule's underlying datasource requires a filter; an exception
    should be raised because the filter was not added in the rule module.
    """
    input_data = InputData("fake_input")
    input_data.add(Specs.ps_aux, "FAKE_CONTENT")
    with pytest.raises(Exception):
        run_test(psaux_no_filter, input_data, None)


def test_run_test_no_missing_filters_using_datasource():
    """
    Required filter was added directly to the datasource, ``run_test``
    should complete without any exceptions.
    """
    input_data = InputData("fake_input")
    input_data.add(Specs.ps_auxww, "FAKE_CONTENT")
    result = run_test(psauxww_ds_filter, input_data, None)
    assert result


def test_run_test_no_missing_filters_using_parser():
    """
    Required filter was added using the parser, ``run_test`` should
    complete without any exceptions.
    """
    input_data = InputData("fake_input")
    input_data.add(Specs.ps_alxwww, "FAKE_CONTENT")
    result = run_test(psalxwww_parser_filter, input_data, None)
    assert result
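# Illustrative sketch (added): the "filter was added" setups these docstrings
# refer to are typically done with insights.core.filters.add_filter, e.g. in
# the fake rule modules (the pattern string here is hypothetical):
#
#     from insights.core.filters import add_filter
#     from insights.specs import Specs
#
#     add_filter(Specs.ps_auxww, "COMMAND")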
RedHatInsights/insights-core
insights/tests/test_integration_support.py
insights/tests/client/auto_config/test_branch_info_call.py
import requests
import json
from insights.client.connection import InsightsConnection
from mock.mock import MagicMock, Mock, patch


@patch("insights.client.connection.generate_machine_id", return_value='xxxxxx')
@patch("insights.client.connection.InsightsConnection._init_session")
@patch("insights.client.connection.InsightsConnection.get_proxies")
def test_registration_check_ok_reg(get_proxies, _init_session, _):
    '''
    Request completed OK, registered
    Returns True
    '''
    config = Mock(legacy_upload=True, base_url='example.com')
    conn = InsightsConnection(config)

    res = requests.Response()
    res._content = json.dumps({'unregistered_at': None})
    res.status_code = 200
    conn.session.get = MagicMock(return_value=res)
    assert conn.api_registration_check()


@patch("insights.client.connection.generate_machine_id", return_value='xxxxxx')
@patch("insights.client.connection.InsightsConnection._init_session")
@patch("insights.client.connection.InsightsConnection.get_proxies")
def test_registration_check_ok_reg_then_unreg(get_proxies, _init_session, _):
    '''
    Request completed OK, was once registered but has been unregistered
    Returns the date it was unregistered
    '''
    config = Mock(legacy_upload=True, base_url='example.com')
    conn = InsightsConnection(config)

    res = requests.Response()
    res._content = json.dumps({'unregistered_at': '2019-04-10'})
    res.status_code = 200
    conn.session.get = MagicMock(return_value=res)
    assert conn.api_registration_check() == '2019-04-10'


@patch("insights.client.connection.generate_machine_id", return_value='xxxxxx')
@patch("insights.client.connection.InsightsConnection._init_session")
@patch("insights.client.connection.InsightsConnection.get_proxies")
def test_registration_check_ok_unreg(get_proxies, _init_session, _):
    '''
    Request completed OK, has never been registered
    Returns None
    '''
    config = Mock(legacy_upload=True, base_url='example.com')
    conn = InsightsConnection(config)

    res = requests.Response()
    res._content = json.dumps({})
    res.status_code = 404
    conn.session.get = MagicMock(return_value=res)
    assert conn.api_registration_check() is None


@patch("insights.client.connection.generate_machine_id", return_value='xxxxxx')
@patch("insights.client.connection.InsightsConnection._init_session")
@patch("insights.client.connection.InsightsConnection.get_proxies")
def test_registration_check_bad_res(get_proxies, _init_session, _):
    '''
    Can't parse response
    Returns False
    '''
    config = Mock(legacy_upload=True, base_url='example.com')
    conn = InsightsConnection(config)

    res = requests.Response()
    res._content = 'zSDFasfghsRGH'
    res.status_code = 500
    conn.session.get = MagicMock(return_value=res)
    assert conn.api_registration_check() is False


@patch("insights.client.connection.generate_machine_id", return_value='xxxxxx')
@patch("insights.client.connection.InsightsConnection._init_session")
@patch("insights.client.connection.InsightsConnection.get_proxies")
@patch("insights.client.connection.InsightsConnection.test_connection")
def test_registration_check_conn_error(test_connection, get_proxies, _init_session, _):
    '''
    Can't connect, run connection test
    Returns False
    '''
    config = Mock(legacy_upload=True, base_url='example.com')
    conn = InsightsConnection(config)

    conn.session.get = MagicMock()
    conn.session.get.side_effect = requests.ConnectionError()
    assert conn.api_registration_check() is False
    test_connection.assert_called_once()
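# Illustrative helper sketch (added): the response stubbing repeated in each
# test above could be factored out.  A minimal version, assuming only the
# `requests` and `json` imports already present in this file:
#
#     def _fake_response(body, status_code):
#         res = requests.Response()
#         res._content = json.dumps(body)
#         res.status_code = status_code
#         return res
#
#     # e.g. conn.session.get = MagicMock(
#     #          return_value=_fake_response({'unregistered_at': None}, 200))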
from insights.plugins.ps_rule_fakes import psaux_no_filter, psauxww_ds_filter, psalxwww_parser_filter
from insights.specs import Specs

from . import InputData, run_test

import pytest


def test_run_test_missing_filters_exception():
    """
    The rule's underlying datasource requires a filter; an exception
    should be raised because the filter was not added in the rule module.
    """
    input_data = InputData("fake_input")
    input_data.add(Specs.ps_aux, "FAKE_CONTENT")
    with pytest.raises(Exception):
        run_test(psaux_no_filter, input_data, None)


def test_run_test_no_missing_filters_using_datasource():
    """
    Required filter was added directly to the datasource, ``run_test``
    should complete without any exceptions.
    """
    input_data = InputData("fake_input")
    input_data.add(Specs.ps_auxww, "FAKE_CONTENT")
    result = run_test(psauxww_ds_filter, input_data, None)
    assert result


def test_run_test_no_missing_filters_using_parser():
    """
    Required filter was added using the parser, ``run_test`` should
    complete without any exceptions.
    """
    input_data = InputData("fake_input")
    input_data.add(Specs.ps_alxwww, "FAKE_CONTENT")
    result = run_test(psalxwww_parser_filter, input_data, None)
    assert result
RedHatInsights/insights-core
insights/tests/test_integration_support.py
insights/tests/client/connection/test_LEGACY_reg_check.py
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2020 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""The catkin plugin is useful for building ROS parts.

The rosdistro used depends upon the base of the snap:

  - core: Uses Kinetic
  - core16: Uses Kinetic
  - core18: Uses Melodic

This plugin uses the common plugin keywords as well as those for "sources".
For more information check the 'plugins' topic for the former and the
'sources' topic for the latter.

Additionally, this plugin uses the following plugin-specific keywords:

    - catkin-packages:
      (list of strings)
      List of catkin packages to build. If not specified, all packages in the
      workspace will be built. If set to an empty list ([]), no packages will
      be built.
    - source-space:
      (string)
      The source space containing Catkin packages. By default this is 'src'.
    - include-roscore:
      (boolean)
      Whether or not to include roscore with the part. Defaults to true.
    - rosinstall-files:
      (list of strings)
      List of rosinstall files to merge while pulling. Paths are relative to
      the source.
    - recursive-rosinstall:
      (boolean)
      Whether or not to recursively merge/update rosinstall files from fetched
      sources. Will continue until all rosinstall files have been merged.
      Defaults to false.
    - catkin-cmake-args:
      (list of strings)
      Configure flags to pass onto the cmake invocation from catkin.
    - underlay:
      (object)
      Used to inform Snapcraft that this snap isn't standalone, and is
      actually overlaying a workspace from another snap via content sharing.
      Made up of two properties:
      - build-path:
        (string)
        Build-time path to existing workspace to underlay the one being built,
        for example '$SNAPCRAFT_STAGE/opt/ros/kinetic'.
      - run-path:
        (string)
        Run-time path of the underlay workspace (e.g. a subdirectory of the
        content interface's 'target' attribute.)
    - catkin-ros-master-uri:
      (string)
      The URI to ros master setting the env variable ROS_MASTER_URI. Defaults
      to http://localhost:11311.
"""

import contextlib
import glob
import logging
import os
import pathlib
import re
import shlex
import shutil
import subprocess
import tempfile
import textwrap
from typing import TYPE_CHECKING, List, Set

from snapcraft import file_utils, formatting_utils
from snapcraft.internal import common, errors, mangling, os_release, repo
from snapcraft.internal.meta.package_repository import (
    PackageRepository,
    PackageRepositoryApt,
)
from snapcraft.plugins.v1 import PluginV1, _python, _ros

if TYPE_CHECKING:
    from snapcraft.project import Project


logger = logging.getLogger(__name__)

# Map bases to ROS releases
_BASE_TO_ROS_RELEASE_MAP = {"core": "kinetic", "core16": "kinetic", "core18": "melodic"}

# Map bases to Ubuntu releases
_BASE_TO_UBUNTU_RELEASE_MAP = {"core": "xenial", "core16": "xenial", "core18": "bionic"}

_SUPPORTED_DEPENDENCY_TYPES = {"apt", "pip"}


def _parse_cmake_arg(arg: str) -> str:
    # Parse cmake arg string that makes catkin happy.
    # The user can specify a list like:
    # catkin-cmake-args:
    # - -DSOMETHING=FOO
    # - -DCMAKE_C_FLAGS=-Wall -Werror
    # - -DCMAKE_CXX_FLAGS="-Wall -Werror"
    # Catkin can handle strings (1) and (2), but will fail on parsing (3)
    # because of the quotes. It will end up passing "-Wall -Werror" as a
    # single quoted string to c++. To work around this, we need to
    # evaluate the string like bash would. We can do this by using
    # shlex.split() and rejoining the string with spaces.
    # Examples:
    # No quotes.
    # >>> test = '-DCMAKE_C_FLAGS=-Wall -Werror'
    # >>> " ".join(shlex.split(test))
    # '-DCMAKE_C_FLAGS=-Wall -Werror'
    # Double quotes.
    # >>> test2 = '-DCMAKE_CXX_FLAGS="-Wall -Werror"'
    # >>> " ".join(shlex.split(test2))
    # '-DCMAKE_CXX_FLAGS=-Wall -Werror'
    # Single quotes.
    # >>> test3 = "-DCMAKE_CXX_FLAGS='-Wall -Werror'"
    # >>> " ".join(shlex.split(test3))
    # '-DCMAKE_CXX_FLAGS=-Wall -Werror'
    # Nested quotes.
    # >>> test4 = '-DCMAKE_CXX_FLAGS=\"-I\'/some/path with spaces\'\" -Wall -Werror'
    # >>> " ".join(shlex.split(test4))
    # "-DCMAKE_CXX_FLAGS=-I'/some/path with spaces' -Wall -Werror"
    return " ".join(shlex.split(arg))


class CatkinInvalidSystemDependencyError(errors.SnapcraftError):
    fmt = (
        "Package {dependency!r} isn't a valid system dependency. Did you "
        "forget to add it to catkin-packages? If not, add the Ubuntu package "
        "containing it to stage-packages until you can get it into the rosdep "
        "database."
    )

    def __init__(self, dependency):
        super().__init__(dependency=dependency)


class CatkinUnsupportedDependencyTypeError(errors.SnapcraftError):
    fmt = (
        "Package {dependency!r} resolved to an unsupported type of "
        "dependency: {dependency_type!r}."
    )

    def __init__(self, dependency_type, dependency):
        super().__init__(dependency_type=dependency_type, dependency=dependency)


class CatkinWorkspaceIsRootError(errors.SnapcraftError):
    fmt = "source-space cannot be the root of the Catkin workspace; use a subdirectory."


class CatkinCannotResolveRoscoreError(errors.SnapcraftError):
    fmt = "Failed to determine system dependency for roscore."


class CatkinAptDependencyFetchError(errors.SnapcraftError):
    fmt = "Failed to fetch apt dependencies: {message}"

    def __init__(self, message):
        super().__init__(message=message)


class CatkinNoHighestVersionPathError(errors.SnapcraftError):
    fmt = "Failed to determine highest path in {path!r}: nothing found."

    def __init__(self, path):
        super().__init__(path=path)


class CatkinGccVersionError(errors.SnapcraftError):
    fmt = "Failed to determine gcc version: {message}"

    def __init__(self, message):
        super().__init__(message=message)


class CatkinPackagePathNotFoundError(errors.SnapcraftError):
    fmt = "Failed to find package path: {path!r}"

    def __init__(self, path):
        super().__init__(path=path)


class CatkinPlugin(PluginV1):
    @classmethod
    def schema(cls):
        schema = super().schema()
        schema["properties"]["catkin-packages"] = {
            "type": "array",
            "minitems": 1,
            "uniqueItems": True,
            "items": {"type": "string"},
        }
        schema["properties"]["source-space"] = {"type": "string", "default": "src"}

        # The default is true since we expect most Catkin packages to be ROS
        # packages. The only reason one wouldn't want to include ROS in the
        # snap is if library snaps exist, which will still likely be the
        # minority.
        schema["properties"]["include-roscore"] = {"type": "boolean", "default": True}

        schema["properties"]["underlay"] = {
            "type": "object",
            "properties": {
                "build-path": {"type": "string"},
                "run-path": {"type": "string"},
            },
            "required": ["build-path", "run-path"],
        }

        schema["properties"]["rosinstall-files"] = {
            "type": "array",
            "minitems": 1,
            "uniqueItems": True,
            "items": {"type": "string"},
            "default": [],
        }

        schema["properties"]["recursive-rosinstall"] = {
            "type": "boolean",
            "default": False,
        }

        schema["properties"]["catkin-cmake-args"] = {
            "type": "array",
            "minitems": 1,
            "items": {"type": "string"},
            "default": [],
        }

        schema["properties"]["catkin-ros-master-uri"] = {
            "type": "string",
            "default": "http://localhost:11311",
        }

        schema["required"] = ["source"]

        return schema

    @classmethod
    def get_pull_properties(cls):
        # Inform Snapcraft of the properties associated with pulling. If these
        # change in the YAML Snapcraft will consider the pull step dirty.
        return [
            "catkin-packages",
            "source-space",
            "include-roscore",
            "underlay",
            "rosinstall-files",
            "recursive-rosinstall",
        ]

    @classmethod
    def get_build_properties(cls):
        # Inform Snapcraft of the properties associated with building. If these
        # change in the YAML Snapcraft will consider the build step dirty.
        return ["catkin-cmake-args"]

    @classmethod
    def get_required_package_repositories(cls) -> List[PackageRepository]:
        codename = os_release.OsRelease().version_codename()
        return [
            PackageRepositoryApt(
                formats=["deb"],
                components=["main"],
                key_id="C1CF6E31E6BADE8868B172B4F42ED6FBAB17C654",
                url="http://packages.ros.org/ros/ubuntu/",
                suites=[codename],
            )
        ]

    @property
    def _pip(self):
        if not self.__pip:
            self.__pip = _python.Pip(
                python_major_version="2",  # ROS1 only supports python2
                part_dir=self.partdir,
                install_dir=self.installdir,
                stage_dir=self.project.stage_dir,
            )

        return self.__pip

    def __init__(self, name, options, project):
        super().__init__(name, options, project)

        base = self.project._get_build_base()
        self._rosdistro = _BASE_TO_ROS_RELEASE_MAP[base]

        self.build_packages.extend(["gcc", "g++", "libc6-dev", "make", "python-pip"])
        self.__pip = None

        # roslib is the base requirement to actually create a workspace with
        # setup.sh and the necessary hooks.
        self.stage_packages.append("ros-{}-roslib".format(self._rosdistro))

        # Get a unique set of packages
        self.catkin_packages = None
        if options.catkin_packages is not None:
            self.catkin_packages = set(options.catkin_packages)
        self.stage_packages_path = pathlib.Path(self.partdir) / "catkin_stage_packages"
        self._rosdep_path = os.path.join(self.partdir, "rosdep")
        self._catkin_path = os.path.join(self.partdir, "catkin")
        self._wstool_path = os.path.join(self.partdir, "wstool")

        # The path created via the `source` key (or a combination of `source`
        # and `source-subdir` keys) needs to point to a valid Catkin workspace
        # containing another subdirectory called the "source space." By
        # default, this is a directory named "src," but it can be remapped via
        # the `source-space` key. It's important that the source space is not
        # the root of the Catkin workspace, since Catkin won't work that way
        # and it'll create a circular link that causes rosdep to hang.
        if self.options.source_subdir:
            self._ros_package_path = os.path.join(
                self.sourcedir, self.options.source_subdir, self.options.source_space
            )
        else:
            self._ros_package_path = os.path.join(
                self.sourcedir, self.options.source_space
            )

        if os.path.abspath(self.sourcedir) == os.path.abspath(self._ros_package_path):
            raise CatkinWorkspaceIsRootError()

    def env(self, root):
        """Runtime environment for ROS binaries and services."""

        paths = common.get_library_paths(root, self.project.arch_triplet)
        ld_library_path = formatting_utils.combine_paths(
            paths, prepend="", separator=":"
        )

        env = [
            # This environment variable tells ROS nodes where to find ROS
            # master. It does not affect ROS master, however-- this is just the
            # URI.
            "ROS_MASTER_URI={}".format(self.options.catkin_ros_master_uri),
            # Various ROS tools (e.g. roscore) keep a cache or a log,
            # and use $ROS_HOME to determine where to put them.
            "ROS_HOME=${SNAP_USER_DATA:-/tmp}/ros",
            # FIXME: LP: #1576411 breaks ROS snaps on the desktop, so we'll
            # temporarily work around that bug by forcing the locale to
            # C.UTF-8.
            "LC_ALL=C.UTF-8",
            # The Snapcraft Core will ensure that we get a good LD_LIBRARY_PATH
            # overall, but it defines it after this function runs. Some ROS
            # tools will cause binaries to be run when we source the setup.sh,
            # below, so we need to have a sensible LD_LIBRARY_PATH before then.
            "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{}".format(ld_library_path),
        ]

        # There's a chicken and egg problem here: everything run gets an
        # env built, even package installation, so the first runs for these
        # will likely fail.
        try:
            # The ROS packaging system tools (e.g. rospkg, etc.) don't go
            # into the ROS install path (/opt/ros/$distro), so we need the
            # PYTHONPATH to include the dist-packages in /usr/lib as well.
            #
            # Note: Empty segments in PYTHONPATH are interpreted as `.`, thus
            # adding the current working directory to the PYTHONPATH. That is
            # not desired in this situation, so take proper precautions when
            # expanding PYTHONPATH: only add it if it's not empty.
            env.append(
                "PYTHONPATH={}${{PYTHONPATH:+:$PYTHONPATH}}".format(
                    common.get_python2_path(root)
                )
            )
        except errors.SnapcraftEnvironmentError as e:
            logger.debug(e)

        # The setup.sh we source below requires the in-snap python. Here we
        # make sure it's in the PATH before it's run.
        env.append("PATH=$PATH:{}/usr/bin".format(root))

        if self.options.underlay:
            script = textwrap.dedent(
                """
                if [ -f {snapcraft_setup} ]; then
                    . {snapcraft_setup}
                fi
                """
            ).format(snapcraft_setup=os.path.join(self.rosdir, "snapcraft-setup.sh"))
        else:
            script = self._source_setup_sh(root, None)

        # Each of these lines is prepended with an `export` when the
        # environment is actually generated. In order to inject real shell code
        # we have to hack it in by appending it on the end of an item already
        # in the environment. FIXME: There should be a better way to do this.
        # LP: #1792034
        env[-1] = env[-1] + "\n\n" + script

        return env

    def pull(self):
        """Copy source into build directory and fetch dependencies.

        Catkin packages can specify their system dependencies in their
        package.xml. In order to support that, the Catkin packages are
        interrogated for their dependencies here. Since `stage-packages` are
        already installed by the time this function is run, the dependencies
        from the package.xml are pulled down explicitly.
        """

        super().pull()

        # There may be nothing contained within the source but a rosinstall
        # file. We need to use it to flesh out the workspace before continuing
        # with the pull.
        if self.options.rosinstall_files or self.options.recursive_rosinstall:
            wstool = _ros.wstool.Wstool(
                self._ros_package_path,
                self._wstool_path,
                self.project,
                self.project._get_build_base(),
            )
            wstool.setup()

            source_path = self.sourcedir
            if self.options.source_subdir:
                source_path = os.path.join(self.sourcedir, self.options.source_subdir)

            # Recursively handling rosinstall files is a superset of handling
            # individual rosinstall files. If both are specified, the recursive
            # option will cover it.
            if self.options.recursive_rosinstall:
                _recursively_handle_rosinstall_files(wstool, source_path)
            else:
                # The rosinstall files in the YAML are relative to the part's
                # source. However, _handle_rosinstall_files requires absolute
                # paths.
                rosinstall_files = set()
                for rosinstall_file in self.options.rosinstall_files:
                    rosinstall_files.add(os.path.join(source_path, rosinstall_file))
                _handle_rosinstall_files(wstool, rosinstall_files)

        # Make sure the package path exists before continuing. We only care
        # about doing this if there are actually packages to build, which is
        # indicated both by self.catkin_packages being None as well as a
        # non-empty list.
        packages_to_build = (
            self.catkin_packages is None or len(self.catkin_packages) > 0
        )
        if packages_to_build and not os.path.exists(self._ros_package_path):
            raise CatkinPackagePathNotFoundError(self._ros_package_path)

        # Validate the underlay. Note that this validation can't happen in
        # __init__ as the underlay will probably only be valid once a
        # dependency has been staged.
        catkin = None
        underlay_build_path = None
        dependency_workspaces = [self.rosdir]
        if self.options.underlay:
            underlay_build_path = self.options.underlay["build-path"]

        if underlay_build_path:
            if not os.path.isdir(underlay_build_path):
                raise errors.SnapcraftEnvironmentError(
                    "Requested underlay ({!r}) does not point to a valid "
                    "directory".format(underlay_build_path)
                )

            if not os.path.isfile(os.path.join(underlay_build_path, "setup.sh")):
                raise errors.SnapcraftEnvironmentError(
                    "Requested underlay ({!r}) does not contain a "
                    "setup.sh".format(underlay_build_path)
                )

            dependency_workspaces.append(underlay_build_path)
            self._generate_snapcraft_setup_sh(self.installdir, underlay_build_path)

        # Use catkin_find to discover dependencies already in the underlay
        catkin = _Catkin(
            self._rosdistro, dependency_workspaces, self._catkin_path, self.project
        )
        catkin.setup()

        # Use rosdep for dependency detection and resolution
        rosdep = _ros.rosdep.Rosdep(
            ros_distro=self._rosdistro,
            ros_version="1",
            ros_package_path=self._ros_package_path,
            rosdep_path=self._rosdep_path,
            ubuntu_distro=_BASE_TO_UBUNTU_RELEASE_MAP[self.project._get_build_base()],
            base=self.project._get_build_base(),
            target_arch=self.project._get_stage_packages_target_arch(),
        )
        rosdep.setup()

        self._setup_dependencies(rosdep, catkin)

    def _setup_dependencies(self, rosdep, catkin):
        # Parse the Catkin packages to pull out their system dependencies
        system_dependencies = _find_system_dependencies(
            self.catkin_packages, rosdep, catkin
        )

        # If the package requires roscore, resolve it into a system dependency
        # as well.
        if self.options.include_roscore:
            roscore = rosdep.resolve_dependency("ros_core")
            if roscore:
                for dependency_type, dependencies in roscore.items():
                    if dependency_type not in system_dependencies:
                        system_dependencies[dependency_type] = set()
                    system_dependencies[dependency_type] |= dependencies
            else:
                raise CatkinCannotResolveRoscoreError()

        # Pull down and install any apt dependencies that were discovered
        self._setup_apt_dependencies(system_dependencies.get("apt"))

        # Pull down and install any pip dependencies that were discovered
        self._setup_pip_dependencies(system_dependencies.get("pip"))

    def _setup_apt_dependencies(self, apt_dependencies):
        if not apt_dependencies:
            return

        logger.info("Installing apt dependencies...")
        try:
            repo.Ubuntu.fetch_stage_packages(
                package_names=apt_dependencies,
                stage_packages_path=self.stage_packages_path,
                base=self.project._get_build_base(),
                target_arch=self.project._get_stage_packages_target_arch(),
            )
        except repo.errors.PackageNotFoundError as e:
            raise CatkinAptDependencyFetchError(e.message)
        repo.Ubuntu.unpack_stage_packages(
            stage_packages_path=self.stage_packages_path,
            install_path=pathlib.Path(self.installdir),
        )

    def _setup_pip_dependencies(self, pip_dependencies):
        if pip_dependencies:
            self._pip.setup()

            logger.info("Fetching pip dependencies...")
            self._pip.download(pip_dependencies)

            logger.info("Installing pip dependencies...")
            self._pip.install(pip_dependencies)

    def clean_pull(self):
        super().clean_pull()

        # Remove the rosdep path, if any
        with contextlib.suppress(FileNotFoundError):
            shutil.rmtree(self._rosdep_path)

        # Remove the catkin path, if any
        with contextlib.suppress(FileNotFoundError):
            shutil.rmtree(self._catkin_path)

        # Remove the stage packages path, if any
        with contextlib.suppress(FileNotFoundError):
            shutil.rmtree(self.stage_packages_path)

        # Clean pip packages, if any
        self._pip.clean_packages()

    def _source_setup_sh(self, root, underlay_path):
        rosdir = os.path.join(root, "opt", "ros", self._rosdistro)
        if underlay_path:
            source_script = textwrap.dedent(
                """
                if [ -f {underlay_setup} ]; then
                    set -- --local
                    _CATKIN_SETUP_DIR={underlay} . {underlay_setup}
                    if [ -f {rosdir_setup} ]; then
                        set -- --local --extend
                        _CATKIN_SETUP_DIR={rosdir} . {rosdir_setup}
                    fi
                fi
                """
            ).format(
                underlay=underlay_path,
                underlay_setup=os.path.join(underlay_path, "setup.sh"),
                rosdir=rosdir,
                rosdir_setup=os.path.join(rosdir, "setup.sh"),
            )
        else:
            source_script = textwrap.dedent(
                """
                if [ -f {rosdir_setup} ]; then
                    set -- --local
                    _CATKIN_SETUP_DIR={rosdir} . {rosdir_setup}
                fi
                """
            ).format(rosdir=rosdir, rosdir_setup=os.path.join(rosdir, "setup.sh"))

        # We need to source ROS's setup.sh at this point. However, it accepts
        # arguments (thus will parse $@), and we really don't want it to, since
        # $@ in this context will be meant for the app being launched
        # (LP: #1660852). So we'll backup all args, source the setup.sh, then
        # restore all args for the wrapper's `exec` line.
        return textwrap.dedent(
            """
            # Shell quote arbitrary string by replacing every occurrence of '
            # with '\\'', then put ' at the beginning and end of the string.
            # Prepare yourself, fun regex ahead.
            quote() {{
                for i; do
                    printf %s\\\\n "$i" | sed "s/\'/\'\\\\\\\\\'\'/g;1s/^/\'/;\$s/\$/\' \\\\\\\\/"
                done
                echo " "
            }}

            BACKUP_ARGS=$(quote "$@")
            set --
            {}
            eval "set -- $BACKUP_ARGS"
            """  # noqa: W605
        ).format(
            source_script
        )  # noqa

    def _generate_snapcraft_setup_sh(self, root, underlay_path):
        script = self._source_setup_sh(root, underlay_path)
        os.makedirs(self.rosdir, exist_ok=True)
        with open(os.path.join(self.rosdir, "snapcraft-setup.sh"), "w") as f:
            f.write(script)

    @property
    def rosdir(self):
        return os.path.join(self.installdir, "opt", "ros", self._rosdistro)

    def build(self):
        """Build Catkin packages.

        This function runs some pre-build steps to prepare the sources for
        building in the Snapcraft environment, builds the packages via
        catkin_make_isolated, and finally runs some post-build clean steps
        to prepare the newly-minted install to be packaged as a .snap.
        """

        super().build()

        logger.info("Preparing to build Catkin packages...")
        self._prepare_build()

        logger.info("Building Catkin packages...")
        self._build_catkin_packages()

        logger.info("Cleaning up newly installed Catkin packages...")
        self._finish_build()

    def _prepare_build(self):
        self._use_in_snap_python()

        # Each Catkin package distributes .cmake files so they can be found via
        # find_package(). However, the Ubuntu packages pulled down as
        # dependencies contain .cmake files pointing to system paths (e.g.
        # /usr/lib, /usr/include, etc.). They need to be rewritten to point to
        # the install directory.
        def _new_path(path):
            if not path.startswith(self.installdir):
                # Not using os.path.join here as `path` is absolute.
                return self.installdir + path
            return path

        self._rewrite_cmake_paths(_new_path)

        # Also rewrite any occurrence of $SNAPCRAFT_STAGE to be our install
        # directory (this may be the case if stage-snaps were used).
        file_utils.replace_in_file(
            self.rosdir,
            re.compile(r".*Config.cmake$"),
            re.compile(r"\$ENV{SNAPCRAFT_STAGE}"),
            self.installdir,
        )

    def _rewrite_cmake_paths(self, new_path_callable):
        def _rewrite_paths(match):
            paths = match.group(1).strip().split(";")
            for i, path in enumerate(paths):
                # Offer the opportunity to rewrite this path if it's absolute.
                if os.path.isabs(path):
                    paths[i] = new_path_callable(path)
            return '"' + ";".join(paths) + '"'

        # Looking for any path-like string
        file_utils.replace_in_file(
            self.rosdir,
            re.compile(r".*Config.cmake$"),
            re.compile(r'"(.*?/.*?)"'),
            _rewrite_paths,
        )

    def _finish_build(self):
        self._use_in_snap_python()

        # We've finished the build, but we need to make sure we turn the cmake
        # files back into something that doesn't include our installdir. This
        # way it's usable from the staging area, and won't clash with the same
        # file coming from other parts.
        pattern = re.compile(r"^{}".format(self.installdir))

        def _new_path(path):
            return pattern.sub("$ENV{SNAPCRAFT_STAGE}", path)

        self._rewrite_cmake_paths(_new_path)

        # Replace the CMAKE_PREFIX_PATH in _setup_util.py
        setup_util_file = os.path.join(self.rosdir, "_setup_util.py")
        if os.path.isfile(setup_util_file):
            with open(setup_util_file, "r+") as f:
                pattern = re.compile(r"CMAKE_PREFIX_PATH = '.*/opt/ros.*")
                replaced = pattern.sub("CMAKE_PREFIX_PATH = []", f.read())
                f.seek(0)
                f.truncate()
                f.write(replaced)

        # Set the _CATKIN_SETUP_DIR in setup.sh to a sensible default, removing
        # our installdir (this way it doesn't clash with a setup.sh coming
        # from another part).
        setup_sh_file = os.path.join(self.rosdir, "setup.sh")
        if os.path.isfile(setup_sh_file):
            with open(setup_sh_file, "r+") as f:
                pattern = re.compile(r"\${_CATKIN_SETUP_DIR:=.*}")
                replaced = pattern.sub(
                    "${{_CATKIN_SETUP_DIR:=$SNAP/opt/ros/{}}}".format(self._rosdistro),
                    f.read(),
                )
                f.seek(0)
                f.truncate()
                f.write(replaced)

        if self.options.underlay:
            underlay_run_path = self.options.underlay["run-path"]
            self._generate_snapcraft_setup_sh("$SNAP", underlay_run_path)

        # If pip dependencies were installed, generate a sitecustomize that
        # allows access to them.
        if self._pip.is_setup() and self._pip.list(user=True):
            _python.generate_sitecustomize(
                "2", stage_dir=self.project.stage_dir, install_dir=self.installdir
            )

    def _use_in_snap_python(self):
        # Fix all shebangs to use the in-snap python.
        mangling.rewrite_python_shebangs(self.installdir)

        # Also replace all the /usr/bin/python calls in etc/catkin/profile.d/
        # files with the in-snap python
        profile_d_path = os.path.join(self.rosdir, "etc", "catkin", "profile.d")
        file_utils.replace_in_file(
            profile_d_path, re.compile(r""), re.compile(r"/usr/bin/python"), r"python"
        )

    def _parse_cmake_args(self):
        args: List[str] = list()
        for arg in self.options.catkin_cmake_args:
            cmake_arg = _parse_cmake_arg(arg)
            args.append(cmake_arg)
        return args

    def _build_catkin_packages(self):
        # Nothing to do if no packages were specified
        if self.catkin_packages is not None and len(self.catkin_packages) == 0:
            return

        catkincmd = ["catkin_make_isolated"]

        # Install the package
        catkincmd.append("--install")

        if self.catkin_packages:
            # Specify the packages to be built
            catkincmd.append("--pkg")
            catkincmd.extend(self.catkin_packages)

        # Don't clutter the real ROS workspace-- use the Snapcraft build
        # directory
        catkincmd.extend(["--directory", self.builddir])

        # Account for a non-default source space by always specifying it
        catkincmd.extend(
            ["--source-space", os.path.join(self.builddir, self.options.source_space)]
        )

        # Specify that the package should be installed along with the rest of
        # the ROS distro.
        catkincmd.extend(["--install-space", self.rosdir])

        # Specify the number of workers
        catkincmd.append("-j{}".format(self.parallel_build_count))

        # All the arguments that follow are meant for CMake
        catkincmd.append("--cmake-args")

        build_type = "Release"
        if "debug" in self.options.build_attributes:
            build_type = "Debug"
        catkincmd.append("-DCMAKE_BUILD_TYPE={}".format(build_type))

        # Finally, add any cmake-args requested from the plugin options
        catkincmd.extend(self._parse_cmake_args())

        self.run(catkincmd)

    def snap_fileset(self):
        """Filter useless files out of the snap.

        - opt/ros/<rosdistro>/.rosinstall points to the part installdir, and
          isn't useful from the snap anyway.
        """
        fileset = super().snap_fileset()
        fileset.append(
            "-{}".format(os.path.join("opt", "ros", self._rosdistro, ".rosinstall"))
        )
        return fileset


def _find_system_dependencies(catkin_packages, rosdep, catkin):
    """Find system dependencies for a given set of Catkin packages."""

    resolved_dependencies = {}
    dependencies = set()

    logger.info("Determining system dependencies for Catkin packages...")
    if catkin_packages is not None:
        for package in catkin_packages:
            # Query rosdep for the list of dependencies for this package
            dependencies |= rosdep.get_dependencies(package)
    else:
        # Rather than getting dependencies for an explicit list of packages,
        # let's get the dependencies for the entire workspace.
        dependencies |= rosdep.get_dependencies()

    for dependency in dependencies:
        _resolve_package_dependencies(
            catkin_packages, dependency, catkin, rosdep, resolved_dependencies
        )

    # We currently have a nested dict structure of:
    #   dependency name -> package type -> package names
    #
    # We want to return a flattened dict of package type -> package names.
    flattened_dependencies = {}
    for dependency_types in resolved_dependencies.values():
        for key, value in dependency_types.items():
            if key not in flattened_dependencies:
                flattened_dependencies[key] = set()
            flattened_dependencies[key] |= value

    # Finally, return that dict of dependencies
    return flattened_dependencies


def _resolve_package_dependencies(
    catkin_packages, dependency, catkin, rosdep, resolved_dependencies
):
    # No need to resolve this dependency if we know it's local, or if
    # we've already resolved it into a system dependency
    if dependency in resolved_dependencies or (
        catkin_packages and dependency in catkin_packages
    ):
        return

    if _dependency_is_in_underlay(catkin, dependency):
        # Package was found-- don't pull anything extra to satisfy
        # this dependency.
        logger.debug("Satisfied dependency {!r} in underlay".format(dependency))
        return

    # In this situation, the package depends on something that we
    # weren't instructed to build. It's probably a system dependency,
    # but the developer could have also forgotten to tell us to build
    # it.
    try:
        these_dependencies = rosdep.resolve_dependency(dependency)
    except _ros.rosdep.RosdepDependencyNotResolvedError:
        raise CatkinInvalidSystemDependencyError(dependency)

    for key, value in these_dependencies.items():
        if key not in _SUPPORTED_DEPENDENCY_TYPES:
            raise CatkinUnsupportedDependencyTypeError(key, dependency)

        resolved_dependencies[dependency] = {key: value}


def _dependency_is_in_underlay(catkin, dependency):
    if catkin:
        # Before trying to resolve this dependency into a system
        # dependency, see if it's already in the underlay.
        try:
            catkin.find(dependency)
        except CatkinPackageNotFoundError:
            # No package by that name is available
            pass
        else:
            return True
    return False


def _handle_rosinstall_files(wstool, rosinstall_files):
    """Merge given rosinstall files into our workspace."""

    for rosinstall_file in rosinstall_files:
        logger.info("Merging {}".format(rosinstall_file))
        wstool.merge(rosinstall_file)

    logger.info("Updating workspace...")
    wstool.update()


def _recursively_handle_rosinstall_files(wstool, source_path, *, cache=None):
    """Recursively find and merge rosinstall files and update workspace."""

    rosinstall_files: Set[str] = set()
    if not cache:
        cache: Set[str] = set()

    # Walk the entire source directory looking for rosinstall files. Keep track
    # of any we haven't seen previously.
    for root, directories, files in os.walk(source_path):
        for file_name in files:
            path = os.path.join(root, file_name)
            if path.endswith(".rosinstall") and path not in cache:
                rosinstall_files.add(path)

    # If we came across previously-unseen rosinstall files, add them to the
    # cache. Then handle them (merge/update). Finally, walk again. Do this
    # until no new rosinstall files are discovered.
    if rosinstall_files:
        cache.update(rosinstall_files)
        _handle_rosinstall_files(wstool, rosinstall_files)
        _recursively_handle_rosinstall_files(wstool, source_path, cache=cache)


class CatkinPackageNotFoundError(errors.SnapcraftError):
    fmt = "Unable to find Catkin package {package_name!r}"

    def __init__(self, package_name):
        super().__init__(package_name=package_name)


class _Catkin:
    def __init__(
        self,
        ros_distro: str,
        workspaces: List[str],
        catkin_path: str,
        project: "Project",
    ) -> None:
        self._ros_distro = ros_distro
        self._workspaces = workspaces
        self._catkin_path = catkin_path
        self._project = project

        self._catkin_install_path = os.path.join(self._catkin_path, "install")
        self._catkin_stage_packages_path = (
            pathlib.Path(self._catkin_path) / "stage_packages"
        )

    def setup(self):
        os.makedirs(self._catkin_install_path, exist_ok=True)

        # With the introduction of an underlay, we no longer know where Catkin
        # is. Let's just fetch/unpack our own, and use it.
        logger.info("Installing catkin...")
        repo.Ubuntu.fetch_stage_packages(
            package_names=["ros-{}-catkin".format(self._ros_distro)],
            stage_packages_path=self._catkin_stage_packages_path,
            base=self._project._get_build_base(),
            target_arch=self._project._get_stage_packages_target_arch(),
        )
        repo.Ubuntu.unpack_stage_packages(
            stage_packages_path=self._catkin_stage_packages_path,
            install_path=pathlib.Path(self._catkin_install_path),
        )

    def find(self, package_name):
        with contextlib.suppress(subprocess.CalledProcessError):
            path = self._run(["--first-only", package_name]).strip()
            # Not a valid find if the package resolves into our own catkin
            # workspace. That won't be transitioned into the snap.
            if not path.startswith(self._catkin_install_path):
                return path

        raise CatkinPackageNotFoundError(package_name)

    def _run(self, arguments):
        with tempfile.NamedTemporaryFile(mode="w+") as f:
            lines = [
                "export PYTHONPATH={}".format(
                    os.path.join(
                        self._catkin_install_path,
                        "usr",
                        "lib",
                        "python2.7",
                        "dist-packages",
                    )
                )
            ]

            ros_path = os.path.join(
                self._catkin_install_path, "opt", "ros", self._ros_distro
            )
            bin_paths = (
                os.path.join(ros_path, "bin"),
                os.path.join(self._catkin_install_path, "usr", "bin"),
            )
            lines.append(
                "export {}".format(
                    formatting_utils.format_path_variable(
                        "PATH", bin_paths, prepend="", separator=":"
                    )
                )
            )

            # Source our own workspace so we have all of Catkin's dependencies,
            # then source the workspace we're actually supposed to be crawling.
            lines.append(
                "_CATKIN_SETUP_DIR={} source {} --local".format(
                    ros_path, os.path.join(ros_path, "setup.sh")
                )
            )
            for workspace in self._workspaces:
                lines.append(
                    "_CATKIN_SETUP_DIR={} source {} --local --extend".format(
                        workspace, os.path.join(workspace, "setup.sh")
                    )
                )

            lines.append('exec "$@"')
            f.write("\n".join(lines))
            f.flush()

            return (
                subprocess.check_output(
                    ["/bin/bash", f.name, "catkin_find"] + arguments,
                    stderr=subprocess.STDOUT,
                )
                .decode("utf8")
                .strip()
            )


def _get_highest_version_path(path):
    paths = sorted(glob.glob(os.path.join(path, "*")))
    if not paths:
        raise CatkinNoHighestVersionPathError(path)

    return paths[-1]
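The quoting workaround that _parse_cmake_arg implements above is easiest to verify in isolation. A minimal, self-contained sketch (plain stdlib Python, not part of the plugin) that reproduces the examples from its comment:

import shlex


def parse_cmake_arg(arg: str) -> str:
    # Re-join the shell-tokenized pieces so quoted segments lose the quotes
    # that catkin would otherwise forward verbatim to the compiler.
    return " ".join(shlex.split(arg))


assert parse_cmake_arg("-DCMAKE_C_FLAGS=-Wall -Werror") == "-DCMAKE_C_FLAGS=-Wall -Werror"
assert (
    parse_cmake_arg('-DCMAKE_CXX_FLAGS="-Wall -Werror"')
    == "-DCMAKE_CXX_FLAGS=-Wall -Werror"
)
assert (
    parse_cmake_arg("-DCMAKE_CXX_FLAGS='-Wall -Werror'")
    == "-DCMAKE_CXX_FLAGS=-Wall -Werror"
)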
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2019 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import os
import pathlib
import re
import shutil
import subprocess
from unittest import mock

import pytest
import testtools
from testtools.matchers import Equals

from snapcraft import file_utils
from snapcraft.internal import common, errors
from tests import fixture_setup, unit


class TestReplaceInFile:

    scenarios = [
        (
            "2to3",
            {
                "file_path": os.path.join("bin", "2to3"),
                "contents": "#!/foo/bar/baz/python",
                "expected": "#!/usr/bin/env python",
            },
        ),
        (
            "snapcraft",
            {
                "file_path": os.path.join("bin", "snapcraft"),
                "contents": "#!/foo/baz/python",
                "expected": "#!/usr/bin/env python",
            },
        ),
        (
            "foo",
            {
                "file_path": os.path.join("bin", "foo"),
                "contents": "foo",
                "expected": "foo",
            },
        ),
    ]

    def test_replace_in_file(self, tmp_work_path, file_path, contents, expected):
        (tmp_work_path / "bin").mkdir()

        with open(file_path, "w") as f:
            f.write(contents)

        file_utils.replace_in_file(
            "bin", re.compile(r""), re.compile(r"#!.*python"), r"#!/usr/bin/env python"
        )

        with open(file_path, "r") as f:
            assert f.read() == expected


class TestLinkOrCopyTree(unit.TestCase):
    def setUp(self):
        super().setUp()

        os.makedirs("foo/bar/baz")
        open("1", "w").close()
        open(os.path.join("foo", "2"), "w").close()
        open(os.path.join("foo", "bar", "3"), "w").close()
        open(os.path.join("foo", "bar", "baz", "4"), "w").close()

    def test_link_file_to_file_raises(self):
        raised = self.assertRaises(
            errors.SnapcraftEnvironmentError, file_utils.link_or_copy_tree, "1", "qux"
        )
        self.assertThat(str(raised), Equals("'1' is not a directory"))

    def test_link_file_into_directory(self):
        os.mkdir("qux")
        raised = self.assertRaises(
            errors.SnapcraftEnvironmentError, file_utils.link_or_copy_tree, "1", "qux"
        )
        self.assertThat(str(raised), Equals("'1' is not a directory"))

    def test_link_directory_to_directory(self):
        file_utils.link_or_copy_tree("foo", "qux")
        self.assertTrue(os.path.isfile(os.path.join("qux", "2")))
        self.assertTrue(os.path.isfile(os.path.join("qux", "bar", "3")))
        self.assertTrue(os.path.isfile(os.path.join("qux", "bar", "baz", "4")))

    def test_link_directory_overwrite_file_raises(self):
        open("qux", "w").close()
        raised = self.assertRaises(
            errors.SnapcraftEnvironmentError, file_utils.link_or_copy_tree, "foo", "qux"
        )
        self.assertThat(
            str(raised),
            Equals("Cannot overwrite non-directory 'qux' with directory 'foo'"),
        )

    def test_link_subtree(self):
        file_utils.link_or_copy_tree("foo/bar", "qux")
        self.assertTrue(os.path.isfile(os.path.join("qux", "3")))
        self.assertTrue(os.path.isfile(os.path.join("qux", "baz", "4")))

    def test_link_symlink_to_file(self):
        # Create a symlink to a file
        os.symlink("2", os.path.join("foo", "2-link"))
        file_utils.link_or_copy_tree("foo", "qux")

        # Verify that the symlink remains a symlink
        self.assertThat(os.path.join("qux", "2-link"), unit.LinkExists("2"))

    def test_link_symlink_to_dir(self):
        os.symlink("bar", os.path.join("foo", "bar-link"))
        file_utils.link_or_copy_tree("foo", "qux")

        # Verify that the symlink remains a symlink
        self.assertThat(os.path.join("qux", "bar-link"), unit.LinkExists("bar"))


class TestLinkOrCopy(unit.TestCase):
    def setUp(self):
        super().setUp()

        os.makedirs("foo/bar/baz")
        open("1", "w").close()
        open(os.path.join("foo", "2"), "w").close()
        open(os.path.join("foo", "bar", "3"), "w").close()
        open(os.path.join("foo", "bar", "baz", "4"), "w").close()

    def test_link_file_ioerror(self):
        orig_link = os.link

        def link_and_ioerror(a, b, **kwargs):
            orig_link(a, b)
            raise IOError()

        with mock.patch("os.link") as mock_link:
            mock_link.side_effect = link_and_ioerror
            file_utils.link_or_copy("1", "foo/1")

    def test_copy_nested_file(self):
        file_utils.link_or_copy("foo/bar/baz/4", "foo2/bar/baz/4")
        self.assertTrue(os.path.isfile("foo2/bar/baz/4"))


class RequiresCommandSuccessTestCase(unit.TestCase):
    @mock.patch("subprocess.check_call")
    def test_requires_command_works(self, mock_check_call):
        mock_check_call.side_effect = [None]
        file_utils.requires_command_success("foo").__enter__()
        mock_check_call.assert_called_once_with(
            ["foo"], stderr=subprocess.PIPE, stdout=subprocess.PIPE
        )

    @mock.patch("subprocess.check_call")
    def test_requires_command_multiple_args(self, mock_check_call):
        mock_check_call.side_effect = [None]
        file_utils.requires_command_success("foo bar baz").__enter__()
        mock_check_call.assert_called_once_with(
            ["foo", "bar", "baz"], stderr=subprocess.PIPE, stdout=subprocess.PIPE
        )

    @mock.patch("subprocess.check_call")
    def test_requires_command_success_not_found(self, mock_check_call):
        mock_check_call.side_effect = [FileNotFoundError()]
        raised = self.assertRaises(
            errors.RequiredCommandNotFound,
            file_utils.requires_command_success("foo").__enter__,
        )
        self.assertIsInstance(raised, errors.SnapcraftError)
        self.assertThat(str(raised), Equals("'foo' not found."))

    @mock.patch("subprocess.check_call")
    def test_requires_command_success_error(self, mock_check_call):
        mock_check_call.side_effect = [subprocess.CalledProcessError(1, "x")]
        raised = self.assertRaises(
            errors.RequiredCommandFailure,
            file_utils.requires_command_success("foo").__enter__,
        )
        self.assertIsInstance(raised, errors.SnapcraftError)
        self.assertThat(str(raised), Equals("'foo' failed."))

    def test_requires_command_success_broken(self):
        raised = self.assertRaises(
            TypeError, file_utils.requires_command_success(1).__enter__
        )
        self.assertThat(str(raised), Equals("command must be a string."))

    @mock.patch("subprocess.check_call")
    def test_requires_command_success_custom_error(self, mock_check_call):
        mock_check_call.side_effect = [
            FileNotFoundError(),
            subprocess.CalledProcessError(1, "x"),
        ]
        raised = self.assertRaises(
            errors.RequiredCommandNotFound,
            file_utils.requires_command_success(
                "foo", not_found_fmt="uhm? {cmd_list!r} -> {command}"
            ).__enter__,
        )
        self.assertThat(str(raised), Equals("uhm? ['foo'] -> foo"))

        raised = self.assertRaises(
            errors.RequiredCommandFailure,
            file_utils.requires_command_success(
                "foo", failure_fmt="failed {cmd_list!r} -> {command}"
            ).__enter__,
        )
        self.assertThat(str(raised), Equals("failed ['foo'] -> foo"))


class RequiresPathExistsTestCase(unit.TestCase):
    def setUp(self):
        super(RequiresPathExistsTestCase, self).setUp()
        with open("bar", "w") as fd:
            fd.write("test")

    def test_requires_path_exists_works(self):
        file_utils.requires_path_exists("bar").__enter__()

    def test_requires_path_exists_fails(self):
        raised = self.assertRaises(
            errors.RequiredPathDoesNotExist,
            file_utils.requires_path_exists("foo").__enter__,
        )
        self.assertIsInstance(raised, errors.SnapcraftError)
        self.assertThat(str(raised), Equals("Required path does not exist: 'foo'"))

    def test_requires_path_exists_custom_error(self):
        raised = self.assertRaises(
            errors.RequiredPathDoesNotExist,
            file_utils.requires_path_exists(
                "foo", error_fmt="what? {path!r}"
            ).__enter__,
        )
        self.assertThat(str(raised), Equals("what? 'foo'"))


class TestGetLinkerFromFile(unit.TestCase):
    def test_get_linker_version_from_basename(self):
        self.assertThat(
            file_utils.get_linker_version_from_file("ld-2.26.so"), Equals("2.26")
        )

    def test_get_linker_version_from_path(self):
        self.assertThat(
            file_utils.get_linker_version_from_file("/lib/x86/ld-2.23.so"),
            Equals("2.23"),
        )


class TestGetLinkerFromFileErrors(unit.TestCase):
    def test_bad_file_format_linker_raises_exception(self):
        self.assertRaises(
            errors.SnapcraftEnvironmentError,
            file_utils.get_linker_version_from_file,
            linker_file="lib64/ld-linux-x86-64.so.2",
        )


_BIN_PATHS = [
    os.path.join("usr", "local", "sbin"),
    os.path.join("usr", "local", "bin"),
    os.path.join("usr", "sbin"),
    os.path.join("usr", "bin"),
    os.path.join("sbin"),
    os.path.join("bin"),
]


class TestGetToolPath:

    scenarios = [
        (i, dict(tool_path=pathlib.Path(i) / "tool-command")) for i in _BIN_PATHS
    ]

    def test_get_tool_from_host_path(self, monkeypatch, tool_path, fake_exists):
        abs_tool_path = pathlib.Path("/") / tool_path
        fake_exists.paths = [abs_tool_path]
        monkeypatch.setattr(shutil, "which", lambda x: abs_tool_path.as_posix())

        assert file_utils.get_snap_tool_path("tool-command") == abs_tool_path.as_posix()

    def test_get_tool_from_snapcraft_snap_path(self, in_snap, tool_path, fake_exists):
        abs_tool_path = pathlib.Path("/snap/snapcraft/current") / tool_path
        fake_exists.paths = [abs_tool_path]

        assert file_utils.get_snap_tool_path("tool-command") == abs_tool_path.as_posix()

    def test_get_tool_from_docker_snap_path(
        self, monkeypatch, in_snap, tool_path, fake_exists
    ):
        abs_tool_path = pathlib.Path("/snap/snapcraft/current") / tool_path
        fake_exists.paths = [abs_tool_path]
        monkeypatch.setattr(common, "is_process_container", lambda: True)

        assert file_utils.get_snap_tool_path("tool-command") == abs_tool_path.as_posix()


def test_get_host_tool_finds_command(monkeypatch):
    monkeypatch.setattr(shutil, "which", lambda x: "/usr/bin/foo")

    assert file_utils.get_host_tool_path(command_name="foo", package_name="foo")


def test_get_host_tool_failure(monkeypatch):
    monkeypatch.setattr(shutil, "which", lambda x: None)

    with pytest.raises(errors.SnapcraftHostToolNotFoundError) as error:
        file_utils.get_host_tool_path(command_name="foo", package_name="foo-pkg")

    # pytest.raises yields an ExceptionInfo; the raised error lives on .value.
    assert error.value.command_name == "foo"
    assert error.value.package_name == "foo-pkg"


class GetToolPathErrorsTest(testtools.TestCase):
    def test_get_snap_tool_path_fails(self):
        self.assertRaises(
            errors.ToolMissingError,
            file_utils.get_snap_tool_path,
            "non-existent-tool-command",
        )

    def test_get_snap_tool_path_in_container_fails_root(self):
        self.useFixture(fixture_setup.FakeSnapcraftIsASnap())
        self.assertRaises(
            errors.ToolMissingError,
            file_utils.get_snap_tool_path,
            "non-existent-tool-command",
        )
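The TestReplaceInFile scenarios above reduce to a single regex substitution. A stdlib-only sketch of the rewrite they assert; rewrite_shebang is an illustrative name, and the real work is done by file_utils.replace_in_file:

import re

_SHEBANG = re.compile(r"#!.*python")


def rewrite_shebang(contents: str) -> str:
    # Point any "#!<path>/python" interpreter line at /usr/bin/env python;
    # content without a python shebang passes through unchanged.
    return _SHEBANG.sub("#!/usr/bin/env python", contents)


assert rewrite_shebang("#!/foo/bar/baz/python") == "#!/usr/bin/env python"
assert rewrite_shebang("foo") == "foo"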
chipaca/snapcraft
tests/unit/test_file_utils.py
snapcraft/plugins/v1/catkin.py
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import doctest

from snapcraft.internal.project_loader.grammar_processing import (
    _global_grammar_processor as processor,
)


def load_tests(loader, tests, ignore):
    tests.addTests(doctest.DocTestSuite(processor))
    return tests
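For context, load_tests is the standard unittest hook this module relies on to fold a module's doctests into the discovered suite. A generic, runnable sketch of the same pattern against a local function; the add example is illustrative, not snapcraft code:

import doctest
import unittest


def add(a, b):
    """Return the sum of a and b.

    >>> add(2, 3)
    5
    """
    return a + b


def load_tests(loader, tests, ignore):
    # unittest calls this hook during discovery; DocTestSuite() with no
    # arguments collects the doctests of the calling module (add() above).
    tests.addTests(doctest.DocTestSuite())
    return tests


if __name__ == "__main__":
    unittest.main()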
chipaca/snapcraft
tests/unit/test_file_utils.py
tests/unit/project_loader/grammar_processing/test_global_grammar_processor.py
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import fixtures
from testtools.matchers import Equals

import snapcraft
from tests import unit


class VersionTestCase(unit.TestCase):
    def test_version_from_snap(self):
        self.useFixture(fixtures.EnvironmentVariable("SNAP_NAME", "snapcraft"))
        self.useFixture(fixtures.EnvironmentVariable("SNAP_VERSION", "3.14"))
        self.assertThat(snapcraft._get_version(), Equals("3.14"))
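VersionTestCase pins down a single behaviour: when snapcraft runs out of its own snap, the reported version comes straight from SNAP_VERSION. A standalone stand-in that mirrors the assertion; get_version here is hypothetical, the real lookup is snapcraft._get_version:

import os


def get_version(fallback: str = "unknown") -> str:
    # Inside a snap, snapd exports SNAP_NAME and SNAP_VERSION; only trust
    # SNAP_VERSION when we are actually the snapcraft snap.
    if os.environ.get("SNAP_NAME") == "snapcraft":
        return os.environ.get("SNAP_VERSION", fallback)
    return fallback


os.environ["SNAP_NAME"] = "snapcraft"
os.environ["SNAP_VERSION"] = "3.14"
assert get_version() == "3.14"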
chipaca/snapcraft
tests/unit/test_file_utils.py
tests/unit/test_init.py
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import logging
from textwrap import dedent

import fixtures
from testtools.matchers import Equals, HasLength

from snapcraft.internal import errors, mountinfo
from tests import unit


class MountInfoTestCase(unit.TestCase):
    def _write_mountinfo(self, contents):
        path = "mountinfo"
        with open(path, "w") as f:
            f.write(contents)
        return path

    def test_mountinfo_by_root(self):
        mounts = mountinfo.MountInfo(
            mountinfo_file=self._write_mountinfo(
                dedent(
                    """\
                    23 28 0:4 / /proc rw,nosuid,nodev,noexec,relatime shared:14 - proc proc rw
                    1341 28 7:6 / /snap/snapcraft/1 ro,nodev,relatime shared:39 - squashfs /dev/loop6 ro
                    1455 28 253:0 /test-snap/prime /snap/test-snap/x1 ro,relatime shared:1 - ext4 /dev/mapper/foo rw,errors=remount-ro,data=ordered
                    """
                )
            )
        )  # noqa

        root_mounts = mounts.for_root("/")
        for mount_point in ("/proc", "/snap/snapcraft/1"):
            self.assertTrue(
                any(m for m in root_mounts if m.mount_point == mount_point),
                "Expected {!r} to be included in root mounts".format(mount_point),
            )

        test_snap_mounts = mounts.for_root("/test-snap/prime")
        self.assertThat(test_snap_mounts, HasLength(1))
        self.expectThat(test_snap_mounts[0].mount_point, Equals("/snap/test-snap/x1"))

    def test_mountinfo_by_mount_point(self):
        mounts = mountinfo.MountInfo(
            mountinfo_file=self._write_mountinfo(
                dedent(
                    """\
                    23 28 0:4 / /proc rw,nosuid,nodev,noexec,relatime shared:14 - proc proc rw
                    1341 28 7:6 / /snap/snapcraft/1 ro,nodev,relatime shared:39 - squashfs /dev/loop6 ro
                    1455 28 253:0 /test-snap/prime /snap/test-snap/x1 ro,relatime shared:1 - ext4 /dev/mapper/foo rw,errors=remount-ro,data=ordered
                    """
                )
            )
        )  # noqa

        mount = mounts.for_mount_point("/proc")
        self.assertThat(mount.mount_id, Equals("23"))
        self.assertThat(mount.parent_id, Equals("28"))
        self.assertThat(mount.st_dev, Equals("0:4"))
        self.assertThat(mount.root, Equals("/"))
        self.assertThat(mount.mount_point, Equals("/proc"))
        self.assertThat(mount.mount_options, Equals("rw,nosuid,nodev,noexec,relatime"))
        self.assertThat(mount.optional_fields, Equals(["shared:14"]))
        self.assertThat(mount.filesystem_type, Equals("proc"))
        self.assertThat(mount.mount_source, Equals("proc"))
        self.assertThat(mount.super_options, Equals("rw"))

        mount = mounts.for_mount_point("/snap/snapcraft/1")
        self.assertThat(mount.mount_id, Equals("1341"))
        self.assertThat(mount.parent_id, Equals("28"))
        self.assertThat(mount.st_dev, Equals("7:6"))
        self.assertThat(mount.root, Equals("/"))
        self.assertThat(mount.mount_point, Equals("/snap/snapcraft/1"))
        self.assertThat(mount.mount_options, Equals("ro,nodev,relatime"))
        self.assertThat(mount.optional_fields, Equals(["shared:39"]))
        self.assertThat(mount.filesystem_type, Equals("squashfs"))
        self.assertThat(mount.mount_source, Equals("/dev/loop6"))
        self.assertThat(mount.super_options, Equals("ro"))

        mount = mounts.for_mount_point("/snap/test-snap/x1")
        self.assertThat(mount.mount_id, Equals("1455"))
        self.assertThat(mount.parent_id, Equals("28"))
        self.assertThat(mount.st_dev, Equals("253:0"))
        self.assertThat(mount.root, Equals("/test-snap/prime"))
        self.assertThat(mount.mount_point, Equals("/snap/test-snap/x1"))
        self.assertThat(mount.mount_options, Equals("ro,relatime"))
        self.assertThat(mount.optional_fields, Equals(["shared:1"]))
        self.assertThat(mount.filesystem_type, Equals("ext4"))
        self.assertThat(mount.mount_source, Equals("/dev/mapper/foo"))
        self.assertThat(
            mount.super_options, Equals("rw,errors=remount-ro,data=ordered")
        )

    def test_mountinfo_missing_root(self):
        mounts = mountinfo.MountInfo(mountinfo_file=self._write_mountinfo(""))
        raised = self.assertRaises(
            errors.RootNotMountedError, mounts.for_root, "test-root"
        )
        self.assertThat(raised.root, Equals("test-root"))

    def test_mountinfo_missing_mount_point(self):
        mounts = mountinfo.MountInfo(mountinfo_file=self._write_mountinfo(""))
        raised = self.assertRaises(
            errors.MountPointNotFoundError, mounts.for_mount_point, "test-root"
        )
        self.assertThat(raised.mount_point, Equals("test-root"))

    def test_invalid_mountinfo(self):
        self.fake_logger = fixtures.FakeLogger(level=logging.WARN)
        self.useFixture(self.fake_logger)

        mountinfo.MountInfo(mountinfo_file=self._write_mountinfo(dedent("I'm invalid")))

        # Assert that a warning was logged
        self.assertThat(
            self.fake_logger.output,
            Equals("Unable to parse mountinfo row: I'm invalid\n"),
        )
chipaca/snapcraft
tests/unit/test_file_utils.py
tests/unit/test_mountinfo.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst

from io import StringIO

import numpy as np

from astropy.io import ascii

from .common import assert_equal


def test_types_from_dat():
    converters = {'a': [ascii.convert_numpy(float)],
                  'e': [ascii.convert_numpy(str)]}
    dat = ascii.read(['a b c d e', '1 1 cat 2.1 4.2'],
                     Reader=ascii.Basic, converters=converters)

    assert dat['a'].dtype.kind == 'f'
    assert dat['b'].dtype.kind == 'i'
    assert dat['c'].dtype.kind in ('S', 'U')
    assert dat['d'].dtype.kind == 'f'
    assert dat['e'].dtype.kind in ('S', 'U')


def test_rdb_write_types():
    dat = ascii.read(['a b c d', '1 1.0 cat 2.1'], Reader=ascii.Basic)
    out = StringIO()
    ascii.write(dat, out, Writer=ascii.Rdb)
    outs = out.getvalue().splitlines()
    assert_equal(outs[1], 'N\tN\tS\tN')


def test_ipac_read_types():
    table = r"""\
|   ra    |   dec   |   sai   |-----v2---|   sptype  |
|   real  |   float |    l    |   real   |   char    |
|   unit  |   unit  |   unit  |   unit   |   ergs    |
|   null  |   null  |   null  |   null   |   -999    |
  2.09708   2956      73765     2.06000    B8IVpMnHg
"""
    reader = ascii.get_reader(Reader=ascii.Ipac)
    reader.read(table)
    types = [ascii.FloatType,
             ascii.FloatType,
             ascii.IntType,
             ascii.FloatType,
             ascii.StrType]
    for (col, expected_type) in zip(reader.cols, types):
        assert_equal(col.type, expected_type)


def test_col_dtype_in_custom_class():
    """Test code in BaseOutputter._convert_vals to handle Column.dtype
    attribute. See discussion in #11895."""
    dtypes = [np.float32, np.int8, np.int16]

    class TestDtypeHeader(ascii.BasicHeader):
        def get_cols(self, lines):
            super().get_cols(lines)
            for col, dtype in zip(self.cols, dtypes):
                col.dtype = dtype

    class TestDtype(ascii.Basic):
        """
        Basic table Data Reader with data types float32, int8, int16
        """
        header_class = TestDtypeHeader

    txt = """
    a b c
    1 2 3
    """

    reader = ascii.get_reader(TestDtype)
    t = reader.read(txt)
    for col, dtype in zip(t.itercols(), dtypes):
        assert col.dtype.type is dtype
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst

from copy import deepcopy

import numpy as np
import pytest
import re

from astropy import units as u
from astropy.units import allclose
from astropy.tests.helper import assert_quantity_allclose as assert_allclose
from astropy.utils.exceptions import AstropyWarning
from astropy.time import Time
from astropy.coordinates import (
    EarthLocation,
    galactocentric_frame_defaults,
    representation as r,
    SkyCoord,
)
from astropy.coordinates.attributes import (
    Attribute,
    CoordinateAttribute,
    DifferentialAttribute,
    EarthLocationAttribute,
    QuantityAttribute,
    TimeAttribute,
)
from astropy.coordinates.baseframe import (
    BaseCoordinateFrame,
    RepresentationMapping
)
from astropy.coordinates.builtin_frames import (
    AltAz, HADec, FK4, FK5, Galactic, Galactocentric, GCRS, HCRS, ICRS, ITRS
)
from astropy.coordinates.representation import (
    CartesianDifferential,
    REPRESENTATION_CLASSES,
)

from .test_representation import unitphysics  # this fixture is used below  # noqa


def setup_function(func):
    """Copy original 'REPRESENTATION_CLASSES' as attribute in function."""
    func.REPRESENTATION_CLASSES_ORIG = deepcopy(REPRESENTATION_CLASSES)


def teardown_function(func):
    """Reset REPRESENTATION_CLASSES to original value."""
    REPRESENTATION_CLASSES.clear()
    REPRESENTATION_CLASSES.update(func.REPRESENTATION_CLASSES_ORIG)


def test_frame_attribute_descriptor():
    """Unit tests of the Attribute descriptor."""
    class TestAttributes:
        attr_none = Attribute()
        attr_2 = Attribute(default=2)
        attr_3_attr2 = Attribute(default=3, secondary_attribute='attr_2')
        attr_none_attr2 = Attribute(default=None, secondary_attribute='attr_2')
        attr_none_nonexist = Attribute(
            default=None, secondary_attribute='nonexist'
        )

    t = TestAttributes()

    # Defaults
    assert t.attr_none is None
    assert t.attr_2 == 2
    assert t.attr_3_attr2 == 3
    assert t.attr_none_attr2 == t.attr_2
    assert t.attr_none_nonexist is None  # No default and non-existent secondary attr

    # Setting values via '_'-prefixed internal vars
    # (as would normally done in __init__)
    t._attr_none = 10
    assert t.attr_none == 10

    t._attr_2 = 20
    assert t.attr_2 == 20
    assert t.attr_3_attr2 == 3
    assert t.attr_none_attr2 == t.attr_2

    t._attr_none_attr2 = 40
    assert t.attr_none_attr2 == 40

    # Make sure setting values via public attribute fails
    with pytest.raises(AttributeError) as err:
        t.attr_none = 5
    assert 'Cannot set frame attribute' in str(err.value)


def test_frame_subclass_attribute_descriptor():
    """Unit test of the attribute descriptors in subclasses."""
    _EQUINOX_B1980 = Time('B1980', scale='tai')

    class MyFK4(FK4):
        # equinox inherited from FK4, obstime overridden, and newattr is new
        obstime = TimeAttribute(default=_EQUINOX_B1980)
        newattr = Attribute(default='newattr')

    mfk4 = MyFK4()
    assert mfk4.equinox.value == 'B1950.000'
    assert mfk4.obstime.value == 'B1980.000'
    assert mfk4.newattr == 'newattr'

    assert set(mfk4.get_frame_attr_names()) == set(['equinox', 'obstime', 'newattr'])

    mfk4 = MyFK4(equinox='J1980.0', obstime='J1990.0', newattr='world')
    assert mfk4.equinox.value == 'J1980.000'
    assert mfk4.obstime.value == 'J1990.000'
    assert mfk4.newattr == 'world'


def test_frame_multiple_inheritance_attribute_descriptor():
    """
    Ensure that all attributes are accumulated in case of inheritance from
    multiple BaseCoordinateFrames.  See
    https://github.com/astropy/astropy/pull/11099#issuecomment-735829157
    """
    class Frame1(BaseCoordinateFrame):
        attr1 = Attribute()

    class Frame2(BaseCoordinateFrame):
        attr2 = Attribute()

    class Frame3(Frame1, Frame2):
        pass

    assert len(Frame3.frame_attributes) == 2
    assert 'attr1' in Frame3.frame_attributes
    assert 'attr2' in Frame3.frame_attributes

    # In case the same attribute exists in both frames, the one from the
    # left-most class in the MRO should take precedence
    class Frame4(BaseCoordinateFrame):
        attr1 = Attribute()
        attr2 = Attribute()

    class Frame5(Frame1, Frame4):
        pass

    assert Frame5.frame_attributes['attr1'] is Frame1.frame_attributes['attr1']
    assert Frame5.frame_attributes['attr2'] is Frame4.frame_attributes['attr2']


def test_differentialattribute():
    # Test logic of passing input through to allowed class
    vel = [1, 2, 3]*u.km/u.s
    dif = r.CartesianDifferential(vel)

    class TestFrame(BaseCoordinateFrame):
        attrtest = DifferentialAttribute(
            default=dif, allowed_classes=[r.CartesianDifferential])

    frame1 = TestFrame()
    frame2 = TestFrame(attrtest=dif)
    frame3 = TestFrame(attrtest=vel)

    assert np.all(frame1.attrtest.d_xyz == frame2.attrtest.d_xyz)
    assert np.all(frame1.attrtest.d_xyz == frame3.attrtest.d_xyz)

    # This shouldn't work if there is more than one allowed class:
    class TestFrame2(BaseCoordinateFrame):
        attrtest = DifferentialAttribute(
            default=dif, allowed_classes=[r.CartesianDifferential,
                                          r.CylindricalDifferential])

    frame1 = TestFrame2()
    frame2 = TestFrame2(attrtest=dif)
    with pytest.raises(TypeError):
        TestFrame2(attrtest=vel)


def test_create_data_frames():
    # from repr
    i1 = ICRS(r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc))
    i2 = ICRS(r.UnitSphericalRepresentation(lon=1*u.deg, lat=2*u.deg))

    # from preferred name
    i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc)
    i4 = ICRS(ra=1*u.deg, dec=2*u.deg)

    assert i1.data.lat == i3.data.lat
    assert i1.data.lon == i3.data.lon
    assert i1.data.distance == i3.data.distance

    assert i2.data.lat == i4.data.lat
    assert i2.data.lon == i4.data.lon

    # now make sure the preferred names work as properties
    assert_allclose(i1.ra, i3.ra)
    assert_allclose(i2.ra, i4.ra)
    assert_allclose(i1.distance, i3.distance)

    with pytest.raises(AttributeError):
        i1.ra = [11.]*u.deg


def test_create_ordered_data():
    TOL = 1e-10*u.deg

    i = ICRS(1*u.deg, 2*u.deg)
    assert (i.ra - 1*u.deg) < TOL
    assert (i.dec - 2*u.deg) < TOL

    g = Galactic(1*u.deg, 2*u.deg)
    assert (g.l - 1*u.deg) < TOL
    assert (g.b - 2*u.deg) < TOL

    a = AltAz(1*u.deg, 2*u.deg)
    assert (a.az - 1*u.deg) < TOL
    assert (a.alt - 2*u.deg) < TOL

    with pytest.raises(TypeError):
        ICRS(1*u.deg, 2*u.deg, 1*u.deg, 2*u.deg)

    with pytest.raises(TypeError):
        sph = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)
        ICRS(sph, 1*u.deg, 2*u.deg)


def test_create_nodata_frames():
    i = ICRS()
    assert len(i.get_frame_attr_names()) == 0

    f5 = FK5()
    assert f5.equinox == FK5.get_frame_attr_names()['equinox']

    f4 = FK4()
    assert f4.equinox == FK4.get_frame_attr_names()['equinox']

    # obstime is special because it's a property that uses equinox if obstime is not set
    assert f4.obstime in (FK4.get_frame_attr_names()['obstime'],
                          FK4.get_frame_attr_names()['equinox'])


def test_no_data_nonscalar_frames():
    a1 = AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day,
               temperature=np.ones((3, 1)) * u.deg_C)
    assert a1.obstime.shape == (3, 10)
    assert a1.temperature.shape == (3, 10)
    assert a1.shape == (3, 10)
    with pytest.raises(ValueError) as exc:
        AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day,
              temperature=np.ones((3,)) * u.deg_C)
    assert 'inconsistent shapes' in str(exc.value)


def test_frame_repr():
    i = ICRS()
    assert repr(i) == '<ICRS Frame>'

    f5 = FK5()
    assert repr(f5).startswith('<FK5 Frame (equinox=')

    i2 = ICRS(ra=1*u.deg, dec=2*u.deg)
    i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc)

    assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                        '    (1., 2.)>')
    assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
                        '    (1., 2., 3.)>')

    # try with arrays
    i2 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[2.1, 3.1]*u.deg)
    i3 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[-15.6, 17.1]*u.deg, distance=[11., 21.]*u.kpc)

    assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                        '    [(1.1, 2.1), (2.1, 3.1)]>')

    assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
                        '    [(1.1, -15.6, 11.), (2.1, 17.1, 21.)]>')


def test_frame_repr_vels():
    i = ICRS(ra=1*u.deg, dec=2*u.deg,
             pm_ra_cosdec=1*u.marcsec/u.yr, pm_dec=2*u.marcsec/u.yr)

    # unit comes out as mas/yr because of the preferred units defined in the
    # frame RepresentationMapping
    assert repr(i) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                       '    (1., 2.)\n'
                       ' (pm_ra_cosdec, pm_dec) in mas / yr\n'
                       '    (1., 2.)>')


def test_converting_units():
    # this is a regular expression that, with split (see below), removes what's
    # after the decimal point to fix rounding problems
    rexrepr = re.compile(r'(.*?=\d\.).*?( .*?=\d\.).*?( .*)')

    # Use values that aren't subject to rounding down to X.9999...
    i2 = ICRS(ra=2.*u.deg, dec=2.*u.deg)
    i2_many = ICRS(ra=[2., 4.]*u.deg, dec=[2., -8.1]*u.deg)

    # converting from FK5 to ICRS and back changes the *internal* representation,
    # but it should still come out in the preferred form
    i4 = i2.transform_to(FK5()).transform_to(ICRS())
    i4_many = i2_many.transform_to(FK5()).transform_to(ICRS())

    ri2 = ''.join(rexrepr.split(repr(i2)))
    ri4 = ''.join(rexrepr.split(repr(i4)))
    assert ri2 == ri4
    assert i2.data.lon.unit != i4.data.lon.unit  # Internal repr changed

    ri2_many = ''.join(rexrepr.split(repr(i2_many)))
    ri4_many = ''.join(rexrepr.split(repr(i4_many)))

    assert ri2_many == ri4_many
    assert i2_many.data.lon.unit != i4_many.data.lon.unit  # Internal repr changed

    # but that *shouldn't* hold if we turn off units for the representation
    class FakeICRS(ICRS):
        frame_specific_representation_info = {
            'spherical': [RepresentationMapping('lon', 'ra', u.hourangle),
                          RepresentationMapping('lat', 'dec', None),
                          RepresentationMapping('distance', 'distance')]  # should fall back to default of None unit
        }

    fi = FakeICRS(i4.data)
    ri2 = ''.join(rexrepr.split(repr(i2)))
    rfi = ''.join(rexrepr.split(repr(fi)))
    rfi = re.sub('FakeICRS', 'ICRS', rfi)  # Force frame name to match
    assert ri2 != rfi

    # the attributes should also get the right units
    assert i2.dec.unit == i4.dec.unit

    # unless no/explicitly given units
    assert i2.dec.unit != fi.dec.unit
    assert i2.ra.unit != fi.ra.unit
    assert fi.ra.unit == u.hourangle


def test_representation_info():
    class NewICRS1(ICRS):
        frame_specific_representation_info = {
            r.SphericalRepresentation: [
                RepresentationMapping('lon', 'rara', u.hourangle),
                RepresentationMapping('lat', 'decdec', u.degree),
                RepresentationMapping('distance', 'distance', u.kpc)]
        }

    i1 = NewICRS1(rara=10*u.degree, decdec=-12*u.deg, distance=1000*u.pc,
                  pm_rara_cosdecdec=100*u.mas/u.yr,
                  pm_decdec=17*u.mas/u.yr,
                  radial_velocity=10*u.km/u.s)
    assert allclose(i1.rara, 10*u.deg)
    assert i1.rara.unit == u.hourangle
    assert allclose(i1.decdec, -12*u.deg)
    assert allclose(i1.distance, 1000*u.pc)
    assert i1.distance.unit == u.kpc
    assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr)
    assert allclose(i1.pm_decdec, 17*u.mas/u.yr)

    # this should auto-set the names of UnitSpherical:
    i1.set_representation_cls(r.UnitSphericalRepresentation,
                              s=r.UnitSphericalCosLatDifferential)
    assert allclose(i1.rara, 10*u.deg)
    assert allclose(i1.decdec, -12*u.deg)
    assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr)
    assert allclose(i1.pm_decdec, 17*u.mas/u.yr)

    # For backwards compatibility, we also support the string name in the
    # representation info dictionary:
    class NewICRS2(ICRS):
        frame_specific_representation_info = {
            'spherical': [
                RepresentationMapping('lon', 'ang1', u.hourangle),
                RepresentationMapping('lat', 'ang2', u.degree),
                RepresentationMapping('distance', 'howfar', u.kpc)]
        }

    i2 = NewICRS2(ang1=10*u.degree, ang2=-12*u.deg, howfar=1000*u.pc)
    assert allclose(i2.ang1, 10*u.deg)
    assert i2.ang1.unit == u.hourangle
    assert allclose(i2.ang2, -12*u.deg)
    assert allclose(i2.howfar, 1000*u.pc)
    assert i2.howfar.unit == u.kpc

    # Test that the differential kwargs get overridden
    class NewICRS3(ICRS):
        frame_specific_representation_info = {
            r.SphericalCosLatDifferential: [
                RepresentationMapping('d_lon_coslat', 'pm_ang1', u.hourangle/u.year),
                RepresentationMapping('d_lat', 'pm_ang2'),
                RepresentationMapping('d_distance', 'vlos', u.kpc/u.Myr)]
        }

    i3 = NewICRS3(lon=10*u.degree, lat=-12*u.deg, distance=1000*u.pc,
                  pm_ang1=1*u.mas/u.yr, pm_ang2=2*u.mas/u.yr,
                  vlos=100*u.km/u.s)
    assert allclose(i3.pm_ang1, 1*u.mas/u.yr)
    assert i3.pm_ang1.unit == u.hourangle/u.year
    assert allclose(i3.pm_ang2, 2*u.mas/u.yr)
    assert allclose(i3.vlos, 100*u.km/u.s)
    assert i3.vlos.unit == u.kpc/u.Myr


def test_realizing():
    rep = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)

    i = ICRS()
    i2 = i.realize_frame(rep)

    assert not i.has_data
    assert i2.has_data

    f = FK5(equinox=Time('J2001'))
    f2 = f.realize_frame(rep)

    assert not f.has_data
    assert f2.has_data

    assert f2.equinox == f.equinox
    assert f2.equinox != FK5.get_frame_attr_names()['equinox']

    # Check that a nicer error message is returned:
    with pytest.raises(TypeError) as excinfo:
        f.realize_frame(f.representation_type)

    assert ('Class passed as data instead of a representation' in
            excinfo.value.args[0])


def test_replicating():
    i = ICRS(ra=[1]*u.deg, dec=[2]*u.deg)

    icopy = i.replicate(copy=True)
    irepl = i.replicate(copy=False)
    i.data._lat[:] = 0*u.deg
    assert np.all(i.data.lat == irepl.data.lat)
    assert np.all(i.data.lat != icopy.data.lat)

    iclone = i.replicate_without_data()
    assert i.has_data
    assert not iclone.has_data

    aa = AltAz(alt=1*u.deg, az=2*u.deg, obstime=Time('J2000'))
    aaclone = aa.replicate_without_data(obstime=Time('J2001'))
    assert not aaclone.has_data
    assert aa.obstime != aaclone.obstime
    assert aa.pressure == aaclone.pressure
    assert aa.obswl == aaclone.obswl


def test_getitem():
    rep = r.SphericalRepresentation(
        [1, 2, 3]*u.deg, [4, 5, 6]*u.deg, [7, 8, 9]*u.kpc)

    i = ICRS(rep)
    assert len(i.ra) == 3

    iidx = i[1:]
    assert len(iidx.ra) == 2

    iidx2 = i[0]
    assert iidx2.ra.isscalar


def test_transform():
    """
    This test just makes sure the transform architecture works, but does *not*
    actually test all the builtin transforms themselves are accurate.
    """
    i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
    f = i.transform_to(FK5())
    i2 = f.transform_to(ICRS())

    assert i2.data.__class__ == r.UnitSphericalRepresentation

    assert_allclose(i.ra, i2.ra)
    assert_allclose(i.dec, i2.dec)

    i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc)
    f = i.transform_to(FK5())
    i2 = f.transform_to(ICRS())

    assert i2.data.__class__ != r.UnitSphericalRepresentation

    f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001'))
    f4 = f.transform_to(FK4())
    f4_2 = f.transform_to(FK4(equinox=f.equinox))

    # make sure attributes are copied over correctly
    assert f4.equinox == FK4().equinox
    assert f4_2.equinox == f.equinox

    # make sure self-transforms also work
    i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
    i2 = i.transform_to(ICRS())

    assert_allclose(i.ra, i2.ra)
    assert_allclose(i.dec, i2.dec)

    f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001'))
    f2 = f.transform_to(FK5())  # default equinox, so should be *different*
    assert f2.equinox == FK5().equinox
    with pytest.raises(AssertionError):
        assert_allclose(f.ra, f2.ra)
    with pytest.raises(AssertionError):
        assert_allclose(f.dec, f2.dec)

    # finally, check Galactic round-tripping
    i1 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
    i2 = i1.transform_to(Galactic()).transform_to(ICRS())

    assert_allclose(i1.ra, i2.ra)
    assert_allclose(i1.dec, i2.dec)


def test_transform_to_nonscalar_nodata_frame():
    # https://github.com/astropy/astropy/pull/5254#issuecomment-241592353
    times = Time('2016-08-23') + np.linspace(0, 10, 12)*u.day
    coo1 = ICRS(ra=[[0.], [10.], [20.]]*u.deg,
                dec=[[-30.], [30.], [60.]]*u.deg)
    coo2 = coo1.transform_to(FK5(equinox=times))
    assert coo2.shape == (3, 12)


def test_setitem_no_velocity():
    """Test different flavors of item setting for a Frame without a velocity.
    """
    obstime = 'B1955'
    sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime)
    sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime)

    sc1 = sc0.copy()
    sc1_repr = repr(sc1)
    assert 'representation' in sc1.cache
    sc1[1] = sc2[0]
    assert sc1.cache == {}
    assert repr(sc2) != sc1_repr

    assert np.allclose(sc1.ra.to_value(u.deg), [1, 10])
    assert np.allclose(sc1.dec.to_value(u.deg), [3, 30])
    assert sc1.obstime == sc2.obstime
    assert sc1.name == 'fk4'

    sc1 = sc0.copy()
    sc1[:] = sc2[0]
    assert np.allclose(sc1.ra.to_value(u.deg), [10, 10])
    assert np.allclose(sc1.dec.to_value(u.deg), [30, 30])

    sc1 = sc0.copy()
    sc1[:] = sc2[:]
    assert np.allclose(sc1.ra.to_value(u.deg), [10, 20])
    assert np.allclose(sc1.dec.to_value(u.deg), [30, 40])

    sc1 = sc0.copy()
    sc1[[1, 0]] = sc2[:]
    assert np.allclose(sc1.ra.to_value(u.deg), [20, 10])
    assert np.allclose(sc1.dec.to_value(u.deg), [40, 30])

    # Works for array-valued obstime so long as they are considered equivalent
    sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, obstime])
    sc1[0] = sc2[0]

    # Multidimensional coordinates
    sc1 = FK4([[1, 2], [3, 4]] * u.deg, [[5, 6], [7, 8]] * u.deg)
    sc2 = FK4([[10, 20], [30, 40]] * u.deg, [[50, 60], [70, 80]] * u.deg)
    sc1[0] = sc2[0]
    assert np.allclose(sc1.ra.to_value(u.deg), [[10, 20], [3, 4]])
    assert np.allclose(sc1.dec.to_value(u.deg), [[50, 60], [7, 8]])


def test_setitem_velocities():
    """Test different flavors of item setting for a Frame with a velocity.
    """
    sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s,
              obstime='B1950')
    sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, radial_velocity=[10, 20]*u.km/u.s,
              obstime='B1950')

    sc1 = sc0.copy()
    sc1[1] = sc2[0]
    assert np.allclose(sc1.ra.to_value(u.deg), [1, 10])
    assert np.allclose(sc1.dec.to_value(u.deg), [3, 30])
    assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [1, 10])
    assert sc1.obstime == sc2.obstime
    assert sc1.name == 'fk4'

    sc1 = sc0.copy()
    sc1[:] = sc2[0]
    assert np.allclose(sc1.ra.to_value(u.deg), [10, 10])
    assert np.allclose(sc1.dec.to_value(u.deg), [30, 30])
    assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 10])

    sc1 = sc0.copy()
    sc1[:] = sc2[:]
    assert np.allclose(sc1.ra.to_value(u.deg), [10, 20])
    assert np.allclose(sc1.dec.to_value(u.deg), [30, 40])
    assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 20])

    sc1 = sc0.copy()
    sc1[[1, 0]] = sc2[:]
    assert np.allclose(sc1.ra.to_value(u.deg), [20, 10])
    assert np.allclose(sc1.dec.to_value(u.deg), [40, 30])
    assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [20, 10])


def test_setitem_exceptions():
    obstime = 'B1950'
    sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg)
    sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime)

    sc1 = Galactic(sc0.ra, sc0.dec)
    with pytest.raises(TypeError, match='can only set from object of same class: '
                                        'Galactic vs. FK4'):
        sc1[0] = sc2[0]

    sc1 = FK4(sc0.ra, sc0.dec, obstime='B2001')
    with pytest.raises(ValueError,
                       match='can only set frame item from an equivalent frame'):
        sc1[0] = sc2[0]

    sc1 = FK4(sc0.ra[0], sc0.dec[0], obstime=obstime)
    with pytest.raises(TypeError, match="scalar 'FK4' frame object does not support "
                                        'item assignment'):
        sc1[0] = sc2[0]

    sc1 = FK4(obstime=obstime)
    with pytest.raises(ValueError, match='cannot set frame which has no data'):
        sc1[0] = sc2[0]

    sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, 'B1980'])
    with pytest.raises(ValueError,
                       match='can only set frame item from an equivalent frame'):
        sc1[0] = sc2[0]

    # Wrong shape
    sc1 = FK4([sc0.ra], [sc0.dec], obstime=[obstime, 'B1980'])
    with pytest.raises(ValueError,
                       match='can only set frame item from an equivalent frame'):
        sc1[0] = sc2[0]


def test_sep():
    i1 = ICRS(ra=0*u.deg, dec=1*u.deg)
    i2 = ICRS(ra=0*u.deg, dec=2*u.deg)

    sep = i1.separation(i2)
    assert sep.deg == 1

    i3 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc)
    i4 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[4, 5]*u.kpc)

    sep3d = i3.separation_3d(i4)
    assert_allclose(sep3d.to(u.kpc), np.array([1, 1])*u.kpc)

    # check that it works even with velocities
    i5 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc,
              pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr,
              radial_velocity=[5, 6]*u.km/u.s)
    i6 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[7, 8]*u.kpc,
              pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr,
              radial_velocity=[5, 6]*u.km/u.s)

    sep3d = i5.separation_3d(i6)
    assert_allclose(sep3d.to(u.kpc), np.array([2, 2])*u.kpc)

    # 3d separations of dimensionless distances should still work
    i7 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.one)
    i8 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=4*u.one)
    sep3d = i7.separation_3d(i8)
    assert_allclose(sep3d, 1*u.one)

    # but should fail with non-dimensionless
    with pytest.raises(ValueError):
        i7.separation_3d(i3)


def test_time_inputs():
    """
    Test validation and conversion of inputs for equinox and obstime attributes.
    """
    c = FK4(1 * u.deg, 2 * u.deg, equinox='J2001.5', obstime='2000-01-01 12:00:00')
    assert c.equinox == Time('J2001.5')
    assert c.obstime == Time('2000-01-01 12:00:00')

    with pytest.raises(ValueError) as err:
        c = FK4(1 * u.deg, 2 * u.deg, equinox=1.5)
    assert 'Invalid time input' in str(err.value)

    with pytest.raises(ValueError) as err:
        c = FK4(1 * u.deg, 2 * u.deg, obstime='hello')
    assert 'Invalid time input' in str(err.value)

    # A vector time should work if the shapes match, but we don't automatically
    # broadcast the basic data (just like time).
    FK4([1, 2] * u.deg, [2, 3] * u.deg, obstime=['J2000', 'J2001'])
    with pytest.raises(ValueError) as err:
        FK4(1 * u.deg, 2 * u.deg, obstime=['J2000', 'J2001'])
    assert 'shape' in str(err.value)


def test_is_frame_attr_default():
    """
    Check that the `is_frame_attr_default` machinery works as expected
    """
    c1 = FK5(ra=1*u.deg, dec=1*u.deg)
    c2 = FK5(ra=1*u.deg, dec=1*u.deg,
             equinox=FK5.get_frame_attr_names()['equinox'])
    c3 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=Time('J2001.5'))

    assert c1.equinox == c2.equinox
    assert c1.equinox != c3.equinox

    assert c1.is_frame_attr_default('equinox')
    assert not c2.is_frame_attr_default('equinox')
    assert not c3.is_frame_attr_default('equinox')

    c4 = c1.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg))
    c5 = c2.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg))

    assert c4.is_frame_attr_default('equinox')
    assert not c5.is_frame_attr_default('equinox')


def test_altaz_attributes():
    aa = AltAz(1*u.deg, 2*u.deg)
    assert aa.obstime is None
    assert aa.location is None

    aa2 = AltAz(1*u.deg, 2*u.deg, obstime='J2000')
    assert aa2.obstime == Time('J2000')

    aa3 = AltAz(1*u.deg, 2*u.deg, location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m))
    assert isinstance(aa3.location, EarthLocation)


def test_hadec_attributes():
    hd = HADec(1*u.hourangle, 2*u.deg)
    assert hd.ha == 1.*u.hourangle
    assert hd.dec == 2*u.deg
    assert hd.obstime is None
    assert hd.location is None

    hd2 = HADec(23*u.hourangle, -2*u.deg, obstime='J2000',
                location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m))
    assert_allclose(hd2.ha, -1*u.hourangle)
    assert hd2.dec == -2*u.deg
    assert hd2.obstime == Time('J2000')
    assert isinstance(hd2.location, EarthLocation)

    sr = hd2.represent_as(r.SphericalRepresentation)
    assert_allclose(sr.lon, -1*u.hourangle)


def test_representation():
    """
    Test the getter and setter properties for `representation`
    """
    # Create the frame object.
    icrs = ICRS(ra=1*u.deg, dec=1*u.deg)
    data = icrs.data

    # Create some representation objects.
    icrs_cart = icrs.cartesian
    icrs_spher = icrs.spherical
    icrs_cyl = icrs.cylindrical

    # Testing when `_representation` set to `CartesianRepresentation`.
    icrs.representation_type = r.CartesianRepresentation

    assert icrs.representation_type == r.CartesianRepresentation
    assert icrs_cart.x == icrs.x
    assert icrs_cart.y == icrs.y
    assert icrs_cart.z == icrs.z
    assert icrs.data == data

    # Testing that an ICRS object in CartesianRepresentation must not have
    # spherical attributes.
    for attr in ('ra', 'dec', 'distance'):
        with pytest.raises(AttributeError) as err:
            getattr(icrs, attr)
        assert 'object has no attribute' in str(err.value)

    # Testing when `_representation` set to `CylindricalRepresentation`.
    icrs.representation_type = r.CylindricalRepresentation

    assert icrs.representation_type == r.CylindricalRepresentation
    assert icrs.data == data

    # Testing setter input using text argument for spherical.
    icrs.representation_type = 'spherical'

    assert icrs.representation_type is r.SphericalRepresentation
    assert icrs_spher.lat == icrs.dec
    assert icrs_spher.lon == icrs.ra
    assert icrs_spher.distance == icrs.distance
    assert icrs.data == data

    # Testing that an ICRS object in SphericalRepresentation must not have
    # cartesian attributes.
    for attr in ('x', 'y', 'z'):
        with pytest.raises(AttributeError) as err:
            getattr(icrs, attr)
        assert 'object has no attribute' in str(err.value)

    # Testing setter input using text argument for cylindrical.
    icrs.representation_type = 'cylindrical'

    assert icrs.representation_type is r.CylindricalRepresentation
    assert icrs_cyl.rho == icrs.rho
    assert icrs_cyl.phi == icrs.phi
    assert icrs_cyl.z == icrs.z
    assert icrs.data == data

    # Testing that an ICRS object in CylindricalRepresentation must not have
    # spherical attributes.
    for attr in ('ra', 'dec', 'distance'):
        with pytest.raises(AttributeError) as err:
            getattr(icrs, attr)
        assert 'object has no attribute' in str(err.value)

    with pytest.raises(ValueError) as err:
        icrs.representation_type = 'WRONG'
    assert 'but must be a BaseRepresentation class' in str(err.value)

    with pytest.raises(ValueError) as err:
        icrs.representation_type = ICRS
    assert 'but must be a BaseRepresentation class' in str(err.value)


def test_represent_as():
    icrs = ICRS(ra=1*u.deg, dec=1*u.deg)

    cart1 = icrs.represent_as('cartesian')
    cart2 = icrs.represent_as(r.CartesianRepresentation)

    assert cart1.x == cart2.x
    assert cart1.y == cart2.y
    assert cart1.z == cart2.z

    # now try with velocities
    icrs = ICRS(ra=0*u.deg, dec=0*u.deg, distance=10*u.kpc,
                pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr,
                radial_velocity=1*u.km/u.s)

    # single string
    rep2 = icrs.represent_as('cylindrical')
    assert isinstance(rep2, r.CylindricalRepresentation)
    assert isinstance(rep2.differentials['s'], r.CylindricalDifferential)

    # single class with positional in_frame_units, verify that warning raised
    with pytest.warns(AstropyWarning, match='argument position') as w:
        icrs.represent_as(r.CylindricalRepresentation, False)
    assert len(w) == 1

    # TODO: this should probably fail in the future once we figure out a better
    # workaround for dealing with UnitSphericalRepresentation's with
    # RadialDifferential's
    # two classes
    # rep2 = icrs.represent_as(r.CartesianRepresentation,
    #                          r.SphericalCosLatDifferential)
    # assert isinstance(rep2, r.CartesianRepresentation)
    # assert isinstance(rep2.differentials['s'], r.SphericalCosLatDifferential)

    with pytest.raises(ValueError):
        icrs.represent_as('odaigahara')


def test_shorthand_representations():
    rep = r.CartesianRepresentation([1, 2, 3]*u.pc)
    dif = r.CartesianDifferential([1, 2, 3]*u.km/u.s)
    rep = rep.with_differentials(dif)

    icrs = ICRS(rep)

    cyl = icrs.cylindrical
    assert isinstance(cyl, r.CylindricalRepresentation)
    assert isinstance(cyl.differentials['s'], r.CylindricalDifferential)

    sph = icrs.spherical
    assert isinstance(sph, r.SphericalRepresentation)
    assert isinstance(sph.differentials['s'], r.SphericalDifferential)

    sph = icrs.sphericalcoslat
    assert isinstance(sph, r.SphericalRepresentation)
    assert isinstance(sph.differentials['s'], r.SphericalCosLatDifferential)


def test_equal():
    obstime = 'B1955'
    sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime)
    sc2 = FK4([1, 20]*u.deg, [3, 4]*u.deg, obstime=obstime)

    # Compare arrays and scalars
    eq = sc1 == sc2
    ne = sc1 != sc2
    assert np.all(eq == [True, False])
    assert np.all(ne == [False, True])
    assert (sc1[0] == sc2[0]) == True  # noqa  (numpy True not Python True)
    assert (sc1[0] != sc2[0]) == False  # noqa

    # Broadcasting
    eq = sc1[0] == sc2
    ne = sc1[0] != sc2
    assert np.all(eq == [True, False])
    assert np.all(ne == [False, True])

    # With diff only in velocity
    sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s)
    sc2 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 20]*u.km/u.s)

    eq = sc1 == sc2
    ne = sc1 != sc2
    assert np.all(eq == [True, False])
    assert np.all(ne == [False, True])
    assert (sc1[0] == sc2[0]) == True  # noqa
    assert (sc1[0] != sc2[0]) == False  # noqa

    assert (FK4() == ICRS()) is False
    assert (FK4() == FK4(obstime='J1999')) is False


def test_equal_exceptions():
    # Shape mismatch
    sc1 = FK4([1, 2, 3]*u.deg, [3, 4, 5]*u.deg)
    with pytest.raises(ValueError, match='cannot compare: shape mismatch'):
        sc1 == sc1[:2]

    # Different representation_type
    sc1 = FK4(1, 2, 3, representation_type='cartesian')
    sc2 = FK4(1*u.deg, 2*u.deg, 2, representation_type='spherical')
    with pytest.raises(TypeError, match='cannot compare: objects must have same '
                       'class: CartesianRepresentation vs. SphericalRepresentation'):
        sc1 == sc2

    # Different differential type
    sc1 = FK4(1*u.deg, 2*u.deg, radial_velocity=1*u.km/u.s)
    sc2 = FK4(1*u.deg, 2*u.deg, pm_ra_cosdec=1*u.mas/u.yr, pm_dec=1*u.mas/u.yr)
    with pytest.raises(TypeError, match='cannot compare: objects must have same '
                       'class: RadialDifferential vs. UnitSphericalCosLatDifferential'):
        sc1 == sc2

    # Different frame attribute
    sc1 = FK5(1*u.deg, 2*u.deg)
    sc2 = FK5(1*u.deg, 2*u.deg, equinox='J1999')
    with pytest.raises(TypeError, match=r'cannot compare: objects must have equivalent '
                       r'frames: <FK5 Frame \(equinox=J2000.000\)> '
                       r'vs. <FK5 Frame \(equinox=J1999.000\)>'):
        sc1 == sc2

    # Different frame
    sc1 = FK4(1*u.deg, 2*u.deg)
    sc2 = FK5(1*u.deg, 2*u.deg, equinox='J2000')
    with pytest.raises(TypeError, match='cannot compare: objects must have equivalent '
                       r'frames: <FK4 Frame \(equinox=B1950.000, obstime=B1950.000\)> '
                       r'vs. <FK5 Frame \(equinox=J2000.000\)>'):
        sc1 == sc2

    sc1 = FK4(1*u.deg, 2*u.deg)
    sc2 = FK4()
    with pytest.raises(ValueError, match='cannot compare: one frame has data and '
                       'the other does not'):
        sc1 == sc2
    with pytest.raises(ValueError, match='cannot compare: one frame has data and '
                       'the other does not'):
        sc2 == sc1


def test_dynamic_attrs():
    c = ICRS(1*u.deg, 2*u.deg)
    assert 'ra' in dir(c)
    assert 'dec' in dir(c)

    with pytest.raises(AttributeError) as err:
        c.blahblah
    assert "object has no attribute 'blahblah'" in str(err.value)

    with pytest.raises(AttributeError) as err:
        c.ra = 1
    assert "Cannot set any frame attribute" in str(err.value)

    c.blahblah = 1
    assert c.blahblah == 1


def test_nodata_error():
    i = ICRS()
    with pytest.raises(ValueError) as excinfo:
        i.data

    assert 'does not have associated data' in str(excinfo.value)


def test_len0_data():
    i = ICRS([]*u.deg, []*u.deg)
    assert i.has_data
    repr(i)


def test_quantity_attributes():
    # make sure we can create a GCRS frame with valid inputs
    GCRS(obstime='J2002', obsgeoloc=[1, 2, 3]*u.km, obsgeovel=[4, 5, 6]*u.km/u.s)

    # make sure it fails for invalid locs or vels
    with pytest.raises(TypeError):
        GCRS(obsgeoloc=[1, 2, 3])  # no unit
    with pytest.raises(u.UnitsError):
        GCRS(obsgeoloc=[1, 2, 3]*u.km/u.s)  # incorrect unit
    with pytest.raises(ValueError):
        GCRS(obsgeoloc=[1, 3]*u.km)  # incorrect shape


def test_quantity_attribute_default():
    # The default default (yes) is None:
    class MyCoord(BaseCoordinateFrame):
        someval = QuantityAttribute(unit=u.deg)

    frame = MyCoord()
    assert frame.someval is None

    frame = MyCoord(someval=15*u.deg)
    assert u.isclose(frame.someval, 15*u.deg)

    # This should work if we don't explicitly pass in a unit, but we pass in a
    # default value with a unit
    class MyCoord2(BaseCoordinateFrame):
        someval = QuantityAttribute(15*u.deg)

    frame = MyCoord2()
    assert u.isclose(frame.someval, 15*u.deg)

    # Since here no shape was given, we can set to any shape we like.
    frame = MyCoord2(someval=np.ones(3)*u.deg)
    assert frame.someval.shape == (3,)
    assert np.all(frame.someval == 1*u.deg)

    # We should also be able to insist on a given shape.
    class MyCoord3(BaseCoordinateFrame):
        someval = QuantityAttribute(unit=u.arcsec, shape=(3,))

    frame = MyCoord3(someval=np.ones(3)*u.deg)
    assert frame.someval.shape == (3,)
    assert frame.someval.unit == u.arcsec
    assert u.allclose(frame.someval.value, 3600.)

    # The wrong shape raises.
    with pytest.raises(ValueError, match='shape'):
        MyCoord3(someval=1.*u.deg)

    # As does the wrong unit.
    with pytest.raises(u.UnitsError):
        MyCoord3(someval=np.ones(3)*u.m)

    # We are allowed a short-cut for zero.
    frame0 = MyCoord3(someval=0)
    assert frame0.someval.shape == (3,)
    assert frame0.someval.unit == u.arcsec
    assert np.all(frame0.someval.value == 0.)

    # But not if it has the wrong shape.
    with pytest.raises(ValueError, match='shape'):
        MyCoord3(someval=np.zeros(2))

    # This should fail, if we don't pass in a default or a unit
    with pytest.raises(ValueError):
        class MyCoord(BaseCoordinateFrame):
            someval = QuantityAttribute()


def test_eloc_attributes():
    el = EarthLocation(lon=12.3*u.deg, lat=45.6*u.deg, height=1*u.km)
    it = ITRS(r.SphericalRepresentation(lon=12.3*u.deg, lat=45.6*u.deg,
                                        distance=1*u.km))
    gc = GCRS(ra=12.3*u.deg, dec=45.6*u.deg, distance=6375*u.km)

    el1 = AltAz(location=el).location
    assert isinstance(el1, EarthLocation)
    # these should match *exactly* because the EarthLocation
    assert el1.lat == el.lat
    assert el1.lon == el.lon
    assert el1.height == el.height

    el2 = AltAz(location=it).location
    assert isinstance(el2, EarthLocation)
    # these should *not* match because giving something in Spherical ITRS is
    # *not* the same as giving it as an EarthLocation: EarthLocation is on an
    # elliptical geoid. So the longitude should match (because flattening is
    # only along the z-axis), but latitude should not. Also, height is relative
    # to the *surface* in EarthLocation, but the ITRS distance is relative to
    # the center of the Earth
    assert not allclose(el2.lat, it.spherical.lat)
    assert allclose(el2.lon, it.spherical.lon)
    assert el2.height < -6000*u.km

    el3 = AltAz(location=gc).location
    # GCRS inputs implicitly get transformed to ITRS and then onto
    # EarthLocation's elliptical geoid. So both lat and lon shouldn't match
    assert isinstance(el3, EarthLocation)
    assert not allclose(el3.lat, gc.dec)
    assert not allclose(el3.lon, gc.ra)
    assert np.abs(el3.height) < 500*u.km


def test_equivalent_frames():
    i = ICRS()
    i2 = ICRS(1*u.deg, 2*u.deg)
    assert i.is_equivalent_frame(i)
    assert i.is_equivalent_frame(i2)
    with pytest.raises(TypeError):
        assert i.is_equivalent_frame(10)
    with pytest.raises(TypeError):
        assert i2.is_equivalent_frame(SkyCoord(i2))

    f0 = FK5()  # this J2000 is TT
    f1 = FK5(equinox='J2000')
    f2 = FK5(1*u.deg, 2*u.deg, equinox='J2000')
    f3 = FK5(equinox='J2010')
    f4 = FK4(equinox='J2010')

    assert f1.is_equivalent_frame(f1)
    assert not i.is_equivalent_frame(f1)
    assert f0.is_equivalent_frame(f1)
    assert f1.is_equivalent_frame(f2)
    assert not f1.is_equivalent_frame(f3)
    assert not f3.is_equivalent_frame(f4)

    aa1 = AltAz()
    aa2 = AltAz(obstime='J2010')

    assert aa2.is_equivalent_frame(aa2)
    assert not aa1.is_equivalent_frame(i)
    assert not aa1.is_equivalent_frame(aa2)


def test_equivalent_frame_coordinateattribute():
    class FrameWithCoordinateAttribute(BaseCoordinateFrame):
        coord_attr = CoordinateAttribute(HCRS)

    # These frames should not be considered equivalent
    f0 = FrameWithCoordinateAttribute()
    f1 = FrameWithCoordinateAttribute(coord_attr=HCRS(1*u.deg, 2*u.deg,
                                                      obstime='J2000'))
    f2 = FrameWithCoordinateAttribute(coord_attr=HCRS(3*u.deg, 4*u.deg,
                                                      obstime='J2000'))
    f3 = FrameWithCoordinateAttribute(coord_attr=HCRS(1*u.deg, 2*u.deg,
                                                      obstime='J2001'))

    assert not f0.is_equivalent_frame(f1)
    assert not f1.is_equivalent_frame(f0)
    assert not f1.is_equivalent_frame(f2)
    assert not f1.is_equivalent_frame(f3)
    assert not f2.is_equivalent_frame(f3)

    # They each should still be equivalent to a deep copy of themselves
    assert f0.is_equivalent_frame(deepcopy(f0))
    assert f1.is_equivalent_frame(deepcopy(f1))
    assert f2.is_equivalent_frame(deepcopy(f2))
    assert f3.is_equivalent_frame(deepcopy(f3))


def test_equivalent_frame_locationattribute():
    class FrameWithLocationAttribute(BaseCoordinateFrame):
        loc_attr = EarthLocationAttribute()

    # These frames should not be considered equivalent
    f0 = FrameWithLocationAttribute()
    location = EarthLocation(lat=-34, lon=19, height=300)
    f1 = FrameWithLocationAttribute(loc_attr=location)

    assert not f0.is_equivalent_frame(f1)
    assert not f1.is_equivalent_frame(f0)

    # They each should still be equivalent to a deep copy of themselves
    assert f0.is_equivalent_frame(deepcopy(f0))
    assert f1.is_equivalent_frame(deepcopy(f1))


def test_representation_subclass():
    # Regression test for #3354

    # Normally when instantiating a frame without a distance the frame will try
    # and use UnitSphericalRepresentation internally instead of
    # SphericalRepresentation.
    frame = FK5(representation_type=r.SphericalRepresentation,
                ra=32 * u.deg, dec=20 * u.deg)
    assert type(frame._data) == r.UnitSphericalRepresentation
    assert frame.representation_type == r.SphericalRepresentation

    # If using a SphericalRepresentation class this used to not work, so we
    # test here that this is now fixed.
    class NewSphericalRepresentation(r.SphericalRepresentation):
        attr_classes = r.SphericalRepresentation.attr_classes

    frame = FK5(representation_type=NewSphericalRepresentation,
                lon=32 * u.deg, lat=20 * u.deg)
    assert type(frame._data) == r.UnitSphericalRepresentation
    assert frame.representation_type == NewSphericalRepresentation

    # A similar issue then happened in __repr__ with subclasses of
    # SphericalRepresentation.
    assert repr(frame) == ("<FK5 Coordinate (equinox=J2000.000): (lon, lat) in deg\n"
                           "    (32., 20.)>")

    # A more subtle issue is when specifying a custom
    # UnitSphericalRepresentation subclass for the data and
    # SphericalRepresentation or a subclass for the representation.
    class NewUnitSphericalRepresentation(r.UnitSphericalRepresentation):
        attr_classes = r.UnitSphericalRepresentation.attr_classes

        def __repr__(self):
            return "<NewUnitSphericalRepresentation: spam spam spam>"

    frame = FK5(NewUnitSphericalRepresentation(lon=32 * u.deg, lat=20 * u.deg),
                representation_type=NewSphericalRepresentation)

    assert repr(frame) == "<FK5 Coordinate (equinox=J2000.000):  spam spam spam>"


def test_getitem_representation():
    """
    Make sure current representation survives __getitem__ even if different
    from data representation.
    """
    c = ICRS([1, 1] * u.deg, [2, 2] * u.deg)
    c.representation_type = 'cartesian'
    assert c[0].representation_type is r.CartesianRepresentation


def test_component_error_useful():
    """
    Check that a data-less frame gives useful error messages about not having
    data when the attributes asked for are possible coordinate components
    """
    i = ICRS()

    with pytest.raises(ValueError) as excinfo:
        i.ra
    assert 'does not have associated data' in str(excinfo.value)

    with pytest.raises(AttributeError) as excinfo1:
        i.foobar
    with pytest.raises(AttributeError) as excinfo2:
        i.lon  # lon is *not* the component name despite being the underlying representation's name

    assert "object has no attribute 'foobar'" in str(excinfo1.value)
    assert "object has no attribute 'lon'" in str(excinfo2.value)


def test_cache_clear():
    i = ICRS(1*u.deg, 2*u.deg)

    # Add an in frame units version of the rep to the cache.
    repr(i)

    assert len(i.cache['representation']) == 2

    i.cache.clear()

    assert len(i.cache['representation']) == 0


def test_inplace_array():
    i = ICRS([[1, 2], [3, 4]]*u.deg, [[10, 20], [30, 40]]*u.deg)

    # Add an in frame units version of the rep to the cache.
    repr(i)

    # Check that repr() has added a rep to the cache
    assert len(i.cache['representation']) == 2

    # Modify the data
    i.data.lon[:, 0] = [100, 200]*u.deg

    # Clear the cache
    i.cache.clear()

    # This will use a second (potentially cached rep)
    assert_allclose(i.ra, [[100, 2], [200, 4]]*u.deg)
    assert_allclose(i.dec, [[10, 20], [30, 40]]*u.deg)


def test_inplace_change():
    i = ICRS(1*u.deg, 2*u.deg)

    # Add an in frame units version of the rep to the cache.
    repr(i)

    # Check that repr() has added a rep to the cache
    assert len(i.cache['representation']) == 2

    # Modify the data
    i.data.lon[()] = 10*u.deg

    # Clear the cache
    i.cache.clear()

    # This will use a second (potentially cached rep)
    assert i.ra == 10 * u.deg
    assert i.dec == 2 * u.deg


def test_representation_with_multiple_differentials():
    dif1 = r.CartesianDifferential([1, 2, 3]*u.km/u.s)
    dif2 = r.CartesianDifferential([1, 2, 3]*u.km/u.s**2)
    rep = r.CartesianRepresentation([1, 2, 3]*u.pc,
                                    differentials={'s': dif1, 's2': dif2})

    # check that an error is raised for data with multiple differentials
    with pytest.raises(ValueError):
        ICRS(rep)


def test_representation_arg_backwards_compatibility():
    # TODO: this test can be removed when the `representation` argument is
    # removed from the BaseCoordinateFrame initializer.
    c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type=r.CartesianRepresentation)

    c2 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type=r.CartesianRepresentation)

    c3 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type='cartesian')

    assert c1.x == c2.x
    assert c1.y == c2.y
    assert c1.z == c2.z

    assert c1.x == c3.x
    assert c1.y == c3.y
    assert c1.z == c3.z

    assert c1.representation_type == c1.representation_type

    with pytest.raises(ValueError):
        ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
             representation_type='cartesian',
             representation='cartesian')


def test_missing_component_error_names():
    """
    This test checks that the component names are frame component names, not
    representation or differential names, when referenced in an exception
    raised when not passing in enough data. For example:

    ICRS(ra=10*u.deg)

    should state:

    TypeError: __init__() missing 1 required positional argument: 'dec'
    """
    with pytest.raises(TypeError) as e:
        ICRS(ra=150 * u.deg)
    assert "missing 1 required positional argument: 'dec'" in str(e.value)

    with pytest.raises(TypeError) as e:
        ICRS(ra=150*u.deg, dec=-11*u.deg,
             pm_ra=100*u.mas/u.yr, pm_dec=10*u.mas/u.yr)
    assert "pm_ra_cosdec" in str(e.value)


def test_non_spherical_representation_unit_creation(unitphysics):
    class PhysicsICRS(ICRS):
        default_representation = r.PhysicsSphericalRepresentation

    pic = PhysicsICRS(phi=1*u.deg, theta=25*u.deg, r=1*u.kpc)
    assert isinstance(pic.data, r.PhysicsSphericalRepresentation)

    picu = PhysicsICRS(phi=1*u.deg, theta=25*u.deg)
    assert isinstance(picu.data, unitphysics)


def test_attribute_repr():
    class Spam:
        def _astropy_repr_in_frame(self):
            return "TEST REPR"

    class TestFrame(BaseCoordinateFrame):
        attrtest = Attribute(default=Spam())

    assert "TEST REPR" in repr(TestFrame())


def test_component_names_repr():
    # Frame class with new component names that includes a name swap
    class NameChangeFrame(BaseCoordinateFrame):
        default_representation = r.PhysicsSphericalRepresentation

        frame_specific_representation_info = {
            r.PhysicsSphericalRepresentation: [
                RepresentationMapping('phi', 'theta', u.deg),
                RepresentationMapping('theta', 'phi', u.arcsec),
                RepresentationMapping('r', 'JUSTONCE', u.AU)]
        }

    frame = NameChangeFrame(0*u.deg, 0*u.arcsec, 0*u.AU)

    # Check for the new names in the Frame repr
    assert "(theta, phi, JUSTONCE)" in repr(frame)

    # Check that the letter "r" has not been replaced more than once in the Frame repr
    assert repr(frame).count("JUSTONCE") == 1


@pytest.fixture
def reset_galactocentric_defaults():
    # TODO: this can be removed, along with the "warning" test below, once we
    # switch the default to 'latest' in v4.1

    # Resets before each test, and after (the yield is pytest magic)
    galactocentric_frame_defaults.set('v4.0')
    yield
    galactocentric_frame_defaults.set('v4.0')


def test_galactocentric_defaults(reset_galactocentric_defaults):

    with galactocentric_frame_defaults.set('pre-v4.0'):
        galcen_pre40 = Galactocentric()

    with galactocentric_frame_defaults.set('v4.0'):
        galcen_40 = Galactocentric()

    with galactocentric_frame_defaults.set('latest'):
        galcen_latest = Galactocentric()

    # parameters that changed
    assert not u.allclose(galcen_pre40.galcen_distance,
                          galcen_40.galcen_distance)
    assert not u.allclose(galcen_pre40.z_sun, galcen_40.z_sun)

    for k in galcen_40.get_frame_attr_names():
        if isinstance(getattr(galcen_40, k), BaseCoordinateFrame):
            continue  # skip coordinate comparison...
elif isinstance(getattr(galcen_40, k), CartesianDifferential): assert u.allclose(getattr(galcen_40, k).d_xyz, getattr(galcen_latest, k).d_xyz) else: assert getattr(galcen_40, k) == getattr(galcen_latest, k) # test validate Galactocentric with galactocentric_frame_defaults.set('latest'): params = galactocentric_frame_defaults.validate(galcen_latest) references = galcen_latest.frame_attribute_references state = dict(parameters=params, references=references) assert galactocentric_frame_defaults.parameters == params assert galactocentric_frame_defaults.references == references assert galactocentric_frame_defaults._state == state # Test not one of accepted parameter types with pytest.raises(ValueError): galactocentric_frame_defaults.validate(ValueError) # test parameters property assert ( galactocentric_frame_defaults.parameters == galactocentric_frame_defaults.parameters ) def test_galactocentric_references(reset_galactocentric_defaults): # references in the "scientific paper"-sense with galactocentric_frame_defaults.set('pre-v4.0'): galcen_pre40 = Galactocentric() for k in galcen_pre40.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue assert k in galcen_pre40.frame_attribute_references with galactocentric_frame_defaults.set('v4.0'): galcen_40 = Galactocentric() for k in galcen_40.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue assert k in galcen_40.frame_attribute_references with galactocentric_frame_defaults.set('v4.0'): galcen_custom = Galactocentric(z_sun=15*u.pc) for k in galcen_custom.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue if k == 'z_sun': assert k not in galcen_custom.frame_attribute_references else: assert k in galcen_custom.frame_attribute_references def test_coordinateattribute_transformation(): class FrameWithCoordinateAttribute(BaseCoordinateFrame): coord_attr = CoordinateAttribute(HCRS) hcrs = HCRS(1*u.deg, 2*u.deg, 3*u.AU, obstime='2001-02-03') f1_frame = FrameWithCoordinateAttribute(coord_attr=hcrs) f1_skycoord = FrameWithCoordinateAttribute(coord_attr=SkyCoord(hcrs)) # The input is already HCRS, so the frame attribute should not change it assert f1_frame.coord_attr == hcrs # The output should not be different if a SkyCoord is provided assert f1_skycoord.coord_attr == f1_frame.coord_attr gcrs = GCRS(4*u.deg, 5*u.deg, 6*u.AU, obstime='2004-05-06') f2_frame = FrameWithCoordinateAttribute(coord_attr=gcrs) f2_skycoord = FrameWithCoordinateAttribute(coord_attr=SkyCoord(gcrs)) # The input needs to be converted from GCRS to HCRS assert isinstance(f2_frame.coord_attr, HCRS) # The `obstime` frame attribute should have been "merged" in a SkyCoord-style transformation assert f2_frame.coord_attr.obstime == gcrs.obstime # The output should not be different if a SkyCoord is provided assert f2_skycoord.coord_attr == f2_frame.coord_attr def test_realize_frame_accepts_kwargs(): c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type=r.CartesianRepresentation) new_data = r.CartesianRepresentation(x=11*u.pc, y=12*u.pc, z=13*u.pc) c2 = c1.realize_frame(new_data, representation_type="cartesian") c3 = c1.realize_frame(new_data, representation_type="cylindrical") assert c2.representation_type == r.CartesianRepresentation assert c3.representation_type == r.CylindricalRepresentation def test_nameless_frame_subclass(): """Note: this is a regression test for #11096""" class Test: pass # Subclass from a frame class and a non-frame class. # This subclassing is the test! 
class NewFrame(ICRS, Test): pass
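The QuantityAttribute tests above all follow one pattern: declare the attribute on a BaseCoordinateFrame subclass, then let the frame default, convert, and validate the value. Below is a minimal runnable sketch of that pattern using only imports the test file itself uses; the frame name ObsFrame and its wavelength attribute are illustrative, not part of astropy.

import astropy.units as u
from astropy.coordinates.attributes import QuantityAttribute
from astropy.coordinates.baseframe import BaseCoordinateFrame


class ObsFrame(BaseCoordinateFrame):
    # A default carrying a unit is enough: QuantityAttribute infers the
    # attribute's unit from it, so no explicit unit= is needed.
    wavelength = QuantityAttribute(default=550 * u.nm)


f = ObsFrame()
assert u.isclose(f.wavelength, 550 * u.nm)   # unset -> declared default

f = ObsFrame(wavelength=0.5 * u.um)          # converted to the frame's unit
assert u.isclose(f.wavelength, 500 * u.nm)

# Incompatible units are rejected already at construction time.
try:
    ObsFrame(wavelength=5 * u.s)
except u.UnitsError:
    pass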
astropy/astropy
astropy/coordinates/tests/test_frames.py
astropy/io/ascii/tests/test_types.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Utility functions for ``constants`` sub-package.""" import itertools __all__ = [] def _get_c(codata, iaudata, module, not_in_module_only=True): """ Generator to return a Constant object. Parameters ---------- codata, iaudata : obj Modules containing CODATA and IAU constants of interest. module : obj Namespace module of interest. not_in_module_only : bool If ``True``, ignore constants that are already in the namespace of ``module``. Returns ------- _c : Constant Constant object to process. """ from .constant import Constant for _nm, _c in itertools.chain(sorted(vars(codata).items()), sorted(vars(iaudata).items())): if not isinstance(_c, Constant): continue elif (not not_in_module_only) or (_c.abbrev not in module.__dict__): yield _c def _set_c(codata, iaudata, module, not_in_module_only=True, doclines=None, set_class=False): """ Set constants in a given module namespace. Parameters ---------- codata, iaudata : obj Modules containing CODATA and IAU constants of interest. module : obj Namespace module to modify with the given ``codata`` and ``iaudata``. not_in_module_only : bool If ``True``, constants that are already in the namespace of ``module`` will not be modified. doclines : list or None If a list is given, this list will be modified in-place to include documentation of modified constants. This can be used to update docstring of ``module``. set_class : bool Namespace of ``module`` is populated with ``_c.__class__`` instead of just ``_c`` from :func:`_get_c`. """ for _c in _get_c(codata, iaudata, module, not_in_module_only=not_in_module_only): if set_class: value = _c.__class__(_c.abbrev, _c.name, _c.value, _c._unit_string, _c.uncertainty, _c.reference) else: value = _c setattr(module, _c.abbrev, value) if doclines is not None: doclines.append('{:^10} {:^14.9g} {:^16} {}'.format( _c.abbrev, _c.value, _c._unit_string, _c.name))
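The two private helpers above are meant to be driven together: `_get_c` filters the Constant instances out of the CODATA and IAU modules, and `_set_c` copies them into a target namespace while optionally accumulating docstring rows. A hedged sketch of that flow, assuming the `codata2018` and `iau2015` submodules as sources and a throwaway module object in place of a real namespace such as `astropy.constants.si`:

import types

from astropy.constants import codata2018, iau2015, utils

# Scratch module standing in for the namespace _set_c normally populates.
scratch = types.ModuleType('scratch_constants')
doclines = []

utils._set_c(codata2018, iau2015, scratch, doclines=doclines)

# Each Constant is now an attribute of the scratch namespace ...
print(scratch.c)       # speed of light
print(scratch.G)       # gravitational constant

# ... and doclines holds one formatted summary row per constant, ready to
# be appended to a module docstring.
print(doclines[0])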
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from copy import deepcopy import numpy as np import pytest import re from astropy import units as u from astropy.units import allclose from astropy.tests.helper import assert_quantity_allclose as assert_allclose from astropy.utils.exceptions import AstropyWarning from astropy.time import Time from astropy.coordinates import ( EarthLocation, galactocentric_frame_defaults, representation as r, SkyCoord, ) from astropy.coordinates.attributes import ( Attribute, CoordinateAttribute, DifferentialAttribute, EarthLocationAttribute, QuantityAttribute, TimeAttribute, ) from astropy.coordinates.baseframe import ( BaseCoordinateFrame, RepresentationMapping ) from astropy.coordinates.builtin_frames import ( AltAz, HADec, FK4, FK5, Galactic, Galactocentric, GCRS, HCRS, ICRS, ITRS ) from astropy.coordinates.representation import ( CartesianDifferential, REPRESENTATION_CLASSES, ) from .test_representation import unitphysics # this fixture is used below # noqa def setup_function(func): """Copy original 'REPRESENTATIONCLASSES' as attribute in function.""" func.REPRESENTATION_CLASSES_ORIG = deepcopy(REPRESENTATION_CLASSES) def teardown_function(func): """Reset REPRESENTATION_CLASSES to original value.""" REPRESENTATION_CLASSES.clear() REPRESENTATION_CLASSES.update(func.REPRESENTATION_CLASSES_ORIG) def test_frame_attribute_descriptor(): """Unit tests of the Attribute descriptor.""" class TestAttributes: attr_none = Attribute() attr_2 = Attribute(default=2) attr_3_attr2 = Attribute(default=3, secondary_attribute='attr_2') attr_none_attr2 = Attribute(default=None, secondary_attribute='attr_2') attr_none_nonexist = Attribute( default=None, secondary_attribute='nonexist' ) t = TestAttributes() # Defaults assert t.attr_none is None assert t.attr_2 == 2 assert t.attr_3_attr2 == 3 assert t.attr_none_attr2 == t.attr_2 assert t.attr_none_nonexist is None # No default and non-existent secondary attr # Setting values via '_'-prefixed internal vars # (as would normally done in __init__) t._attr_none = 10 assert t.attr_none == 10 t._attr_2 = 20 assert t.attr_2 == 20 assert t.attr_3_attr2 == 3 assert t.attr_none_attr2 == t.attr_2 t._attr_none_attr2 = 40 assert t.attr_none_attr2 == 40 # Make sure setting values via public attribute fails with pytest.raises(AttributeError) as err: t.attr_none = 5 assert 'Cannot set frame attribute' in str(err.value) def test_frame_subclass_attribute_descriptor(): """Unit test of the attribute descriptors in subclasses.""" _EQUINOX_B1980 = Time('B1980', scale='tai') class MyFK4(FK4): # equinox inherited from FK4, obstime overridden, and newattr is new obstime = TimeAttribute(default=_EQUINOX_B1980) newattr = Attribute(default='newattr') mfk4 = MyFK4() assert mfk4.equinox.value == 'B1950.000' assert mfk4.obstime.value == 'B1980.000' assert mfk4.newattr == 'newattr' assert set(mfk4.get_frame_attr_names()) == set(['equinox', 'obstime', 'newattr']) mfk4 = MyFK4(equinox='J1980.0', obstime='J1990.0', newattr='world') assert mfk4.equinox.value == 'J1980.000' assert mfk4.obstime.value == 'J1990.000' assert mfk4.newattr == 'world' def test_frame_multiple_inheritance_attribute_descriptor(): """ Ensure that all attributes are accumulated in case of inheritance from multiple BaseCoordinateFrames. 
See https://github.com/astropy/astropy/pull/11099#issuecomment-735829157 """ class Frame1(BaseCoordinateFrame): attr1 = Attribute() class Frame2(BaseCoordinateFrame): attr2 = Attribute() class Frame3(Frame1, Frame2): pass assert len(Frame3.frame_attributes) == 2 assert 'attr1' in Frame3.frame_attributes assert 'attr2' in Frame3.frame_attributes # In case the same attribute exists in both frames, the one from the # left-most class in the MRO should take precedence class Frame4(BaseCoordinateFrame): attr1 = Attribute() attr2 = Attribute() class Frame5(Frame1, Frame4): pass assert Frame5.frame_attributes['attr1'] is Frame1.frame_attributes['attr1'] assert Frame5.frame_attributes['attr2'] is Frame4.frame_attributes['attr2'] def test_differentialattribute(): # Test logic of passing input through to allowed class vel = [1, 2, 3]*u.km/u.s dif = r.CartesianDifferential(vel) class TestFrame(BaseCoordinateFrame): attrtest = DifferentialAttribute( default=dif, allowed_classes=[r.CartesianDifferential]) frame1 = TestFrame() frame2 = TestFrame(attrtest=dif) frame3 = TestFrame(attrtest=vel) assert np.all(frame1.attrtest.d_xyz == frame2.attrtest.d_xyz) assert np.all(frame1.attrtest.d_xyz == frame3.attrtest.d_xyz) # This shouldn't work if there is more than one allowed class: class TestFrame2(BaseCoordinateFrame): attrtest = DifferentialAttribute( default=dif, allowed_classes=[r.CartesianDifferential, r.CylindricalDifferential]) frame1 = TestFrame2() frame2 = TestFrame2(attrtest=dif) with pytest.raises(TypeError): TestFrame2(attrtest=vel) def test_create_data_frames(): # from repr i1 = ICRS(r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)) i2 = ICRS(r.UnitSphericalRepresentation(lon=1*u.deg, lat=2*u.deg)) # from preferred name i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc) i4 = ICRS(ra=1*u.deg, dec=2*u.deg) assert i1.data.lat == i3.data.lat assert i1.data.lon == i3.data.lon assert i1.data.distance == i3.data.distance assert i2.data.lat == i4.data.lat assert i2.data.lon == i4.data.lon # now make sure the preferred names work as properties assert_allclose(i1.ra, i3.ra) assert_allclose(i2.ra, i4.ra) assert_allclose(i1.distance, i3.distance) with pytest.raises(AttributeError): i1.ra = [11.]*u.deg def test_create_orderered_data(): TOL = 1e-10*u.deg i = ICRS(1*u.deg, 2*u.deg) assert (i.ra - 1*u.deg) < TOL assert (i.dec - 2*u.deg) < TOL g = Galactic(1*u.deg, 2*u.deg) assert (g.l - 1*u.deg) < TOL assert (g.b - 2*u.deg) < TOL a = AltAz(1*u.deg, 2*u.deg) assert (a.az - 1*u.deg) < TOL assert (a.alt - 2*u.deg) < TOL with pytest.raises(TypeError): ICRS(1*u.deg, 2*u.deg, 1*u.deg, 2*u.deg) with pytest.raises(TypeError): sph = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc) ICRS(sph, 1*u.deg, 2*u.deg) def test_create_nodata_frames(): i = ICRS() assert len(i.get_frame_attr_names()) == 0 f5 = FK5() assert f5.equinox == FK5.get_frame_attr_names()['equinox'] f4 = FK4() assert f4.equinox == FK4.get_frame_attr_names()['equinox'] # obstime is special because it's a property that uses equinox if obstime is not set assert f4.obstime in (FK4.get_frame_attr_names()['obstime'], FK4.get_frame_attr_names()['equinox']) def test_no_data_nonscalar_frames(): a1 = AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day, temperature=np.ones((3, 1)) * u.deg_C) assert a1.obstime.shape == (3, 10) assert a1.temperature.shape == (3, 10) assert a1.shape == (3, 10) with pytest.raises(ValueError) as exc: AltAz(obstime=Time('2012-01-01') + np.arange(10.) 
* u.day, temperature=np.ones((3,)) * u.deg_C) assert 'inconsistent shapes' in str(exc.value) def test_frame_repr(): i = ICRS() assert repr(i) == '<ICRS Frame>' f5 = FK5() assert repr(f5).startswith('<FK5 Frame (equinox=') i2 = ICRS(ra=1*u.deg, dec=2*u.deg) i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc) assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n' ' (1., 2.)>') assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n' ' (1., 2., 3.)>') # try with arrays i2 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[2.1, 3.1]*u.deg) i3 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[-15.6, 17.1]*u.deg, distance=[11., 21.]*u.kpc) assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n' ' [(1.1, 2.1), (2.1, 3.1)]>') assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n' ' [(1.1, -15.6, 11.), (2.1, 17.1, 21.)]>') def test_frame_repr_vels(): i = ICRS(ra=1*u.deg, dec=2*u.deg, pm_ra_cosdec=1*u.marcsec/u.yr, pm_dec=2*u.marcsec/u.yr) # unit comes out as mas/yr because of the preferred units defined in the # frame RepresentationMapping assert repr(i) == ('<ICRS Coordinate: (ra, dec) in deg\n' ' (1., 2.)\n' ' (pm_ra_cosdec, pm_dec) in mas / yr\n' ' (1., 2.)>') def test_converting_units(): # this is a regular expression that with split (see below) removes what's # the decimal point to fix rounding problems rexrepr = re.compile(r'(.*?=\d\.).*?( .*?=\d\.).*?( .*)') # Use values that aren't subject to rounding down to X.9999... i2 = ICRS(ra=2.*u.deg, dec=2.*u.deg) i2_many = ICRS(ra=[2., 4.]*u.deg, dec=[2., -8.1]*u.deg) # converting from FK5 to ICRS and back changes the *internal* representation, # but it should still come out in the preferred form i4 = i2.transform_to(FK5()).transform_to(ICRS()) i4_many = i2_many.transform_to(FK5()).transform_to(ICRS()) ri2 = ''.join(rexrepr.split(repr(i2))) ri4 = ''.join(rexrepr.split(repr(i4))) assert ri2 == ri4 assert i2.data.lon.unit != i4.data.lon.unit # Internal repr changed ri2_many = ''.join(rexrepr.split(repr(i2_many))) ri4_many = ''.join(rexrepr.split(repr(i4_many))) assert ri2_many == ri4_many assert i2_many.data.lon.unit != i4_many.data.lon.unit # Internal repr changed # but that *shouldn't* hold if we turn off units for the representation class FakeICRS(ICRS): frame_specific_representation_info = { 'spherical': [RepresentationMapping('lon', 'ra', u.hourangle), RepresentationMapping('lat', 'dec', None), RepresentationMapping('distance', 'distance')] # should fall back to default of None unit } fi = FakeICRS(i4.data) ri2 = ''.join(rexrepr.split(repr(i2))) rfi = ''.join(rexrepr.split(repr(fi))) rfi = re.sub('FakeICRS', 'ICRS', rfi) # Force frame name to match assert ri2 != rfi # the attributes should also get the right units assert i2.dec.unit == i4.dec.unit # unless no/explicitly given units assert i2.dec.unit != fi.dec.unit assert i2.ra.unit != fi.ra.unit assert fi.ra.unit == u.hourangle def test_representation_info(): class NewICRS1(ICRS): frame_specific_representation_info = { r.SphericalRepresentation: [ RepresentationMapping('lon', 'rara', u.hourangle), RepresentationMapping('lat', 'decdec', u.degree), RepresentationMapping('distance', 'distance', u.kpc)] } i1 = NewICRS1(rara=10*u.degree, decdec=-12*u.deg, distance=1000*u.pc, pm_rara_cosdecdec=100*u.mas/u.yr, pm_decdec=17*u.mas/u.yr, radial_velocity=10*u.km/u.s) assert allclose(i1.rara, 10*u.deg) assert i1.rara.unit == u.hourangle assert allclose(i1.decdec, -12*u.deg) assert allclose(i1.distance, 1000*u.pc) assert i1.distance.unit == u.kpc assert allclose(i1.pm_rara_cosdecdec, 
100*u.mas/u.yr) assert allclose(i1.pm_decdec, 17*u.mas/u.yr) # this should auto-set the names of UnitSpherical: i1.set_representation_cls(r.UnitSphericalRepresentation, s=r.UnitSphericalCosLatDifferential) assert allclose(i1.rara, 10*u.deg) assert allclose(i1.decdec, -12*u.deg) assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr) assert allclose(i1.pm_decdec, 17*u.mas/u.yr) # For backwards compatibility, we also support the string name in the # representation info dictionary: class NewICRS2(ICRS): frame_specific_representation_info = { 'spherical': [ RepresentationMapping('lon', 'ang1', u.hourangle), RepresentationMapping('lat', 'ang2', u.degree), RepresentationMapping('distance', 'howfar', u.kpc)] } i2 = NewICRS2(ang1=10*u.degree, ang2=-12*u.deg, howfar=1000*u.pc) assert allclose(i2.ang1, 10*u.deg) assert i2.ang1.unit == u.hourangle assert allclose(i2.ang2, -12*u.deg) assert allclose(i2.howfar, 1000*u.pc) assert i2.howfar.unit == u.kpc # Test that the differential kwargs get overridden class NewICRS3(ICRS): frame_specific_representation_info = { r.SphericalCosLatDifferential: [ RepresentationMapping('d_lon_coslat', 'pm_ang1', u.hourangle/u.year), RepresentationMapping('d_lat', 'pm_ang2'), RepresentationMapping('d_distance', 'vlos', u.kpc/u.Myr)] } i3 = NewICRS3(lon=10*u.degree, lat=-12*u.deg, distance=1000*u.pc, pm_ang1=1*u.mas/u.yr, pm_ang2=2*u.mas/u.yr, vlos=100*u.km/u.s) assert allclose(i3.pm_ang1, 1*u.mas/u.yr) assert i3.pm_ang1.unit == u.hourangle/u.year assert allclose(i3.pm_ang2, 2*u.mas/u.yr) assert allclose(i3.vlos, 100*u.km/u.s) assert i3.vlos.unit == u.kpc/u.Myr def test_realizing(): rep = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc) i = ICRS() i2 = i.realize_frame(rep) assert not i.has_data assert i2.has_data f = FK5(equinox=Time('J2001')) f2 = f.realize_frame(rep) assert not f.has_data assert f2.has_data assert f2.equinox == f.equinox assert f2.equinox != FK5.get_frame_attr_names()['equinox'] # Check that a nicer error message is returned: with pytest.raises(TypeError) as excinfo: f.realize_frame(f.representation_type) assert ('Class passed as data instead of a representation' in excinfo.value.args[0]) def test_replicating(): i = ICRS(ra=[1]*u.deg, dec=[2]*u.deg) icopy = i.replicate(copy=True) irepl = i.replicate(copy=False) i.data._lat[:] = 0*u.deg assert np.all(i.data.lat == irepl.data.lat) assert np.all(i.data.lat != icopy.data.lat) iclone = i.replicate_without_data() assert i.has_data assert not iclone.has_data aa = AltAz(alt=1*u.deg, az=2*u.deg, obstime=Time('J2000')) aaclone = aa.replicate_without_data(obstime=Time('J2001')) assert not aaclone.has_data assert aa.obstime != aaclone.obstime assert aa.pressure == aaclone.pressure assert aa.obswl == aaclone.obswl def test_getitem(): rep = r.SphericalRepresentation( [1, 2, 3]*u.deg, [4, 5, 6]*u.deg, [7, 8, 9]*u.kpc) i = ICRS(rep) assert len(i.ra) == 3 iidx = i[1:] assert len(iidx.ra) == 2 iidx2 = i[0] assert iidx2.ra.isscalar def test_transform(): """ This test just makes sure the transform architecture works, but does *not* actually test all the builtin transforms themselves are accurate. 
""" i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) f = i.transform_to(FK5()) i2 = f.transform_to(ICRS()) assert i2.data.__class__ == r.UnitSphericalRepresentation assert_allclose(i.ra, i2.ra) assert_allclose(i.dec, i2.dec) i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) f = i.transform_to(FK5()) i2 = f.transform_to(ICRS()) assert i2.data.__class__ != r.UnitSphericalRepresentation f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001')) f4 = f.transform_to(FK4()) f4_2 = f.transform_to(FK4(equinox=f.equinox)) # make sure attributes are copied over correctly assert f4.equinox == FK4().equinox assert f4_2.equinox == f.equinox # make sure self-transforms also work i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) i2 = i.transform_to(ICRS()) assert_allclose(i.ra, i2.ra) assert_allclose(i.dec, i2.dec) f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001')) f2 = f.transform_to(FK5()) # default equinox, so should be *different* assert f2.equinox == FK5().equinox with pytest.raises(AssertionError): assert_allclose(f.ra, f2.ra) with pytest.raises(AssertionError): assert_allclose(f.dec, f2.dec) # finally, check Galactic round-tripping i1 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) i2 = i1.transform_to(Galactic()).transform_to(ICRS()) assert_allclose(i1.ra, i2.ra) assert_allclose(i1.dec, i2.dec) def test_transform_to_nonscalar_nodata_frame(): # https://github.com/astropy/astropy/pull/5254#issuecomment-241592353 times = Time('2016-08-23') + np.linspace(0, 10, 12)*u.day coo1 = ICRS(ra=[[0.], [10.], [20.]]*u.deg, dec=[[-30.], [30.], [60.]]*u.deg) coo2 = coo1.transform_to(FK5(equinox=times)) assert coo2.shape == (3, 12) def test_setitem_no_velocity(): """Test different flavors of item setting for a Frame without a velocity. """ obstime = 'B1955' sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime) sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime) sc1 = sc0.copy() sc1_repr = repr(sc1) assert 'representation' in sc1.cache sc1[1] = sc2[0] assert sc1.cache == {} assert repr(sc2) != sc1_repr assert np.allclose(sc1.ra.to_value(u.deg), [1, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [3, 30]) assert sc1.obstime == sc2.obstime assert sc1.name == 'fk4' sc1 = sc0.copy() sc1[:] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [10, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 30]) sc1 = sc0.copy() sc1[:] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [10, 20]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 40]) sc1 = sc0.copy() sc1[[1, 0]] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [20, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [40, 30]) # Works for array-valued obstime so long as they are considered equivalent sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, obstime]) sc1[0] = sc2[0] # Multidimensional coordinates sc1 = FK4([[1, 2], [3, 4]] * u.deg, [[5, 6], [7, 8]] * u.deg) sc2 = FK4([[10, 20], [30, 40]] * u.deg, [[50, 60], [70, 80]] * u.deg) sc1[0] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [[10, 20], [3, 4]]) assert np.allclose(sc1.dec.to_value(u.deg), [[50, 60], [7, 8]]) def test_setitem_velocities(): """Test different flavors of item setting for a Frame with a velocity. 
""" sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s, obstime='B1950') sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, radial_velocity=[10, 20]*u.km/u.s, obstime='B1950') sc1 = sc0.copy() sc1[1] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [1, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [3, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [1, 10]) assert sc1.obstime == sc2.obstime assert sc1.name == 'fk4' sc1 = sc0.copy() sc1[:] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [10, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 10]) sc1 = sc0.copy() sc1[:] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [10, 20]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 40]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 20]) sc1 = sc0.copy() sc1[[1, 0]] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [20, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [40, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [20, 10]) def test_setitem_exceptions(): obstime = 'B1950' sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg) sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime) sc1 = Galactic(sc0.ra, sc0.dec) with pytest.raises(TypeError, match='can only set from object of same class: ' 'Galactic vs. FK4'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra, sc0.dec, obstime='B2001') with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra[0], sc0.dec[0], obstime=obstime) with pytest.raises(TypeError, match="scalar 'FK4' frame object does not support " 'item assignment'): sc1[0] = sc2[0] sc1 = FK4(obstime=obstime) with pytest.raises(ValueError, match='cannot set frame which has no data'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, 'B1980']) with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] # Wrong shape sc1 = FK4([sc0.ra], [sc0.dec], obstime=[obstime, 'B1980']) with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] def test_sep(): i1 = ICRS(ra=0*u.deg, dec=1*u.deg) i2 = ICRS(ra=0*u.deg, dec=2*u.deg) sep = i1.separation(i2) assert sep.deg == 1 i3 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) i4 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[4, 5]*u.kpc) sep3d = i3.separation_3d(i4) assert_allclose(sep3d.to(u.kpc), np.array([1, 1])*u.kpc) # check that it works even with velocities i5 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc, pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr, radial_velocity=[5, 6]*u.km/u.s) i6 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[7, 8]*u.kpc, pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr, radial_velocity=[5, 6]*u.km/u.s) sep3d = i5.separation_3d(i6) assert_allclose(sep3d.to(u.kpc), np.array([2, 2])*u.kpc) # 3d separations of dimensionless distances should still work i7 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.one) i8 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=4*u.one) sep3d = i7.separation_3d(i8) assert_allclose(sep3d, 1*u.one) # but should fail with non-dimensionless with pytest.raises(ValueError): i7.separation_3d(i3) def test_time_inputs(): """ Test validation and conversion of inputs for equinox and obstime attributes. 
""" c = FK4(1 * u.deg, 2 * u.deg, equinox='J2001.5', obstime='2000-01-01 12:00:00') assert c.equinox == Time('J2001.5') assert c.obstime == Time('2000-01-01 12:00:00') with pytest.raises(ValueError) as err: c = FK4(1 * u.deg, 2 * u.deg, equinox=1.5) assert 'Invalid time input' in str(err.value) with pytest.raises(ValueError) as err: c = FK4(1 * u.deg, 2 * u.deg, obstime='hello') assert 'Invalid time input' in str(err.value) # A vector time should work if the shapes match, but we don't automatically # broadcast the basic data (just like time). FK4([1, 2] * u.deg, [2, 3] * u.deg, obstime=['J2000', 'J2001']) with pytest.raises(ValueError) as err: FK4(1 * u.deg, 2 * u.deg, obstime=['J2000', 'J2001']) assert 'shape' in str(err.value) def test_is_frame_attr_default(): """ Check that the `is_frame_attr_default` machinery works as expected """ c1 = FK5(ra=1*u.deg, dec=1*u.deg) c2 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=FK5.get_frame_attr_names()['equinox']) c3 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=Time('J2001.5')) assert c1.equinox == c2.equinox assert c1.equinox != c3.equinox assert c1.is_frame_attr_default('equinox') assert not c2.is_frame_attr_default('equinox') assert not c3.is_frame_attr_default('equinox') c4 = c1.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) c5 = c2.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) assert c4.is_frame_attr_default('equinox') assert not c5.is_frame_attr_default('equinox') def test_altaz_attributes(): aa = AltAz(1*u.deg, 2*u.deg) assert aa.obstime is None assert aa.location is None aa2 = AltAz(1*u.deg, 2*u.deg, obstime='J2000') assert aa2.obstime == Time('J2000') aa3 = AltAz(1*u.deg, 2*u.deg, location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) assert isinstance(aa3.location, EarthLocation) def test_hadec_attributes(): hd = HADec(1*u.hourangle, 2*u.deg) assert hd.ha == 1.*u.hourangle assert hd.dec == 2*u.deg assert hd.obstime is None assert hd.location is None hd2 = HADec(23*u.hourangle, -2*u.deg, obstime='J2000', location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) assert_allclose(hd2.ha, -1*u.hourangle) assert hd2.dec == -2*u.deg assert hd2.obstime == Time('J2000') assert isinstance(hd2.location, EarthLocation) sr = hd2.represent_as(r.SphericalRepresentation) assert_allclose(sr.lon, -1*u.hourangle) def test_representation(): """ Test the getter and setter properties for `representation` """ # Create the frame object. icrs = ICRS(ra=1*u.deg, dec=1*u.deg) data = icrs.data # Create some representation objects. icrs_cart = icrs.cartesian icrs_spher = icrs.spherical icrs_cyl = icrs.cylindrical # Testing when `_representation` set to `CartesianRepresentation`. icrs.representation_type = r.CartesianRepresentation assert icrs.representation_type == r.CartesianRepresentation assert icrs_cart.x == icrs.x assert icrs_cart.y == icrs.y assert icrs_cart.z == icrs.z assert icrs.data == data # Testing that an ICRS object in CartesianRepresentation must not have spherical attributes. for attr in ('ra', 'dec', 'distance'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) # Testing when `_representation` set to `CylindricalRepresentation`. icrs.representation_type = r.CylindricalRepresentation assert icrs.representation_type == r.CylindricalRepresentation assert icrs.data == data # Testing setter input using text argument for spherical. 
icrs.representation_type = 'spherical' assert icrs.representation_type is r.SphericalRepresentation assert icrs_spher.lat == icrs.dec assert icrs_spher.lon == icrs.ra assert icrs_spher.distance == icrs.distance assert icrs.data == data # Testing that an ICRS object in SphericalRepresentation must not have cartesian attributes. for attr in ('x', 'y', 'z'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) # Testing setter input using text argument for cylindrical. icrs.representation_type = 'cylindrical' assert icrs.representation_type is r.CylindricalRepresentation assert icrs_cyl.rho == icrs.rho assert icrs_cyl.phi == icrs.phi assert icrs_cyl.z == icrs.z assert icrs.data == data # Testing that an ICRS object in CylindricalRepresentation must not have spherical attributes. for attr in ('ra', 'dec', 'distance'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) with pytest.raises(ValueError) as err: icrs.representation_type = 'WRONG' assert 'but must be a BaseRepresentation class' in str(err.value) with pytest.raises(ValueError) as err: icrs.representation_type = ICRS assert 'but must be a BaseRepresentation class' in str(err.value) def test_represent_as(): icrs = ICRS(ra=1*u.deg, dec=1*u.deg) cart1 = icrs.represent_as('cartesian') cart2 = icrs.represent_as(r.CartesianRepresentation) cart1.x == cart2.x cart1.y == cart2.y cart1.z == cart2.z # now try with velocities icrs = ICRS(ra=0*u.deg, dec=0*u.deg, distance=10*u.kpc, pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr, radial_velocity=1*u.km/u.s) # single string rep2 = icrs.represent_as('cylindrical') assert isinstance(rep2, r.CylindricalRepresentation) assert isinstance(rep2.differentials['s'], r.CylindricalDifferential) # single class with positional in_frame_units, verify that warning raised with pytest.warns(AstropyWarning, match='argument position') as w: icrs.represent_as(r.CylindricalRepresentation, False) assert len(w) == 1 # TODO: this should probably fail in the future once we figure out a better # workaround for dealing with UnitSphericalRepresentation's with # RadialDifferential's # two classes # rep2 = icrs.represent_as(r.CartesianRepresentation, # r.SphericalCosLatDifferential) # assert isinstance(rep2, r.CartesianRepresentation) # assert isinstance(rep2.differentials['s'], r.SphericalCosLatDifferential) with pytest.raises(ValueError): icrs.represent_as('odaigahara') def test_shorthand_representations(): rep = r.CartesianRepresentation([1, 2, 3]*u.pc) dif = r.CartesianDifferential([1, 2, 3]*u.km/u.s) rep = rep.with_differentials(dif) icrs = ICRS(rep) cyl = icrs.cylindrical assert isinstance(cyl, r.CylindricalRepresentation) assert isinstance(cyl.differentials['s'], r.CylindricalDifferential) sph = icrs.spherical assert isinstance(sph, r.SphericalRepresentation) assert isinstance(sph.differentials['s'], r.SphericalDifferential) sph = icrs.sphericalcoslat assert isinstance(sph, r.SphericalRepresentation) assert isinstance(sph.differentials['s'], r.SphericalCosLatDifferential) def test_equal(): obstime = 'B1955' sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime) sc2 = FK4([1, 20]*u.deg, [3, 4]*u.deg, obstime=obstime) # Compare arrays and scalars eq = sc1 == sc2 ne = sc1 != sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) assert (sc1[0] == sc2[0]) == True # noqa (numpy True not Python True) assert (sc1[0] != sc2[0]) == False # noqa # Broadcasting eq = sc1[0] == sc2 ne = sc1[0] 
!= sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) # With diff only in velocity sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s) sc2 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 20]*u.km/u.s) eq = sc1 == sc2 ne = sc1 != sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) assert (sc1[0] == sc2[0]) == True # noqa assert (sc1[0] != sc2[0]) == False # noqa assert (FK4() == ICRS()) is False assert (FK4() == FK4(obstime='J1999')) is False def test_equal_exceptions(): # Shape mismatch sc1 = FK4([1, 2, 3]*u.deg, [3, 4, 5]*u.deg) with pytest.raises(ValueError, match='cannot compare: shape mismatch'): sc1 == sc1[:2] # Different representation_type sc1 = FK4(1, 2, 3, representation_type='cartesian') sc2 = FK4(1*u.deg, 2*u.deg, 2, representation_type='spherical') with pytest.raises(TypeError, match='cannot compare: objects must have same ' 'class: CartesianRepresentation vs. SphericalRepresentation'): sc1 == sc2 # Different differential type sc1 = FK4(1*u.deg, 2*u.deg, radial_velocity=1*u.km/u.s) sc2 = FK4(1*u.deg, 2*u.deg, pm_ra_cosdec=1*u.mas/u.yr, pm_dec=1*u.mas/u.yr) with pytest.raises(TypeError, match='cannot compare: objects must have same ' 'class: RadialDifferential vs. UnitSphericalCosLatDifferential'): sc1 == sc2 # Different frame attribute sc1 = FK5(1*u.deg, 2*u.deg) sc2 = FK5(1*u.deg, 2*u.deg, equinox='J1999') with pytest.raises(TypeError, match=r'cannot compare: objects must have equivalent ' r'frames: <FK5 Frame \(equinox=J2000.000\)> ' r'vs. <FK5 Frame \(equinox=J1999.000\)>'): sc1 == sc2 # Different frame sc1 = FK4(1*u.deg, 2*u.deg) sc2 = FK5(1*u.deg, 2*u.deg, equinox='J2000') with pytest.raises(TypeError, match='cannot compare: objects must have equivalent ' r'frames: <FK4 Frame \(equinox=B1950.000, obstime=B1950.000\)> ' r'vs. 
<FK5 Frame \(equinox=J2000.000\)>'): sc1 == sc2 sc1 = FK4(1*u.deg, 2*u.deg) sc2 = FK4() with pytest.raises(ValueError, match='cannot compare: one frame has data and ' 'the other does not'): sc1 == sc2 with pytest.raises(ValueError, match='cannot compare: one frame has data and ' 'the other does not'): sc2 == sc1 def test_dynamic_attrs(): c = ICRS(1*u.deg, 2*u.deg) assert 'ra' in dir(c) assert 'dec' in dir(c) with pytest.raises(AttributeError) as err: c.blahblah assert "object has no attribute 'blahblah'" in str(err.value) with pytest.raises(AttributeError) as err: c.ra = 1 assert "Cannot set any frame attribute" in str(err.value) c.blahblah = 1 assert c.blahblah == 1 def test_nodata_error(): i = ICRS() with pytest.raises(ValueError) as excinfo: i.data assert 'does not have associated data' in str(excinfo.value) def test_len0_data(): i = ICRS([]*u.deg, []*u.deg) assert i.has_data repr(i) def test_quantity_attributes(): # make sure we can create a GCRS frame with valid inputs GCRS(obstime='J2002', obsgeoloc=[1, 2, 3]*u.km, obsgeovel=[4, 5, 6]*u.km/u.s) # make sure it fails for invalid lovs or vels with pytest.raises(TypeError): GCRS(obsgeoloc=[1, 2, 3]) # no unit with pytest.raises(u.UnitsError): GCRS(obsgeoloc=[1, 2, 3]*u.km/u.s) # incorrect unit with pytest.raises(ValueError): GCRS(obsgeoloc=[1, 3]*u.km) # incorrect shape def test_quantity_attribute_default(): # The default default (yes) is None: class MyCoord(BaseCoordinateFrame): someval = QuantityAttribute(unit=u.deg) frame = MyCoord() assert frame.someval is None frame = MyCoord(someval=15*u.deg) assert u.isclose(frame.someval, 15*u.deg) # This should work if we don't explicitly pass in a unit, but we pass in a # default value with a unit class MyCoord2(BaseCoordinateFrame): someval = QuantityAttribute(15*u.deg) frame = MyCoord2() assert u.isclose(frame.someval, 15*u.deg) # Since here no shape was given, we can set to any shape we like. frame = MyCoord2(someval=np.ones(3)*u.deg) assert frame.someval.shape == (3,) assert np.all(frame.someval == 1*u.deg) # We should also be able to insist on a given shape. class MyCoord3(BaseCoordinateFrame): someval = QuantityAttribute(unit=u.arcsec, shape=(3,)) frame = MyCoord3(someval=np.ones(3)*u.deg) assert frame.someval.shape == (3,) assert frame.someval.unit == u.arcsec assert u.allclose(frame.someval.value, 3600.) # The wrong shape raises. with pytest.raises(ValueError, match='shape'): MyCoord3(someval=1.*u.deg) # As does the wrong unit. with pytest.raises(u.UnitsError): MyCoord3(someval=np.ones(3)*u.m) # We are allowed a short-cut for zero. frame0 = MyCoord3(someval=0) assert frame0.someval.shape == (3,) assert frame0.someval.unit == u.arcsec assert np.all(frame0.someval.value == 0.) # But not if it has the wrong shape. 
    with pytest.raises(ValueError, match='shape'):
        MyCoord3(someval=np.zeros(2))

    # This should fail, if we don't pass in a default or a unit
    with pytest.raises(ValueError):
        class MyCoord(BaseCoordinateFrame):
            someval = QuantityAttribute()
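test_frame_attribute_descriptor and test_frame_subclass_attribute_descriptor above both lean on the `secondary_attribute` fallback, the same mechanism FK4 uses to fall back from an unset obstime to equinox. A minimal sketch of that fallback on a custom frame; FallbackFrame and its attribute names are purely illustrative:

from astropy.coordinates.attributes import Attribute
from astropy.coordinates.baseframe import BaseCoordinateFrame


class FallbackFrame(BaseCoordinateFrame):
    primary = Attribute(default=2)
    # When 'shadow' is left unset, the descriptor returns the current
    # value of 'primary' instead of its own default.
    shadow = Attribute(default=None, secondary_attribute='primary')


f = FallbackFrame()
assert f.shadow == 2               # unset -> falls back to 'primary'

f = FallbackFrame(primary=5)
assert f.shadow == 5               # the fallback tracks the other attribute

f = FallbackFrame(primary=5, shadow=7)
assert f.shadow == 7               # an explicit value always wins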
astropy/astropy
astropy/coordinates/tests/test_frames.py
astropy/constants/utils.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This module contains convenience functions implementing some of the algorithms contained within Jean Meeus, 'Astronomical Algorithms', second edition, 1998, Willmann-Bell. """ import numpy as np from numpy.polynomial.polynomial import polyval import erfa from astropy.utils import deprecated from astropy import units as u from . import ICRS, SkyCoord, GeocentricTrueEcliptic from .builtin_frames.utils import get_jd12 __all__ = ["calc_moon"] # Meeus 1998: table 47.A # D M M' F l r _MOON_L_R = ( (0, 0, 1, 0, 6288774, -20905355), (2, 0, -1, 0, 1274027, -3699111), (2, 0, 0, 0, 658314, -2955968), (0, 0, 2, 0, 213618, -569925), (0, 1, 0, 0, -185116, 48888), (0, 0, 0, 2, -114332, -3149), (2, 0, -2, 0, 58793, 246158), (2, -1, -1, 0, 57066, -152138), (2, 0, 1, 0, 53322, -170733), (2, -1, 0, 0, 45758, -204586), (0, 1, -1, 0, -40923, -129620), (1, 0, 0, 0, -34720, 108743), (0, 1, 1, 0, -30383, 104755), (2, 0, 0, -2, 15327, 10321), (0, 0, 1, 2, -12528, 0), (0, 0, 1, -2, 10980, 79661), (4, 0, -1, 0, 10675, -34782), (0, 0, 3, 0, 10034, -23210), (4, 0, -2, 0, 8548, -21636), (2, 1, -1, 0, -7888, 24208), (2, 1, 0, 0, -6766, 30824), (1, 0, -1, 0, -5163, -8379), (1, 1, 0, 0, 4987, -16675), (2, -1, 1, 0, 4036, -12831), (2, 0, 2, 0, 3994, -10445), (4, 0, 0, 0, 3861, -11650), (2, 0, -3, 0, 3665, 14403), (0, 1, -2, 0, -2689, -7003), (2, 0, -1, 2, -2602, 0), (2, -1, -2, 0, 2390, 10056), (1, 0, 1, 0, -2348, 6322), (2, -2, 0, 0, 2236, -9884), (0, 1, 2, 0, -2120, 5751), (0, 2, 0, 0, -2069, 0), (2, -2, -1, 0, 2048, -4950), (2, 0, 1, -2, -1773, 4130), (2, 0, 0, 2, -1595, 0), (4, -1, -1, 0, 1215, -3958), (0, 0, 2, 2, -1110, 0), (3, 0, -1, 0, -892, 3258), (2, 1, 1, 0, -810, 2616), (4, -1, -2, 0, 759, -1897), (0, 2, -1, 0, -713, -2117), (2, 2, -1, 0, -700, 2354), (2, 1, -2, 0, 691, 0), (2, -1, 0, -2, 596, 0), (4, 0, 1, 0, 549, -1423), (0, 0, 4, 0, 537, -1117), (4, -1, 0, 0, 520, -1571), (1, 0, -2, 0, -487, -1739), (2, 1, 0, -2, -399, 0), (0, 0, 2, -2, -381, -4421), (1, 1, 1, 0, 351, 0), (3, 0, -2, 0, -340, 0), (4, 0, -3, 0, 330, 0), (2, -1, 2, 0, 327, 0), (0, 2, 1, 0, -323, 1165), (1, 1, -1, 0, 299, 0), (2, 0, 3, 0, 294, 0), (2, 0, -1, -2, 0, 8752) ) # Meeus 1998: table 47.B # D M M' F b _MOON_B = ( (0, 0, 0, 1, 5128122), (0, 0, 1, 1, 280602), (0, 0, 1, -1, 277693), (2, 0, 0, -1, 173237), (2, 0, -1, 1, 55413), (2, 0, -1, -1, 46271), (2, 0, 0, 1, 32573), (0, 0, 2, 1, 17198), (2, 0, 1, -1, 9266), (0, 0, 2, -1, 8822), (2, -1, 0, -1, 8216), (2, 0, -2, -1, 4324), (2, 0, 1, 1, 4200), (2, 1, 0, -1, -3359), (2, -1, -1, 1, 2463), (2, -1, 0, 1, 2211), (2, -1, -1, -1, 2065), (0, 1, -1, -1, -1870), (4, 0, -1, -1, 1828), (0, 1, 0, 1, -1794), (0, 0, 0, 3, -1749), (0, 1, -1, 1, -1565), (1, 0, 0, 1, -1491), (0, 1, 1, 1, -1475), (0, 1, 1, -1, -1410), (0, 1, 0, -1, -1344), (1, 0, 0, -1, -1335), (0, 0, 3, 1, 1107), (4, 0, 0, -1, 1021), (4, 0, -1, 1, 833), # second column (0, 0, 1, -3, 777), (4, 0, -2, 1, 671), (2, 0, 0, -3, 607), (2, 0, 2, -1, 596), (2, -1, 1, -1, 491), (2, 0, -2, 1, -451), (0, 0, 3, -1, 439), (2, 0, 2, 1, 422), (2, 0, -3, -1, 421), (2, 1, -1, 1, -366), (2, 1, 0, 1, -351), (4, 0, 0, 1, 331), (2, -1, 1, 1, 315), (2, -2, 0, -1, 302), (0, 0, 1, 3, -283), (2, 1, 1, -1, -229), (1, 1, 0, -1, 223), (1, 1, 0, 1, 223), (0, 1, -2, -1, -220), (2, 1, -1, -1, -220), (1, 0, 1, 1, -185), (2, -1, -2, -1, 181), (0, 1, 2, 1, -177), (4, 0, -2, -1, 176), (4, -1, -1, -1, 166), (1, 0, 1, -1, -164), (4, 0, 1, -1, 132), (1, 0, -1, -1, -119), (4, -1, 0, -1, 115), (2, -2, 0, 1, 107) 
) """ Coefficients of polynomials for various terms: Lc : Mean longitude of Moon, w.r.t mean Equinox of date D : Mean elongation of the Moon M: Sun's mean anomaly Mc : Moon's mean anomaly F : Moon's argument of latitude (mean distance of Moon from its ascending node). """ _coLc = (2.18316448e+02, 4.81267881e+05, -1.57860000e-03, 1.85583502e-06, -1.53388349e-08) _coD = (2.97850192e+02, 4.45267111e+05, -1.88190000e-03, 1.83194472e-06, -8.84447000e-09) _coM = (3.57529109e+02, 3.59990503e+04, -1.53600000e-04, 4.08329931e-08) _coMc = (1.34963396e+02, 4.77198868e+05, 8.74140000e-03, 1.43474081e-05, -6.79717238e-08) _coF = (9.32720950e+01, 4.83202018e+05, -3.65390000e-03, -2.83607487e-07, 1.15833246e-09) _coA1 = (119.75, 131.849) _coA2 = (53.09, 479264.290) _coA3 = (313.45, 481266.484) _coE = (1.0, -0.002516, -0.0000074) @deprecated(since="5.0", alternative="astropy.coordinates.get_moon", message=("The private calc_moon function has been deprecated, " "as its functionality is now available in ERFA. " "Note that the coordinate system was not interpreted " "quite correctly, leading to small inaccuracies. Please " "use the public get_moon or get_body functions instead.")) def calc_moon(t): """ Lunar position model ELP2000-82 of (Chapront-Touze' and Chapront, 1983, 124, 50) This is the simplified version of Jean Meeus, Astronomical Algorithms, second edition, 1998, Willmann-Bell. Meeus claims approximate accuracy of 10" in longitude and 4" in latitude, with no specified time range. Tests against JPL ephemerides show accuracy of 10 arcseconds and 50 km over the date range CE 1950-2050. Parameters ---------- t : `~astropy.time.Time` Time of observation. Returns ------- skycoord : `~astropy.coordinates.SkyCoord` ICRS Coordinate for the body """ # number of centuries since J2000.0. # This should strictly speaking be in Ephemeris Time, but TDB or TT # will introduce error smaller than intrinsic accuracy of algorithm. T = (t.tdb.jyear-2000.0)/100. # constants that are needed for all calculations Lc = u.Quantity(polyval(T, _coLc), u.deg) D = u.Quantity(polyval(T, _coD), u.deg) M = u.Quantity(polyval(T, _coM), u.deg) Mc = u.Quantity(polyval(T, _coMc), u.deg) F = u.Quantity(polyval(T, _coF), u.deg) A1 = u.Quantity(polyval(T, _coA1), u.deg) A2 = u.Quantity(polyval(T, _coA2), u.deg) A3 = u.Quantity(polyval(T, _coA3), u.deg) E = polyval(T, _coE) suml = sumr = 0.0 for DNum, MNum, McNum, FNum, LFac, RFac in _MOON_L_R: corr = E ** abs(MNum) suml += LFac*corr*np.sin(D*DNum+M*MNum+Mc*McNum+F*FNum) sumr += RFac*corr*np.cos(D*DNum+M*MNum+Mc*McNum+F*FNum) sumb = 0.0 for DNum, MNum, McNum, FNum, BFac in _MOON_B: corr = E ** abs(MNum) sumb += BFac*corr*np.sin(D*DNum+M*MNum+Mc*McNum+F*FNum) suml += (3958*np.sin(A1) + 1962*np.sin(Lc-F) + 318*np.sin(A2)) sumb += (-2235*np.sin(Lc) + 382*np.sin(A3) + 175*np.sin(A1-F) + 175*np.sin(A1+F) + 127*np.sin(Lc-Mc) - 115*np.sin(Lc+Mc)) # ensure units suml = suml*u.microdegree sumb = sumb*u.microdegree # nutation of longitude jd1, jd2 = get_jd12(t, 'tt') nut, _ = erfa.nut06a(jd1, jd2) nut = nut*u.rad # calculate ecliptic coordinates lon = Lc + suml + nut lat = sumb dist = (385000.56+sumr/1000)*u.km # Meeus algorithm gives GeocentricTrueEcliptic coordinates ecliptic_coo = GeocentricTrueEcliptic(lon, lat, distance=dist, obstime=t, equinox=t) return SkyCoord(ecliptic_coo.transform_to(ICRS()))
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst

from copy import deepcopy

import numpy as np
import pytest
import re

from astropy import units as u
from astropy.units import allclose
from astropy.tests.helper import assert_quantity_allclose as assert_allclose
from astropy.utils.exceptions import AstropyWarning
from astropy.time import Time

from astropy.coordinates import (
    EarthLocation, galactocentric_frame_defaults, representation as r, SkyCoord,
)
from astropy.coordinates.attributes import (
    Attribute, CoordinateAttribute, DifferentialAttribute,
    EarthLocationAttribute, QuantityAttribute, TimeAttribute,
)
from astropy.coordinates.baseframe import (
    BaseCoordinateFrame, RepresentationMapping
)
from astropy.coordinates.builtin_frames import (
    AltAz, HADec, FK4, FK5, Galactic, Galactocentric, GCRS, HCRS, ICRS, ITRS
)
from astropy.coordinates.representation import (
    CartesianDifferential, REPRESENTATION_CLASSES,
)

from .test_representation import unitphysics  # this fixture is used below  # noqa


def setup_function(func):
    """Copy original 'REPRESENTATION_CLASSES' as attribute in function."""
    func.REPRESENTATION_CLASSES_ORIG = deepcopy(REPRESENTATION_CLASSES)


def teardown_function(func):
    """Reset REPRESENTATION_CLASSES to original value."""
    REPRESENTATION_CLASSES.clear()
    REPRESENTATION_CLASSES.update(func.REPRESENTATION_CLASSES_ORIG)


def test_frame_attribute_descriptor():
    """Unit tests of the Attribute descriptor."""
    class TestAttributes:
        attr_none = Attribute()
        attr_2 = Attribute(default=2)
        attr_3_attr2 = Attribute(default=3, secondary_attribute='attr_2')
        attr_none_attr2 = Attribute(default=None, secondary_attribute='attr_2')
        attr_none_nonexist = Attribute(
            default=None, secondary_attribute='nonexist'
        )

    t = TestAttributes()

    # Defaults
    assert t.attr_none is None
    assert t.attr_2 == 2
    assert t.attr_3_attr2 == 3
    assert t.attr_none_attr2 == t.attr_2
    assert t.attr_none_nonexist is None  # No default and non-existent secondary attr

    # Setting values via '_'-prefixed internal vars
    # (as would normally be done in __init__)
    t._attr_none = 10
    assert t.attr_none == 10

    t._attr_2 = 20
    assert t.attr_2 == 20
    assert t.attr_3_attr2 == 3
    assert t.attr_none_attr2 == t.attr_2

    t._attr_none_attr2 = 40
    assert t.attr_none_attr2 == 40

    # Make sure setting values via public attribute fails
    with pytest.raises(AttributeError) as err:
        t.attr_none = 5
    assert 'Cannot set frame attribute' in str(err.value)


def test_frame_subclass_attribute_descriptor():
    """Unit test of the attribute descriptors in subclasses."""
    _EQUINOX_B1980 = Time('B1980', scale='tai')

    class MyFK4(FK4):
        # equinox inherited from FK4, obstime overridden, and newattr is new
        obstime = TimeAttribute(default=_EQUINOX_B1980)
        newattr = Attribute(default='newattr')

    mfk4 = MyFK4()
    assert mfk4.equinox.value == 'B1950.000'
    assert mfk4.obstime.value == 'B1980.000'
    assert mfk4.newattr == 'newattr'

    assert set(mfk4.get_frame_attr_names()) == set(['equinox', 'obstime', 'newattr'])

    mfk4 = MyFK4(equinox='J1980.0', obstime='J1990.0', newattr='world')
    assert mfk4.equinox.value == 'J1980.000'
    assert mfk4.obstime.value == 'J1990.000'
    assert mfk4.newattr == 'world'


def test_frame_multiple_inheritance_attribute_descriptor():
    """
    Ensure that all attributes are accumulated in case of inheritance from
    multiple BaseCoordinateFrames.
See https://github.com/astropy/astropy/pull/11099#issuecomment-735829157 """ class Frame1(BaseCoordinateFrame): attr1 = Attribute() class Frame2(BaseCoordinateFrame): attr2 = Attribute() class Frame3(Frame1, Frame2): pass assert len(Frame3.frame_attributes) == 2 assert 'attr1' in Frame3.frame_attributes assert 'attr2' in Frame3.frame_attributes # In case the same attribute exists in both frames, the one from the # left-most class in the MRO should take precedence class Frame4(BaseCoordinateFrame): attr1 = Attribute() attr2 = Attribute() class Frame5(Frame1, Frame4): pass assert Frame5.frame_attributes['attr1'] is Frame1.frame_attributes['attr1'] assert Frame5.frame_attributes['attr2'] is Frame4.frame_attributes['attr2'] def test_differentialattribute(): # Test logic of passing input through to allowed class vel = [1, 2, 3]*u.km/u.s dif = r.CartesianDifferential(vel) class TestFrame(BaseCoordinateFrame): attrtest = DifferentialAttribute( default=dif, allowed_classes=[r.CartesianDifferential]) frame1 = TestFrame() frame2 = TestFrame(attrtest=dif) frame3 = TestFrame(attrtest=vel) assert np.all(frame1.attrtest.d_xyz == frame2.attrtest.d_xyz) assert np.all(frame1.attrtest.d_xyz == frame3.attrtest.d_xyz) # This shouldn't work if there is more than one allowed class: class TestFrame2(BaseCoordinateFrame): attrtest = DifferentialAttribute( default=dif, allowed_classes=[r.CartesianDifferential, r.CylindricalDifferential]) frame1 = TestFrame2() frame2 = TestFrame2(attrtest=dif) with pytest.raises(TypeError): TestFrame2(attrtest=vel) def test_create_data_frames(): # from repr i1 = ICRS(r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)) i2 = ICRS(r.UnitSphericalRepresentation(lon=1*u.deg, lat=2*u.deg)) # from preferred name i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc) i4 = ICRS(ra=1*u.deg, dec=2*u.deg) assert i1.data.lat == i3.data.lat assert i1.data.lon == i3.data.lon assert i1.data.distance == i3.data.distance assert i2.data.lat == i4.data.lat assert i2.data.lon == i4.data.lon # now make sure the preferred names work as properties assert_allclose(i1.ra, i3.ra) assert_allclose(i2.ra, i4.ra) assert_allclose(i1.distance, i3.distance) with pytest.raises(AttributeError): i1.ra = [11.]*u.deg def test_create_orderered_data(): TOL = 1e-10*u.deg i = ICRS(1*u.deg, 2*u.deg) assert (i.ra - 1*u.deg) < TOL assert (i.dec - 2*u.deg) < TOL g = Galactic(1*u.deg, 2*u.deg) assert (g.l - 1*u.deg) < TOL assert (g.b - 2*u.deg) < TOL a = AltAz(1*u.deg, 2*u.deg) assert (a.az - 1*u.deg) < TOL assert (a.alt - 2*u.deg) < TOL with pytest.raises(TypeError): ICRS(1*u.deg, 2*u.deg, 1*u.deg, 2*u.deg) with pytest.raises(TypeError): sph = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc) ICRS(sph, 1*u.deg, 2*u.deg) def test_create_nodata_frames(): i = ICRS() assert len(i.get_frame_attr_names()) == 0 f5 = FK5() assert f5.equinox == FK5.get_frame_attr_names()['equinox'] f4 = FK4() assert f4.equinox == FK4.get_frame_attr_names()['equinox'] # obstime is special because it's a property that uses equinox if obstime is not set assert f4.obstime in (FK4.get_frame_attr_names()['obstime'], FK4.get_frame_attr_names()['equinox']) def test_no_data_nonscalar_frames(): a1 = AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day, temperature=np.ones((3, 1)) * u.deg_C) assert a1.obstime.shape == (3, 10) assert a1.temperature.shape == (3, 10) assert a1.shape == (3, 10) with pytest.raises(ValueError) as exc: AltAz(obstime=Time('2012-01-01') + np.arange(10.) 
                  * u.day, temperature=np.ones((3,)) * u.deg_C)
    assert 'inconsistent shapes' in str(exc.value)


def test_frame_repr():
    i = ICRS()
    assert repr(i) == '<ICRS Frame>'

    f5 = FK5()
    assert repr(f5).startswith('<FK5 Frame (equinox=')

    i2 = ICRS(ra=1*u.deg, dec=2*u.deg)
    i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc)

    assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                        ' (1., 2.)>')
    assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
                        ' (1., 2., 3.)>')

    # try with arrays
    i2 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[2.1, 3.1]*u.deg)
    i3 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[-15.6, 17.1]*u.deg,
              distance=[11., 21.]*u.kpc)

    assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                        ' [(1.1, 2.1), (2.1, 3.1)]>')
    assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
                        ' [(1.1, -15.6, 11.), (2.1, 17.1, 21.)]>')


def test_frame_repr_vels():
    i = ICRS(ra=1*u.deg, dec=2*u.deg,
             pm_ra_cosdec=1*u.marcsec/u.yr, pm_dec=2*u.marcsec/u.yr)

    # unit comes out as mas/yr because of the preferred units defined in the
    # frame RepresentationMapping
    assert repr(i) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                       ' (1., 2.)\n'
                       ' (pm_ra_cosdec, pm_dec) in mas / yr\n'
                       ' (1., 2.)>')


def test_converting_units():
    # this is a regular expression that with split (see below) removes what's
    # after the decimal point to fix rounding problems
    rexrepr = re.compile(r'(.*?=\d\.).*?( .*?=\d\.).*?( .*)')

    # Use values that aren't subject to rounding down to X.9999...
    i2 = ICRS(ra=2.*u.deg, dec=2.*u.deg)
    i2_many = ICRS(ra=[2., 4.]*u.deg, dec=[2., -8.1]*u.deg)

    # converting from FK5 to ICRS and back changes the *internal* representation,
    # but it should still come out in the preferred form
    i4 = i2.transform_to(FK5()).transform_to(ICRS())
    i4_many = i2_many.transform_to(FK5()).transform_to(ICRS())

    ri2 = ''.join(rexrepr.split(repr(i2)))
    ri4 = ''.join(rexrepr.split(repr(i4)))
    assert ri2 == ri4
    assert i2.data.lon.unit != i4.data.lon.unit  # Internal repr changed

    ri2_many = ''.join(rexrepr.split(repr(i2_many)))
    ri4_many = ''.join(rexrepr.split(repr(i4_many)))
    assert ri2_many == ri4_many
    assert i2_many.data.lon.unit != i4_many.data.lon.unit  # Internal repr changed

    # but that *shouldn't* hold if we turn off units for the representation
    class FakeICRS(ICRS):
        frame_specific_representation_info = {
            'spherical': [RepresentationMapping('lon', 'ra', u.hourangle),
                          RepresentationMapping('lat', 'dec', None),
                          RepresentationMapping('distance', 'distance')]  # should fall back to default of None unit
        }

    fi = FakeICRS(i4.data)
    ri2 = ''.join(rexrepr.split(repr(i2)))
    rfi = ''.join(rexrepr.split(repr(fi)))
    rfi = re.sub('FakeICRS', 'ICRS', rfi)  # Force frame name to match
    assert ri2 != rfi

    # the attributes should also get the right units
    assert i2.dec.unit == i4.dec.unit

    # unless no/explicitly given units
    assert i2.dec.unit != fi.dec.unit
    assert i2.ra.unit != fi.ra.unit
    assert fi.ra.unit == u.hourangle


def test_representation_info():
    class NewICRS1(ICRS):
        frame_specific_representation_info = {
            r.SphericalRepresentation: [
                RepresentationMapping('lon', 'rara', u.hourangle),
                RepresentationMapping('lat', 'decdec', u.degree),
                RepresentationMapping('distance', 'distance', u.kpc)]
        }

    i1 = NewICRS1(rara=10*u.degree, decdec=-12*u.deg, distance=1000*u.pc,
                  pm_rara_cosdecdec=100*u.mas/u.yr,
                  pm_decdec=17*u.mas/u.yr,
                  radial_velocity=10*u.km/u.s)
    assert allclose(i1.rara, 10*u.deg)
    assert i1.rara.unit == u.hourangle
    assert allclose(i1.decdec, -12*u.deg)
    assert allclose(i1.distance, 1000*u.pc)
    assert i1.distance.unit == u.kpc
    assert allclose(i1.pm_rara_cosdecdec,
100*u.mas/u.yr) assert allclose(i1.pm_decdec, 17*u.mas/u.yr) # this should auto-set the names of UnitSpherical: i1.set_representation_cls(r.UnitSphericalRepresentation, s=r.UnitSphericalCosLatDifferential) assert allclose(i1.rara, 10*u.deg) assert allclose(i1.decdec, -12*u.deg) assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr) assert allclose(i1.pm_decdec, 17*u.mas/u.yr) # For backwards compatibility, we also support the string name in the # representation info dictionary: class NewICRS2(ICRS): frame_specific_representation_info = { 'spherical': [ RepresentationMapping('lon', 'ang1', u.hourangle), RepresentationMapping('lat', 'ang2', u.degree), RepresentationMapping('distance', 'howfar', u.kpc)] } i2 = NewICRS2(ang1=10*u.degree, ang2=-12*u.deg, howfar=1000*u.pc) assert allclose(i2.ang1, 10*u.deg) assert i2.ang1.unit == u.hourangle assert allclose(i2.ang2, -12*u.deg) assert allclose(i2.howfar, 1000*u.pc) assert i2.howfar.unit == u.kpc # Test that the differential kwargs get overridden class NewICRS3(ICRS): frame_specific_representation_info = { r.SphericalCosLatDifferential: [ RepresentationMapping('d_lon_coslat', 'pm_ang1', u.hourangle/u.year), RepresentationMapping('d_lat', 'pm_ang2'), RepresentationMapping('d_distance', 'vlos', u.kpc/u.Myr)] } i3 = NewICRS3(lon=10*u.degree, lat=-12*u.deg, distance=1000*u.pc, pm_ang1=1*u.mas/u.yr, pm_ang2=2*u.mas/u.yr, vlos=100*u.km/u.s) assert allclose(i3.pm_ang1, 1*u.mas/u.yr) assert i3.pm_ang1.unit == u.hourangle/u.year assert allclose(i3.pm_ang2, 2*u.mas/u.yr) assert allclose(i3.vlos, 100*u.km/u.s) assert i3.vlos.unit == u.kpc/u.Myr def test_realizing(): rep = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc) i = ICRS() i2 = i.realize_frame(rep) assert not i.has_data assert i2.has_data f = FK5(equinox=Time('J2001')) f2 = f.realize_frame(rep) assert not f.has_data assert f2.has_data assert f2.equinox == f.equinox assert f2.equinox != FK5.get_frame_attr_names()['equinox'] # Check that a nicer error message is returned: with pytest.raises(TypeError) as excinfo: f.realize_frame(f.representation_type) assert ('Class passed as data instead of a representation' in excinfo.value.args[0]) def test_replicating(): i = ICRS(ra=[1]*u.deg, dec=[2]*u.deg) icopy = i.replicate(copy=True) irepl = i.replicate(copy=False) i.data._lat[:] = 0*u.deg assert np.all(i.data.lat == irepl.data.lat) assert np.all(i.data.lat != icopy.data.lat) iclone = i.replicate_without_data() assert i.has_data assert not iclone.has_data aa = AltAz(alt=1*u.deg, az=2*u.deg, obstime=Time('J2000')) aaclone = aa.replicate_without_data(obstime=Time('J2001')) assert not aaclone.has_data assert aa.obstime != aaclone.obstime assert aa.pressure == aaclone.pressure assert aa.obswl == aaclone.obswl def test_getitem(): rep = r.SphericalRepresentation( [1, 2, 3]*u.deg, [4, 5, 6]*u.deg, [7, 8, 9]*u.kpc) i = ICRS(rep) assert len(i.ra) == 3 iidx = i[1:] assert len(iidx.ra) == 2 iidx2 = i[0] assert iidx2.ra.isscalar def test_transform(): """ This test just makes sure the transform architecture works, but does *not* actually test all the builtin transforms themselves are accurate. 
""" i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) f = i.transform_to(FK5()) i2 = f.transform_to(ICRS()) assert i2.data.__class__ == r.UnitSphericalRepresentation assert_allclose(i.ra, i2.ra) assert_allclose(i.dec, i2.dec) i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) f = i.transform_to(FK5()) i2 = f.transform_to(ICRS()) assert i2.data.__class__ != r.UnitSphericalRepresentation f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001')) f4 = f.transform_to(FK4()) f4_2 = f.transform_to(FK4(equinox=f.equinox)) # make sure attributes are copied over correctly assert f4.equinox == FK4().equinox assert f4_2.equinox == f.equinox # make sure self-transforms also work i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) i2 = i.transform_to(ICRS()) assert_allclose(i.ra, i2.ra) assert_allclose(i.dec, i2.dec) f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001')) f2 = f.transform_to(FK5()) # default equinox, so should be *different* assert f2.equinox == FK5().equinox with pytest.raises(AssertionError): assert_allclose(f.ra, f2.ra) with pytest.raises(AssertionError): assert_allclose(f.dec, f2.dec) # finally, check Galactic round-tripping i1 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) i2 = i1.transform_to(Galactic()).transform_to(ICRS()) assert_allclose(i1.ra, i2.ra) assert_allclose(i1.dec, i2.dec) def test_transform_to_nonscalar_nodata_frame(): # https://github.com/astropy/astropy/pull/5254#issuecomment-241592353 times = Time('2016-08-23') + np.linspace(0, 10, 12)*u.day coo1 = ICRS(ra=[[0.], [10.], [20.]]*u.deg, dec=[[-30.], [30.], [60.]]*u.deg) coo2 = coo1.transform_to(FK5(equinox=times)) assert coo2.shape == (3, 12) def test_setitem_no_velocity(): """Test different flavors of item setting for a Frame without a velocity. """ obstime = 'B1955' sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime) sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime) sc1 = sc0.copy() sc1_repr = repr(sc1) assert 'representation' in sc1.cache sc1[1] = sc2[0] assert sc1.cache == {} assert repr(sc2) != sc1_repr assert np.allclose(sc1.ra.to_value(u.deg), [1, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [3, 30]) assert sc1.obstime == sc2.obstime assert sc1.name == 'fk4' sc1 = sc0.copy() sc1[:] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [10, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 30]) sc1 = sc0.copy() sc1[:] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [10, 20]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 40]) sc1 = sc0.copy() sc1[[1, 0]] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [20, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [40, 30]) # Works for array-valued obstime so long as they are considered equivalent sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, obstime]) sc1[0] = sc2[0] # Multidimensional coordinates sc1 = FK4([[1, 2], [3, 4]] * u.deg, [[5, 6], [7, 8]] * u.deg) sc2 = FK4([[10, 20], [30, 40]] * u.deg, [[50, 60], [70, 80]] * u.deg) sc1[0] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [[10, 20], [3, 4]]) assert np.allclose(sc1.dec.to_value(u.deg), [[50, 60], [7, 8]]) def test_setitem_velocities(): """Test different flavors of item setting for a Frame with a velocity. 
""" sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s, obstime='B1950') sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, radial_velocity=[10, 20]*u.km/u.s, obstime='B1950') sc1 = sc0.copy() sc1[1] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [1, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [3, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [1, 10]) assert sc1.obstime == sc2.obstime assert sc1.name == 'fk4' sc1 = sc0.copy() sc1[:] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [10, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 10]) sc1 = sc0.copy() sc1[:] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [10, 20]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 40]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 20]) sc1 = sc0.copy() sc1[[1, 0]] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [20, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [40, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [20, 10]) def test_setitem_exceptions(): obstime = 'B1950' sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg) sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime) sc1 = Galactic(sc0.ra, sc0.dec) with pytest.raises(TypeError, match='can only set from object of same class: ' 'Galactic vs. FK4'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra, sc0.dec, obstime='B2001') with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra[0], sc0.dec[0], obstime=obstime) with pytest.raises(TypeError, match="scalar 'FK4' frame object does not support " 'item assignment'): sc1[0] = sc2[0] sc1 = FK4(obstime=obstime) with pytest.raises(ValueError, match='cannot set frame which has no data'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, 'B1980']) with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] # Wrong shape sc1 = FK4([sc0.ra], [sc0.dec], obstime=[obstime, 'B1980']) with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] def test_sep(): i1 = ICRS(ra=0*u.deg, dec=1*u.deg) i2 = ICRS(ra=0*u.deg, dec=2*u.deg) sep = i1.separation(i2) assert sep.deg == 1 i3 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) i4 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[4, 5]*u.kpc) sep3d = i3.separation_3d(i4) assert_allclose(sep3d.to(u.kpc), np.array([1, 1])*u.kpc) # check that it works even with velocities i5 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc, pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr, radial_velocity=[5, 6]*u.km/u.s) i6 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[7, 8]*u.kpc, pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr, radial_velocity=[5, 6]*u.km/u.s) sep3d = i5.separation_3d(i6) assert_allclose(sep3d.to(u.kpc), np.array([2, 2])*u.kpc) # 3d separations of dimensionless distances should still work i7 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.one) i8 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=4*u.one) sep3d = i7.separation_3d(i8) assert_allclose(sep3d, 1*u.one) # but should fail with non-dimensionless with pytest.raises(ValueError): i7.separation_3d(i3) def test_time_inputs(): """ Test validation and conversion of inputs for equinox and obstime attributes. 
""" c = FK4(1 * u.deg, 2 * u.deg, equinox='J2001.5', obstime='2000-01-01 12:00:00') assert c.equinox == Time('J2001.5') assert c.obstime == Time('2000-01-01 12:00:00') with pytest.raises(ValueError) as err: c = FK4(1 * u.deg, 2 * u.deg, equinox=1.5) assert 'Invalid time input' in str(err.value) with pytest.raises(ValueError) as err: c = FK4(1 * u.deg, 2 * u.deg, obstime='hello') assert 'Invalid time input' in str(err.value) # A vector time should work if the shapes match, but we don't automatically # broadcast the basic data (just like time). FK4([1, 2] * u.deg, [2, 3] * u.deg, obstime=['J2000', 'J2001']) with pytest.raises(ValueError) as err: FK4(1 * u.deg, 2 * u.deg, obstime=['J2000', 'J2001']) assert 'shape' in str(err.value) def test_is_frame_attr_default(): """ Check that the `is_frame_attr_default` machinery works as expected """ c1 = FK5(ra=1*u.deg, dec=1*u.deg) c2 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=FK5.get_frame_attr_names()['equinox']) c3 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=Time('J2001.5')) assert c1.equinox == c2.equinox assert c1.equinox != c3.equinox assert c1.is_frame_attr_default('equinox') assert not c2.is_frame_attr_default('equinox') assert not c3.is_frame_attr_default('equinox') c4 = c1.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) c5 = c2.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) assert c4.is_frame_attr_default('equinox') assert not c5.is_frame_attr_default('equinox') def test_altaz_attributes(): aa = AltAz(1*u.deg, 2*u.deg) assert aa.obstime is None assert aa.location is None aa2 = AltAz(1*u.deg, 2*u.deg, obstime='J2000') assert aa2.obstime == Time('J2000') aa3 = AltAz(1*u.deg, 2*u.deg, location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) assert isinstance(aa3.location, EarthLocation) def test_hadec_attributes(): hd = HADec(1*u.hourangle, 2*u.deg) assert hd.ha == 1.*u.hourangle assert hd.dec == 2*u.deg assert hd.obstime is None assert hd.location is None hd2 = HADec(23*u.hourangle, -2*u.deg, obstime='J2000', location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) assert_allclose(hd2.ha, -1*u.hourangle) assert hd2.dec == -2*u.deg assert hd2.obstime == Time('J2000') assert isinstance(hd2.location, EarthLocation) sr = hd2.represent_as(r.SphericalRepresentation) assert_allclose(sr.lon, -1*u.hourangle) def test_representation(): """ Test the getter and setter properties for `representation` """ # Create the frame object. icrs = ICRS(ra=1*u.deg, dec=1*u.deg) data = icrs.data # Create some representation objects. icrs_cart = icrs.cartesian icrs_spher = icrs.spherical icrs_cyl = icrs.cylindrical # Testing when `_representation` set to `CartesianRepresentation`. icrs.representation_type = r.CartesianRepresentation assert icrs.representation_type == r.CartesianRepresentation assert icrs_cart.x == icrs.x assert icrs_cart.y == icrs.y assert icrs_cart.z == icrs.z assert icrs.data == data # Testing that an ICRS object in CartesianRepresentation must not have spherical attributes. for attr in ('ra', 'dec', 'distance'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) # Testing when `_representation` set to `CylindricalRepresentation`. icrs.representation_type = r.CylindricalRepresentation assert icrs.representation_type == r.CylindricalRepresentation assert icrs.data == data # Testing setter input using text argument for spherical. 
    icrs.representation_type = 'spherical'
    assert icrs.representation_type is r.SphericalRepresentation
    assert icrs_spher.lat == icrs.dec
    assert icrs_spher.lon == icrs.ra
    assert icrs_spher.distance == icrs.distance
    assert icrs.data == data

    # Testing that an ICRS object in SphericalRepresentation must not have cartesian attributes.
    for attr in ('x', 'y', 'z'):
        with pytest.raises(AttributeError) as err:
            getattr(icrs, attr)
        assert 'object has no attribute' in str(err.value)

    # Testing setter input using text argument for cylindrical.
    icrs.representation_type = 'cylindrical'
    assert icrs.representation_type is r.CylindricalRepresentation
    assert icrs_cyl.rho == icrs.rho
    assert icrs_cyl.phi == icrs.phi
    assert icrs_cyl.z == icrs.z
    assert icrs.data == data

    # Testing that an ICRS object in CylindricalRepresentation must not have spherical attributes.
    for attr in ('ra', 'dec', 'distance'):
        with pytest.raises(AttributeError) as err:
            getattr(icrs, attr)
        assert 'object has no attribute' in str(err.value)

    with pytest.raises(ValueError) as err:
        icrs.representation_type = 'WRONG'
    assert 'but must be a BaseRepresentation class' in str(err.value)

    with pytest.raises(ValueError) as err:
        icrs.representation_type = ICRS
    assert 'but must be a BaseRepresentation class' in str(err.value)


def test_represent_as():
    icrs = ICRS(ra=1*u.deg, dec=1*u.deg)

    cart1 = icrs.represent_as('cartesian')
    cart2 = icrs.represent_as(r.CartesianRepresentation)

    assert cart1.x == cart2.x
    assert cart1.y == cart2.y
    assert cart1.z == cart2.z

    # now try with velocities
    icrs = ICRS(ra=0*u.deg, dec=0*u.deg, distance=10*u.kpc,
                pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr,
                radial_velocity=1*u.km/u.s)

    # single string
    rep2 = icrs.represent_as('cylindrical')
    assert isinstance(rep2, r.CylindricalRepresentation)
    assert isinstance(rep2.differentials['s'], r.CylindricalDifferential)

    # single class with positional in_frame_units, verify that warning raised
    with pytest.warns(AstropyWarning, match='argument position') as w:
        icrs.represent_as(r.CylindricalRepresentation, False)
    assert len(w) == 1

    # TODO: this should probably fail in the future once we figure out a better
    # workaround for dealing with UnitSphericalRepresentation's with
    # RadialDifferential's
    # two classes
    # rep2 = icrs.represent_as(r.CartesianRepresentation,
    #                          r.SphericalCosLatDifferential)
    # assert isinstance(rep2, r.CartesianRepresentation)
    # assert isinstance(rep2.differentials['s'], r.SphericalCosLatDifferential)

    with pytest.raises(ValueError):
        icrs.represent_as('odaigahara')


def test_shorthand_representations():
    rep = r.CartesianRepresentation([1, 2, 3]*u.pc)
    dif = r.CartesianDifferential([1, 2, 3]*u.km/u.s)
    rep = rep.with_differentials(dif)

    icrs = ICRS(rep)

    cyl = icrs.cylindrical
    assert isinstance(cyl, r.CylindricalRepresentation)
    assert isinstance(cyl.differentials['s'], r.CylindricalDifferential)

    sph = icrs.spherical
    assert isinstance(sph, r.SphericalRepresentation)
    assert isinstance(sph.differentials['s'], r.SphericalDifferential)

    sph = icrs.sphericalcoslat
    assert isinstance(sph, r.SphericalRepresentation)
    assert isinstance(sph.differentials['s'], r.SphericalCosLatDifferential)


def test_equal():
    obstime = 'B1955'
    sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime)
    sc2 = FK4([1, 20]*u.deg, [3, 4]*u.deg, obstime=obstime)

    # Compare arrays and scalars
    eq = sc1 == sc2
    ne = sc1 != sc2
    assert np.all(eq == [True, False])
    assert np.all(ne == [False, True])
    assert (sc1[0] == sc2[0]) == True  # noqa  (numpy True not Python True)
    assert (sc1[0] != sc2[0]) == False  # noqa

    # Broadcasting
    eq = sc1[0] == sc2
    ne = sc1[0]
!= sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) # With diff only in velocity sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s) sc2 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 20]*u.km/u.s) eq = sc1 == sc2 ne = sc1 != sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) assert (sc1[0] == sc2[0]) == True # noqa assert (sc1[0] != sc2[0]) == False # noqa assert (FK4() == ICRS()) is False assert (FK4() == FK4(obstime='J1999')) is False def test_equal_exceptions(): # Shape mismatch sc1 = FK4([1, 2, 3]*u.deg, [3, 4, 5]*u.deg) with pytest.raises(ValueError, match='cannot compare: shape mismatch'): sc1 == sc1[:2] # Different representation_type sc1 = FK4(1, 2, 3, representation_type='cartesian') sc2 = FK4(1*u.deg, 2*u.deg, 2, representation_type='spherical') with pytest.raises(TypeError, match='cannot compare: objects must have same ' 'class: CartesianRepresentation vs. SphericalRepresentation'): sc1 == sc2 # Different differential type sc1 = FK4(1*u.deg, 2*u.deg, radial_velocity=1*u.km/u.s) sc2 = FK4(1*u.deg, 2*u.deg, pm_ra_cosdec=1*u.mas/u.yr, pm_dec=1*u.mas/u.yr) with pytest.raises(TypeError, match='cannot compare: objects must have same ' 'class: RadialDifferential vs. UnitSphericalCosLatDifferential'): sc1 == sc2 # Different frame attribute sc1 = FK5(1*u.deg, 2*u.deg) sc2 = FK5(1*u.deg, 2*u.deg, equinox='J1999') with pytest.raises(TypeError, match=r'cannot compare: objects must have equivalent ' r'frames: <FK5 Frame \(equinox=J2000.000\)> ' r'vs. <FK5 Frame \(equinox=J1999.000\)>'): sc1 == sc2 # Different frame sc1 = FK4(1*u.deg, 2*u.deg) sc2 = FK5(1*u.deg, 2*u.deg, equinox='J2000') with pytest.raises(TypeError, match='cannot compare: objects must have equivalent ' r'frames: <FK4 Frame \(equinox=B1950.000, obstime=B1950.000\)> ' r'vs. 
<FK5 Frame \(equinox=J2000.000\)>'): sc1 == sc2 sc1 = FK4(1*u.deg, 2*u.deg) sc2 = FK4() with pytest.raises(ValueError, match='cannot compare: one frame has data and ' 'the other does not'): sc1 == sc2 with pytest.raises(ValueError, match='cannot compare: one frame has data and ' 'the other does not'): sc2 == sc1 def test_dynamic_attrs(): c = ICRS(1*u.deg, 2*u.deg) assert 'ra' in dir(c) assert 'dec' in dir(c) with pytest.raises(AttributeError) as err: c.blahblah assert "object has no attribute 'blahblah'" in str(err.value) with pytest.raises(AttributeError) as err: c.ra = 1 assert "Cannot set any frame attribute" in str(err.value) c.blahblah = 1 assert c.blahblah == 1 def test_nodata_error(): i = ICRS() with pytest.raises(ValueError) as excinfo: i.data assert 'does not have associated data' in str(excinfo.value) def test_len0_data(): i = ICRS([]*u.deg, []*u.deg) assert i.has_data repr(i) def test_quantity_attributes(): # make sure we can create a GCRS frame with valid inputs GCRS(obstime='J2002', obsgeoloc=[1, 2, 3]*u.km, obsgeovel=[4, 5, 6]*u.km/u.s) # make sure it fails for invalid lovs or vels with pytest.raises(TypeError): GCRS(obsgeoloc=[1, 2, 3]) # no unit with pytest.raises(u.UnitsError): GCRS(obsgeoloc=[1, 2, 3]*u.km/u.s) # incorrect unit with pytest.raises(ValueError): GCRS(obsgeoloc=[1, 3]*u.km) # incorrect shape def test_quantity_attribute_default(): # The default default (yes) is None: class MyCoord(BaseCoordinateFrame): someval = QuantityAttribute(unit=u.deg) frame = MyCoord() assert frame.someval is None frame = MyCoord(someval=15*u.deg) assert u.isclose(frame.someval, 15*u.deg) # This should work if we don't explicitly pass in a unit, but we pass in a # default value with a unit class MyCoord2(BaseCoordinateFrame): someval = QuantityAttribute(15*u.deg) frame = MyCoord2() assert u.isclose(frame.someval, 15*u.deg) # Since here no shape was given, we can set to any shape we like. frame = MyCoord2(someval=np.ones(3)*u.deg) assert frame.someval.shape == (3,) assert np.all(frame.someval == 1*u.deg) # We should also be able to insist on a given shape. class MyCoord3(BaseCoordinateFrame): someval = QuantityAttribute(unit=u.arcsec, shape=(3,)) frame = MyCoord3(someval=np.ones(3)*u.deg) assert frame.someval.shape == (3,) assert frame.someval.unit == u.arcsec assert u.allclose(frame.someval.value, 3600.) # The wrong shape raises. with pytest.raises(ValueError, match='shape'): MyCoord3(someval=1.*u.deg) # As does the wrong unit. with pytest.raises(u.UnitsError): MyCoord3(someval=np.ones(3)*u.m) # We are allowed a short-cut for zero. frame0 = MyCoord3(someval=0) assert frame0.someval.shape == (3,) assert frame0.someval.unit == u.arcsec assert np.all(frame0.someval.value == 0.) # But not if it has the wrong shape. 
with pytest.raises(ValueError, match='shape'): MyCoord3(someval=np.zeros(2)) # This should fail, if we don't pass in a default or a unit with pytest.raises(ValueError): class MyCoord(BaseCoordinateFrame): someval = QuantityAttribute() def test_eloc_attributes(): el = EarthLocation(lon=12.3*u.deg, lat=45.6*u.deg, height=1*u.km) it = ITRS(r.SphericalRepresentation(lon=12.3*u.deg, lat=45.6*u.deg, distance=1*u.km)) gc = GCRS(ra=12.3*u.deg, dec=45.6*u.deg, distance=6375*u.km) el1 = AltAz(location=el).location assert isinstance(el1, EarthLocation) # these should match *exactly* because the EarthLocation assert el1.lat == el.lat assert el1.lon == el.lon assert el1.height == el.height el2 = AltAz(location=it).location assert isinstance(el2, EarthLocation) # these should *not* match because giving something in Spherical ITRS is # *not* the same as giving it as an EarthLocation: EarthLocation is on an # elliptical geoid. So the longitude should match (because flattening is # only along the z-axis), but latitude should not. Also, height is relative # to the *surface* in EarthLocation, but the ITRS distance is relative to # the center of the Earth assert not allclose(el2.lat, it.spherical.lat) assert allclose(el2.lon, it.spherical.lon) assert el2.height < -6000*u.km el3 = AltAz(location=gc).location # GCRS inputs implicitly get transformed to ITRS and then onto # EarthLocation's elliptical geoid. So both lat and lon shouldn't match assert isinstance(el3, EarthLocation) assert not allclose(el3.lat, gc.dec) assert not allclose(el3.lon, gc.ra) assert np.abs(el3.height) < 500*u.km def test_equivalent_frames(): i = ICRS() i2 = ICRS(1*u.deg, 2*u.deg) assert i.is_equivalent_frame(i) assert i.is_equivalent_frame(i2) with pytest.raises(TypeError): assert i.is_equivalent_frame(10) with pytest.raises(TypeError): assert i2.is_equivalent_frame(SkyCoord(i2)) f0 = FK5() # this J2000 is TT f1 = FK5(equinox='J2000') f2 = FK5(1*u.deg, 2*u.deg, equinox='J2000') f3 = FK5(equinox='J2010') f4 = FK4(equinox='J2010') assert f1.is_equivalent_frame(f1) assert not i.is_equivalent_frame(f1) assert f0.is_equivalent_frame(f1) assert f1.is_equivalent_frame(f2) assert not f1.is_equivalent_frame(f3) assert not f3.is_equivalent_frame(f4) aa1 = AltAz() aa2 = AltAz(obstime='J2010') assert aa2.is_equivalent_frame(aa2) assert not aa1.is_equivalent_frame(i) assert not aa1.is_equivalent_frame(aa2) def test_equivalent_frame_coordinateattribute(): class FrameWithCoordinateAttribute(BaseCoordinateFrame): coord_attr = CoordinateAttribute(HCRS) # These frames should not be considered equivalent f0 = FrameWithCoordinateAttribute() f1 = FrameWithCoordinateAttribute(coord_attr=HCRS(1*u.deg, 2*u.deg, obstime='J2000')) f2 = FrameWithCoordinateAttribute(coord_attr=HCRS(3*u.deg, 4*u.deg, obstime='J2000')) f3 = FrameWithCoordinateAttribute(coord_attr=HCRS(1*u.deg, 2*u.deg, obstime='J2001')) assert not f0.is_equivalent_frame(f1) assert not f1.is_equivalent_frame(f0) assert not f1.is_equivalent_frame(f2) assert not f1.is_equivalent_frame(f3) assert not f2.is_equivalent_frame(f3) # They each should still be equivalent to a deep copy of themselves assert f0.is_equivalent_frame(deepcopy(f0)) assert f1.is_equivalent_frame(deepcopy(f1)) assert f2.is_equivalent_frame(deepcopy(f2)) assert f3.is_equivalent_frame(deepcopy(f3)) def test_equivalent_frame_locationattribute(): class FrameWithLocationAttribute(BaseCoordinateFrame): loc_attr = EarthLocationAttribute() # These frames should not be considered equivalent f0 = FrameWithLocationAttribute() location = 
EarthLocation(lat=-34, lon=19, height=300) f1 = FrameWithLocationAttribute(loc_attr=location) assert not f0.is_equivalent_frame(f1) assert not f1.is_equivalent_frame(f0) # They each should still be equivalent to a deep copy of themselves assert f0.is_equivalent_frame(deepcopy(f0)) assert f1.is_equivalent_frame(deepcopy(f1)) def test_representation_subclass(): # Regression test for #3354 # Normally when instantiating a frame without a distance the frame will try # and use UnitSphericalRepresentation internally instead of # SphericalRepresentation. frame = FK5(representation_type=r.SphericalRepresentation, ra=32 * u.deg, dec=20 * u.deg) assert type(frame._data) == r.UnitSphericalRepresentation assert frame.representation_type == r.SphericalRepresentation # If using a SphericalRepresentation class this used to not work, so we # test here that this is now fixed. class NewSphericalRepresentation(r.SphericalRepresentation): attr_classes = r.SphericalRepresentation.attr_classes frame = FK5(representation_type=NewSphericalRepresentation, lon=32 * u.deg, lat=20 * u.deg) assert type(frame._data) == r.UnitSphericalRepresentation assert frame.representation_type == NewSphericalRepresentation # A similar issue then happened in __repr__ with subclasses of # SphericalRepresentation. assert repr(frame) == ("<FK5 Coordinate (equinox=J2000.000): (lon, lat) in deg\n" " (32., 20.)>") # A more subtle issue is when specifying a custom # UnitSphericalRepresentation subclass for the data and # SphericalRepresentation or a subclass for the representation. class NewUnitSphericalRepresentation(r.UnitSphericalRepresentation): attr_classes = r.UnitSphericalRepresentation.attr_classes def __repr__(self): return "<NewUnitSphericalRepresentation: spam spam spam>" frame = FK5(NewUnitSphericalRepresentation(lon=32 * u.deg, lat=20 * u.deg), representation_type=NewSphericalRepresentation) assert repr(frame) == "<FK5 Coordinate (equinox=J2000.000): spam spam spam>" def test_getitem_representation(): """ Make sure current representation survives __getitem__ even if different from data representation. """ c = ICRS([1, 1] * u.deg, [2, 2] * u.deg) c.representation_type = 'cartesian' assert c[0].representation_type is r.CartesianRepresentation def test_component_error_useful(): """ Check that a data-less frame gives useful error messages about not having data when the attributes asked for are possible coordinate components """ i = ICRS() with pytest.raises(ValueError) as excinfo: i.ra assert 'does not have associated data' in str(excinfo.value) with pytest.raises(AttributeError) as excinfo1: i.foobar with pytest.raises(AttributeError) as excinfo2: i.lon # lon is *not* the component name despite being the underlying representation's name assert "object has no attribute 'foobar'" in str(excinfo1.value) assert "object has no attribute 'lon'" in str(excinfo2.value) def test_cache_clear(): i = ICRS(1*u.deg, 2*u.deg) # Add an in frame units version of the rep to the cache. repr(i) assert len(i.cache['representation']) == 2 i.cache.clear() assert len(i.cache['representation']) == 0 def test_inplace_array(): i = ICRS([[1, 2], [3, 4]]*u.deg, [[10, 20], [30, 40]]*u.deg) # Add an in frame units version of the rep to the cache. 
    repr(i)

    # Check that repr() has added a rep to the cache
    assert len(i.cache['representation']) == 2

    # Modify the data
    i.data.lon[:, 0] = [100, 200]*u.deg

    # Clear the cache
    i.cache.clear()

    # This will use a second (potentially cached rep)
    assert_allclose(i.ra, [[100, 2], [200, 4]]*u.deg)
    assert_allclose(i.dec, [[10, 20], [30, 40]]*u.deg)


def test_inplace_change():
    i = ICRS(1*u.deg, 2*u.deg)

    # Add an in frame units version of the rep to the cache.
    repr(i)

    # Check that repr() has added a rep to the cache
    assert len(i.cache['representation']) == 2

    # Modify the data
    i.data.lon[()] = 10*u.deg

    # Clear the cache
    i.cache.clear()

    # This will use a second (potentially cached rep)
    assert i.ra == 10 * u.deg
    assert i.dec == 2 * u.deg


def test_representation_with_multiple_differentials():
    dif1 = r.CartesianDifferential([1, 2, 3]*u.km/u.s)
    dif2 = r.CartesianDifferential([1, 2, 3]*u.km/u.s**2)
    rep = r.CartesianRepresentation([1, 2, 3]*u.pc,
                                    differentials={'s': dif1, 's2': dif2})

    # check that an error is raised because the representation has multiple
    # differentials
    with pytest.raises(ValueError):
        ICRS(rep)


def test_representation_arg_backwards_compatibility():
    # TODO: this test can be removed when the `representation` argument is
    # removed from the BaseCoordinateFrame initializer.
    c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type=r.CartesianRepresentation)

    c2 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type=r.CartesianRepresentation)

    c3 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type='cartesian')

    assert c1.x == c2.x
    assert c1.y == c2.y
    assert c1.z == c2.z

    assert c1.x == c3.x
    assert c1.y == c3.y
    assert c1.z == c3.z

    assert c1.representation_type == c3.representation_type

    with pytest.raises(ValueError):
        ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
             representation_type='cartesian',
             representation='cartesian')


def test_missing_component_error_names():
    """
    This test checks that the component names are frame component names, not
    representation or differential names, when referenced in an exception
    raised when not passing in enough data.
For example: ICRS(ra=10*u.deg) should state: TypeError: __init__() missing 1 required positional argument: 'dec' """ with pytest.raises(TypeError) as e: ICRS(ra=150 * u.deg) assert "missing 1 required positional argument: 'dec'" in str(e.value) with pytest.raises(TypeError) as e: ICRS(ra=150*u.deg, dec=-11*u.deg, pm_ra=100*u.mas/u.yr, pm_dec=10*u.mas/u.yr) assert "pm_ra_cosdec" in str(e.value) def test_non_spherical_representation_unit_creation(unitphysics): class PhysicsICRS(ICRS): default_representation = r.PhysicsSphericalRepresentation pic = PhysicsICRS(phi=1*u.deg, theta=25*u.deg, r=1*u.kpc) assert isinstance(pic.data, r.PhysicsSphericalRepresentation) picu = PhysicsICRS(phi=1*u.deg, theta=25*u.deg) assert isinstance(picu.data, unitphysics) def test_attribute_repr(): class Spam: def _astropy_repr_in_frame(self): return "TEST REPR" class TestFrame(BaseCoordinateFrame): attrtest = Attribute(default=Spam()) assert "TEST REPR" in repr(TestFrame()) def test_component_names_repr(): # Frame class with new component names that includes a name swap class NameChangeFrame(BaseCoordinateFrame): default_representation = r.PhysicsSphericalRepresentation frame_specific_representation_info = { r.PhysicsSphericalRepresentation: [ RepresentationMapping('phi', 'theta', u.deg), RepresentationMapping('theta', 'phi', u.arcsec), RepresentationMapping('r', 'JUSTONCE', u.AU)] } frame = NameChangeFrame(0*u.deg, 0*u.arcsec, 0*u.AU) # Check for the new names in the Frame repr assert "(theta, phi, JUSTONCE)" in repr(frame) # Check that the letter "r" has not been replaced more than once in the Frame repr assert repr(frame).count("JUSTONCE") == 1 @pytest.fixture def reset_galactocentric_defaults(): # TODO: this can be removed, along with the "warning" test below, once we # switch the default to 'latest' in v4.1 # Resets before each test, and after (the yield is pytest magic) galactocentric_frame_defaults.set('v4.0') yield galactocentric_frame_defaults.set('v4.0') def test_galactocentric_defaults(reset_galactocentric_defaults): with galactocentric_frame_defaults.set('pre-v4.0'): galcen_pre40 = Galactocentric() with galactocentric_frame_defaults.set('v4.0'): galcen_40 = Galactocentric() with galactocentric_frame_defaults.set('latest'): galcen_latest = Galactocentric() # parameters that changed assert not u.allclose(galcen_pre40.galcen_distance, galcen_40.galcen_distance) assert not u.allclose(galcen_pre40.z_sun, galcen_40.z_sun) for k in galcen_40.get_frame_attr_names(): if isinstance(getattr(galcen_40, k), BaseCoordinateFrame): continue # skip coordinate comparison... 
elif isinstance(getattr(galcen_40, k), CartesianDifferential): assert u.allclose(getattr(galcen_40, k).d_xyz, getattr(galcen_latest, k).d_xyz) else: assert getattr(galcen_40, k) == getattr(galcen_latest, k) # test validate Galactocentric with galactocentric_frame_defaults.set('latest'): params = galactocentric_frame_defaults.validate(galcen_latest) references = galcen_latest.frame_attribute_references state = dict(parameters=params, references=references) assert galactocentric_frame_defaults.parameters == params assert galactocentric_frame_defaults.references == references assert galactocentric_frame_defaults._state == state # Test not one of accepted parameter types with pytest.raises(ValueError): galactocentric_frame_defaults.validate(ValueError) # test parameters property assert ( galactocentric_frame_defaults.parameters == galactocentric_frame_defaults.parameters ) def test_galactocentric_references(reset_galactocentric_defaults): # references in the "scientific paper"-sense with galactocentric_frame_defaults.set('pre-v4.0'): galcen_pre40 = Galactocentric() for k in galcen_pre40.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue assert k in galcen_pre40.frame_attribute_references with galactocentric_frame_defaults.set('v4.0'): galcen_40 = Galactocentric() for k in galcen_40.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue assert k in galcen_40.frame_attribute_references with galactocentric_frame_defaults.set('v4.0'): galcen_custom = Galactocentric(z_sun=15*u.pc) for k in galcen_custom.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue if k == 'z_sun': assert k not in galcen_custom.frame_attribute_references else: assert k in galcen_custom.frame_attribute_references def test_coordinateattribute_transformation(): class FrameWithCoordinateAttribute(BaseCoordinateFrame): coord_attr = CoordinateAttribute(HCRS) hcrs = HCRS(1*u.deg, 2*u.deg, 3*u.AU, obstime='2001-02-03') f1_frame = FrameWithCoordinateAttribute(coord_attr=hcrs) f1_skycoord = FrameWithCoordinateAttribute(coord_attr=SkyCoord(hcrs)) # The input is already HCRS, so the frame attribute should not change it assert f1_frame.coord_attr == hcrs # The output should not be different if a SkyCoord is provided assert f1_skycoord.coord_attr == f1_frame.coord_attr gcrs = GCRS(4*u.deg, 5*u.deg, 6*u.AU, obstime='2004-05-06') f2_frame = FrameWithCoordinateAttribute(coord_attr=gcrs) f2_skycoord = FrameWithCoordinateAttribute(coord_attr=SkyCoord(gcrs)) # The input needs to be converted from GCRS to HCRS assert isinstance(f2_frame.coord_attr, HCRS) # The `obstime` frame attribute should have been "merged" in a SkyCoord-style transformation assert f2_frame.coord_attr.obstime == gcrs.obstime # The output should not be different if a SkyCoord is provided assert f2_skycoord.coord_attr == f2_frame.coord_attr def test_realize_frame_accepts_kwargs(): c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type=r.CartesianRepresentation) new_data = r.CartesianRepresentation(x=11*u.pc, y=12*u.pc, z=13*u.pc) c2 = c1.realize_frame(new_data, representation_type="cartesian") c3 = c1.realize_frame(new_data, representation_type="cylindrical") assert c2.representation_type == r.CartesianRepresentation assert c3.representation_type == r.CylindricalRepresentation def test_nameless_frame_subclass(): """Note: this is a regression test for #11096""" class Test: pass # Subclass from a frame class and a non-frame class. # This subclassing is the test! 
class NewFrame(ICRS, Test): pass
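# A minimal sketch of the custom-frame pattern the attribute tests above
# exercise: subclass a frame and declare a QuantityAttribute with a unitful
# default. DemoICRS is a hypothetical name used only for illustration.
import astropy.units as u
from astropy.coordinates import ICRS
from astropy.coordinates.attributes import QuantityAttribute


class DemoICRS(ICRS):
    someval = QuantityAttribute(15 * u.deg)  # the default carries the unit


c = DemoICRS(ra=1 * u.deg, dec=2 * u.deg)
print(c.someval)  # 15.0 deg -- the default comes back as a Quantity
print(DemoICRS(ra=1 * u.deg, dec=2 * u.deg, someval=30 * u.deg).someval)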
astropy/astropy
astropy/coordinates/tests/test_frames.py
astropy/coordinates/orbital_elements.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst import timeit import numpy as np # pylint: disable=W0611 # largest image size to use for "linear" and fft convolutions max_exponents_linear = {1: 15, 2: 7, 3: 5} max_exponents_fft = {1: 15, 2: 10, 3: 7} if __name__ == "__main__": for ndims in [1, 2, 3]: print("\n{}-dimensional arrays ('n' is the size of the image AND " "the kernel)".format(ndims)) print(" ".join(["%17s" % n for n in ("n", "convolve", "convolve_fft")])) for ii in range(3, max_exponents_fft[ndims]): # array = np.random.random([2**ii]*ndims) # test ODD sizes too if ii < max_exponents_fft[ndims]: setup = (""" import numpy as np from astropy.convolution.convolve import convolve from astropy.convolution.convolve import convolve_fft array = np.random.random([%i]*%i) kernel = np.random.random([%i]*%i)""") % (2 ** ii - 1, ndims, 2 ** ii - 1, ndims) print("%16i:" % (int(2 ** ii - 1)), end=' ') if ii <= max_exponents_linear[ndims]: for convolve_type, extra in zip(("", "_fft"), ("", "fft_pad=False")): statement = f"convolve{convolve_type}(array, kernel, boundary='fill', {extra})" besttime = min(timeit.Timer(stmt=statement, setup=setup).repeat(3, 10)) print(f"{besttime:17f}", end=' ') else: print("%17s" % "skipped", end=' ') statement = "convolve_fft(array, kernel, boundary='fill')" besttime = min(timeit.Timer(stmt=statement, setup=setup).repeat(3, 10)) print(f"{besttime:17f}", end=' ') print() setup = (""" import numpy as np from astropy.convolution.convolve import convolve from astropy.convolution.convolve import convolve_fft array = np.random.random([%i]*%i) kernel = np.random.random([%i]*%i)""") % (2 ** ii, ndims, 2 ** ii, ndims) print("%16i:" % (int(2 ** ii)), end=' ') if ii <= max_exponents_linear[ndims]: for convolve_type in ("", "_fft",): # convolve doesn't allow even-sized kernels if convolve_type == "": print("%17s" % ("-"), end=' ') else: statement = f"convolve{convolve_type}(array, kernel, boundary='fill')" besttime = min(timeit.Timer(stmt=statement, setup=setup).repeat(3, 10)) print(f"{besttime:17f}", end=' ') else: print("%17s" % "skipped", end=' ') statement = "convolve_fft(array, kernel, boundary='fill')" besttime = min(timeit.Timer(stmt=statement, setup=setup).repeat(3, 10)) print(f"{besttime:17f}", end=' ') print() """ Unfortunately, these tests are pretty strongly inconclusive NOTE: Runtime has units seconds and represents wall clock time. 
RESULTS on a late 2013 Mac Pro: 3.5 GHz 6-Core Intel Xeon E5 32 GB 1866 MHz DDR3 ECC Python 3.5.4 :: Anaconda custom (x86_64) clang version 6.0.0 (tags/RELEASE_600/final) llvm-opnemp r327556 | grokos | 2018-03-14 15:11:36 -0400 (Wed, 14 Mar 2018) With OpenMP (hyperthreaded 12procs), convolve() only: 1-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fft 7: 0.002895 0.007321 15: 0.002684 0.008028 31: 0.002733 0.008684 63: 0.002728 0.009127 127: 0.002851 0.012659 255: 0.002835 0.010550 511: 0.003051 0.017137 1023: 0.004042 0.019384 2047: 0.007371 0.049246 4095: 0.021903 0.039821 8191: 0.067098 8.335749 16383: 0.256072 0.272165 2-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fft 7: 0.002696 0.014745 15: 0.002839 0.014826 31: 0.004286 0.045167 63: 0.022941 0.063715 127: 0.325557 0.925577 255: skipped 0.694621 511: skipped 3.734946 3-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fft 7: 0.003502 0.033121 8: 0.003407 0.030351 15: 0.026338 0.062235 31: 1.239503 1.586930 63: skipped 10.792675 With OpenMP but single threaded (n_threads = 1), convolve() only: 1-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fft 7: 0.001754 0.004687 15: 0.001706 0.005133 31: 0.001744 0.005381 63: 0.001725 0.005582 127: 0.001801 0.007405 255: 0.002262 0.006528 511: 0.003866 0.009913 1023: 0.009820 0.011511 2047: 0.034707 0.028171 4095: 0.132908 0.024133 8191: 0.527692 8.311933 16383: 2.103046 0.269368 2-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fft 7: 0.001734 0.009458 15: 0.002336 0.010310 31: 0.009123 0.025427 63: 0.126701 0.040610 127: 2.126114 0.926549 255: skipped 0.690896 511: skipped 3.756475 3-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fft 7: 0.002822 0.019498 15: 0.096008 0.063744 31: 7.373533 1.578913 63: skipped 10.811530 RESULTS on a 2011 Mac Air: 1-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fftnp convolve_fftw convolve_fftsp 7: 0.000408 0.002334 0.005571 0.002677 15: 0.000361 0.002491 0.005648 0.002678 31: 0.000535 0.002450 0.005988 0.002880 63: 0.000509 0.002876 0.008003 0.002981 127: 0.000801 0.004080 0.008513 0.003932 255: 0.002453 0.003111 0.007518 0.003564 511: 0.008394 0.006224 0.010247 0.005991 1023: 0.028741 0.007538 0.009591 0.007696 2047: 0.106323 0.021575 0.022041 0.020682 4095: 0.411936 0.021675 0.019761 0.020939 8191: 1.664517 8.278320 0.073001 7.803563 16383: 6.654678 0.251661 0.202271 0.222171 2-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fftnp convolve_fftw convolve_fftsp 7: 0.000552 0.003524 0.006667 0.004318 15: 0.002986 0.005093 0.012941 0.005951 31: 0.074360 0.033973 0.031800 0.036937 63: 0.848471 0.057407 0.052192 0.053213 127: 14.656414 1.005329 0.402113 0.955279 255: skipped 1.715546 1.566876 1.745338 511: skipped 4.066155 4.303350 3.930661 3-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fftnp convolve_fftw convolve_fftsp 7: 0.009239 0.012957 0.011957 0.015997 15: 0.772434 0.075621 0.056711 0.079508 31: 62.824051 2.295193 1.189505 2.351136 63: skipped 11.250225 10.982726 10.585744 On a 2009 Mac Pro: 1-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fftnp convolve_fftw convolve_fftsp 7: 0.000360 0.002269 0.004986 0.002476 15: 0.000364 0.002255 0.005244 0.002471 31: 0.000385 0.002380 0.005422 
0.002588 63: 0.000474 0.002407 0.005392 0.002637 127: 0.000752 0.004122 0.007827 0.003966 255: 0.004316 0.003258 0.006566 0.003324 511: 0.011517 0.007158 0.009898 0.006238 1023: 0.034105 0.009211 0.009468 0.008260 2047: 0.113620 0.028097 0.020662 0.021603 4095: 0.403373 0.023211 0.018767 0.020065 8191: 1.519329 8.454573 0.211436 7.212381 16383: 5.887481 0.317428 0.153344 0.237119 2-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fftnp convolve_fftw convolve_fftsp 7: 0.000474 0.003470 0.006131 0.003503 15: 0.002011 0.004481 0.007825 0.004496 31: 0.027291 0.019433 0.014841 0.018034 63: 0.445680 0.038171 0.026753 0.037404 127: 7.003774 0.925921 0.282591 0.762671 255: skipped 0.804682 0.708849 0.869368 511: skipped 3.643626 3.687562 4.584770 3-dimensional arrays ('n' is the size of the image AND the kernel) n convolve convolve_fftnp convolve_fftw convolve_fftsp 7: 0.004520 0.011519 0.009464 0.012335 15: 0.329566 0.060978 0.045495 0.073692 31: 24.935228 1.654920 0.710509 1.773879 63: skipped 8.982771 12.407683 16.900078 """
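# A standalone spot-check of one cell in the tables above (1-D, n = 127),
# using the same timeit pattern as the benchmark script; absolute numbers
# will of course vary from machine to machine.
import timeit

setup = """
import numpy as np
from astropy.convolution import convolve, convolve_fft
array = np.random.random(127)
kernel = np.random.random(127)
"""
for stmt in ("convolve(array, kernel, boundary='fill')",
             "convolve_fft(array, kernel, boundary='fill')"):
    # best of 3 repeats of 10 calls each, matching the script above
    best = min(timeit.Timer(stmt=stmt, setup=setup).repeat(3, 10))
    print(f"{stmt.split('(')[0]:>14s}: {best:.6f} s")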
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from copy import deepcopy import numpy as np import pytest import re from astropy import units as u from astropy.units import allclose from astropy.tests.helper import assert_quantity_allclose as assert_allclose from astropy.utils.exceptions import AstropyWarning from astropy.time import Time from astropy.coordinates import ( EarthLocation, galactocentric_frame_defaults, representation as r, SkyCoord, ) from astropy.coordinates.attributes import ( Attribute, CoordinateAttribute, DifferentialAttribute, EarthLocationAttribute, QuantityAttribute, TimeAttribute, ) from astropy.coordinates.baseframe import ( BaseCoordinateFrame, RepresentationMapping ) from astropy.coordinates.builtin_frames import ( AltAz, HADec, FK4, FK5, Galactic, Galactocentric, GCRS, HCRS, ICRS, ITRS ) from astropy.coordinates.representation import ( CartesianDifferential, REPRESENTATION_CLASSES, ) from .test_representation import unitphysics # this fixture is used below # noqa def setup_function(func): """Copy original 'REPRESENTATIONCLASSES' as attribute in function.""" func.REPRESENTATION_CLASSES_ORIG = deepcopy(REPRESENTATION_CLASSES) def teardown_function(func): """Reset REPRESENTATION_CLASSES to original value.""" REPRESENTATION_CLASSES.clear() REPRESENTATION_CLASSES.update(func.REPRESENTATION_CLASSES_ORIG) def test_frame_attribute_descriptor(): """Unit tests of the Attribute descriptor.""" class TestAttributes: attr_none = Attribute() attr_2 = Attribute(default=2) attr_3_attr2 = Attribute(default=3, secondary_attribute='attr_2') attr_none_attr2 = Attribute(default=None, secondary_attribute='attr_2') attr_none_nonexist = Attribute( default=None, secondary_attribute='nonexist' ) t = TestAttributes() # Defaults assert t.attr_none is None assert t.attr_2 == 2 assert t.attr_3_attr2 == 3 assert t.attr_none_attr2 == t.attr_2 assert t.attr_none_nonexist is None # No default and non-existent secondary attr # Setting values via '_'-prefixed internal vars # (as would normally done in __init__) t._attr_none = 10 assert t.attr_none == 10 t._attr_2 = 20 assert t.attr_2 == 20 assert t.attr_3_attr2 == 3 assert t.attr_none_attr2 == t.attr_2 t._attr_none_attr2 = 40 assert t.attr_none_attr2 == 40 # Make sure setting values via public attribute fails with pytest.raises(AttributeError) as err: t.attr_none = 5 assert 'Cannot set frame attribute' in str(err.value) def test_frame_subclass_attribute_descriptor(): """Unit test of the attribute descriptors in subclasses.""" _EQUINOX_B1980 = Time('B1980', scale='tai') class MyFK4(FK4): # equinox inherited from FK4, obstime overridden, and newattr is new obstime = TimeAttribute(default=_EQUINOX_B1980) newattr = Attribute(default='newattr') mfk4 = MyFK4() assert mfk4.equinox.value == 'B1950.000' assert mfk4.obstime.value == 'B1980.000' assert mfk4.newattr == 'newattr' assert set(mfk4.get_frame_attr_names()) == set(['equinox', 'obstime', 'newattr']) mfk4 = MyFK4(equinox='J1980.0', obstime='J1990.0', newattr='world') assert mfk4.equinox.value == 'J1980.000' assert mfk4.obstime.value == 'J1990.000' assert mfk4.newattr == 'world' def test_frame_multiple_inheritance_attribute_descriptor(): """ Ensure that all attributes are accumulated in case of inheritance from multiple BaseCoordinateFrames. 
See https://github.com/astropy/astropy/pull/11099#issuecomment-735829157 """ class Frame1(BaseCoordinateFrame): attr1 = Attribute() class Frame2(BaseCoordinateFrame): attr2 = Attribute() class Frame3(Frame1, Frame2): pass assert len(Frame3.frame_attributes) == 2 assert 'attr1' in Frame3.frame_attributes assert 'attr2' in Frame3.frame_attributes # In case the same attribute exists in both frames, the one from the # left-most class in the MRO should take precedence class Frame4(BaseCoordinateFrame): attr1 = Attribute() attr2 = Attribute() class Frame5(Frame1, Frame4): pass assert Frame5.frame_attributes['attr1'] is Frame1.frame_attributes['attr1'] assert Frame5.frame_attributes['attr2'] is Frame4.frame_attributes['attr2'] def test_differentialattribute(): # Test logic of passing input through to allowed class vel = [1, 2, 3]*u.km/u.s dif = r.CartesianDifferential(vel) class TestFrame(BaseCoordinateFrame): attrtest = DifferentialAttribute( default=dif, allowed_classes=[r.CartesianDifferential]) frame1 = TestFrame() frame2 = TestFrame(attrtest=dif) frame3 = TestFrame(attrtest=vel) assert np.all(frame1.attrtest.d_xyz == frame2.attrtest.d_xyz) assert np.all(frame1.attrtest.d_xyz == frame3.attrtest.d_xyz) # This shouldn't work if there is more than one allowed class: class TestFrame2(BaseCoordinateFrame): attrtest = DifferentialAttribute( default=dif, allowed_classes=[r.CartesianDifferential, r.CylindricalDifferential]) frame1 = TestFrame2() frame2 = TestFrame2(attrtest=dif) with pytest.raises(TypeError): TestFrame2(attrtest=vel) def test_create_data_frames(): # from repr i1 = ICRS(r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)) i2 = ICRS(r.UnitSphericalRepresentation(lon=1*u.deg, lat=2*u.deg)) # from preferred name i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc) i4 = ICRS(ra=1*u.deg, dec=2*u.deg) assert i1.data.lat == i3.data.lat assert i1.data.lon == i3.data.lon assert i1.data.distance == i3.data.distance assert i2.data.lat == i4.data.lat assert i2.data.lon == i4.data.lon # now make sure the preferred names work as properties assert_allclose(i1.ra, i3.ra) assert_allclose(i2.ra, i4.ra) assert_allclose(i1.distance, i3.distance) with pytest.raises(AttributeError): i1.ra = [11.]*u.deg def test_create_orderered_data(): TOL = 1e-10*u.deg i = ICRS(1*u.deg, 2*u.deg) assert (i.ra - 1*u.deg) < TOL assert (i.dec - 2*u.deg) < TOL g = Galactic(1*u.deg, 2*u.deg) assert (g.l - 1*u.deg) < TOL assert (g.b - 2*u.deg) < TOL a = AltAz(1*u.deg, 2*u.deg) assert (a.az - 1*u.deg) < TOL assert (a.alt - 2*u.deg) < TOL with pytest.raises(TypeError): ICRS(1*u.deg, 2*u.deg, 1*u.deg, 2*u.deg) with pytest.raises(TypeError): sph = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc) ICRS(sph, 1*u.deg, 2*u.deg) def test_create_nodata_frames(): i = ICRS() assert len(i.get_frame_attr_names()) == 0 f5 = FK5() assert f5.equinox == FK5.get_frame_attr_names()['equinox'] f4 = FK4() assert f4.equinox == FK4.get_frame_attr_names()['equinox'] # obstime is special because it's a property that uses equinox if obstime is not set assert f4.obstime in (FK4.get_frame_attr_names()['obstime'], FK4.get_frame_attr_names()['equinox']) def test_no_data_nonscalar_frames(): a1 = AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day, temperature=np.ones((3, 1)) * u.deg_C) assert a1.obstime.shape == (3, 10) assert a1.temperature.shape == (3, 10) assert a1.shape == (3, 10) with pytest.raises(ValueError) as exc: AltAz(obstime=Time('2012-01-01') + np.arange(10.) 
* u.day, temperature=np.ones((3,)) * u.deg_C) assert 'inconsistent shapes' in str(exc.value) def test_frame_repr(): i = ICRS() assert repr(i) == '<ICRS Frame>' f5 = FK5() assert repr(f5).startswith('<FK5 Frame (equinox=') i2 = ICRS(ra=1*u.deg, dec=2*u.deg) i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc) assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n' ' (1., 2.)>') assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n' ' (1., 2., 3.)>') # try with arrays i2 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[2.1, 3.1]*u.deg) i3 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[-15.6, 17.1]*u.deg, distance=[11., 21.]*u.kpc) assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n' ' [(1.1, 2.1), (2.1, 3.1)]>') assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n' ' [(1.1, -15.6, 11.), (2.1, 17.1, 21.)]>') def test_frame_repr_vels(): i = ICRS(ra=1*u.deg, dec=2*u.deg, pm_ra_cosdec=1*u.marcsec/u.yr, pm_dec=2*u.marcsec/u.yr) # unit comes out as mas/yr because of the preferred units defined in the # frame RepresentationMapping assert repr(i) == ('<ICRS Coordinate: (ra, dec) in deg\n' ' (1., 2.)\n' ' (pm_ra_cosdec, pm_dec) in mas / yr\n' ' (1., 2.)>') def test_converting_units(): # this is a regular expression that with split (see below) removes what's # the decimal point to fix rounding problems rexrepr = re.compile(r'(.*?=\d\.).*?( .*?=\d\.).*?( .*)') # Use values that aren't subject to rounding down to X.9999... i2 = ICRS(ra=2.*u.deg, dec=2.*u.deg) i2_many = ICRS(ra=[2., 4.]*u.deg, dec=[2., -8.1]*u.deg) # converting from FK5 to ICRS and back changes the *internal* representation, # but it should still come out in the preferred form i4 = i2.transform_to(FK5()).transform_to(ICRS()) i4_many = i2_many.transform_to(FK5()).transform_to(ICRS()) ri2 = ''.join(rexrepr.split(repr(i2))) ri4 = ''.join(rexrepr.split(repr(i4))) assert ri2 == ri4 assert i2.data.lon.unit != i4.data.lon.unit # Internal repr changed ri2_many = ''.join(rexrepr.split(repr(i2_many))) ri4_many = ''.join(rexrepr.split(repr(i4_many))) assert ri2_many == ri4_many assert i2_many.data.lon.unit != i4_many.data.lon.unit # Internal repr changed # but that *shouldn't* hold if we turn off units for the representation class FakeICRS(ICRS): frame_specific_representation_info = { 'spherical': [RepresentationMapping('lon', 'ra', u.hourangle), RepresentationMapping('lat', 'dec', None), RepresentationMapping('distance', 'distance')] # should fall back to default of None unit } fi = FakeICRS(i4.data) ri2 = ''.join(rexrepr.split(repr(i2))) rfi = ''.join(rexrepr.split(repr(fi))) rfi = re.sub('FakeICRS', 'ICRS', rfi) # Force frame name to match assert ri2 != rfi # the attributes should also get the right units assert i2.dec.unit == i4.dec.unit # unless no/explicitly given units assert i2.dec.unit != fi.dec.unit assert i2.ra.unit != fi.ra.unit assert fi.ra.unit == u.hourangle def test_representation_info(): class NewICRS1(ICRS): frame_specific_representation_info = { r.SphericalRepresentation: [ RepresentationMapping('lon', 'rara', u.hourangle), RepresentationMapping('lat', 'decdec', u.degree), RepresentationMapping('distance', 'distance', u.kpc)] } i1 = NewICRS1(rara=10*u.degree, decdec=-12*u.deg, distance=1000*u.pc, pm_rara_cosdecdec=100*u.mas/u.yr, pm_decdec=17*u.mas/u.yr, radial_velocity=10*u.km/u.s) assert allclose(i1.rara, 10*u.deg) assert i1.rara.unit == u.hourangle assert allclose(i1.decdec, -12*u.deg) assert allclose(i1.distance, 1000*u.pc) assert i1.distance.unit == u.kpc assert allclose(i1.pm_rara_cosdecdec, 
100*u.mas/u.yr) assert allclose(i1.pm_decdec, 17*u.mas/u.yr) # this should auto-set the names of UnitSpherical: i1.set_representation_cls(r.UnitSphericalRepresentation, s=r.UnitSphericalCosLatDifferential) assert allclose(i1.rara, 10*u.deg) assert allclose(i1.decdec, -12*u.deg) assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr) assert allclose(i1.pm_decdec, 17*u.mas/u.yr) # For backwards compatibility, we also support the string name in the # representation info dictionary: class NewICRS2(ICRS): frame_specific_representation_info = { 'spherical': [ RepresentationMapping('lon', 'ang1', u.hourangle), RepresentationMapping('lat', 'ang2', u.degree), RepresentationMapping('distance', 'howfar', u.kpc)] } i2 = NewICRS2(ang1=10*u.degree, ang2=-12*u.deg, howfar=1000*u.pc) assert allclose(i2.ang1, 10*u.deg) assert i2.ang1.unit == u.hourangle assert allclose(i2.ang2, -12*u.deg) assert allclose(i2.howfar, 1000*u.pc) assert i2.howfar.unit == u.kpc # Test that the differential kwargs get overridden class NewICRS3(ICRS): frame_specific_representation_info = { r.SphericalCosLatDifferential: [ RepresentationMapping('d_lon_coslat', 'pm_ang1', u.hourangle/u.year), RepresentationMapping('d_lat', 'pm_ang2'), RepresentationMapping('d_distance', 'vlos', u.kpc/u.Myr)] } i3 = NewICRS3(lon=10*u.degree, lat=-12*u.deg, distance=1000*u.pc, pm_ang1=1*u.mas/u.yr, pm_ang2=2*u.mas/u.yr, vlos=100*u.km/u.s) assert allclose(i3.pm_ang1, 1*u.mas/u.yr) assert i3.pm_ang1.unit == u.hourangle/u.year assert allclose(i3.pm_ang2, 2*u.mas/u.yr) assert allclose(i3.vlos, 100*u.km/u.s) assert i3.vlos.unit == u.kpc/u.Myr def test_realizing(): rep = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc) i = ICRS() i2 = i.realize_frame(rep) assert not i.has_data assert i2.has_data f = FK5(equinox=Time('J2001')) f2 = f.realize_frame(rep) assert not f.has_data assert f2.has_data assert f2.equinox == f.equinox assert f2.equinox != FK5.get_frame_attr_names()['equinox'] # Check that a nicer error message is returned: with pytest.raises(TypeError) as excinfo: f.realize_frame(f.representation_type) assert ('Class passed as data instead of a representation' in excinfo.value.args[0]) def test_replicating(): i = ICRS(ra=[1]*u.deg, dec=[2]*u.deg) icopy = i.replicate(copy=True) irepl = i.replicate(copy=False) i.data._lat[:] = 0*u.deg assert np.all(i.data.lat == irepl.data.lat) assert np.all(i.data.lat != icopy.data.lat) iclone = i.replicate_without_data() assert i.has_data assert not iclone.has_data aa = AltAz(alt=1*u.deg, az=2*u.deg, obstime=Time('J2000')) aaclone = aa.replicate_without_data(obstime=Time('J2001')) assert not aaclone.has_data assert aa.obstime != aaclone.obstime assert aa.pressure == aaclone.pressure assert aa.obswl == aaclone.obswl def test_getitem(): rep = r.SphericalRepresentation( [1, 2, 3]*u.deg, [4, 5, 6]*u.deg, [7, 8, 9]*u.kpc) i = ICRS(rep) assert len(i.ra) == 3 iidx = i[1:] assert len(iidx.ra) == 2 iidx2 = i[0] assert iidx2.ra.isscalar def test_transform(): """ This test just makes sure the transform architecture works, but does *not* actually test all the builtin transforms themselves are accurate. 
""" i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) f = i.transform_to(FK5()) i2 = f.transform_to(ICRS()) assert i2.data.__class__ == r.UnitSphericalRepresentation assert_allclose(i.ra, i2.ra) assert_allclose(i.dec, i2.dec) i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) f = i.transform_to(FK5()) i2 = f.transform_to(ICRS()) assert i2.data.__class__ != r.UnitSphericalRepresentation f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001')) f4 = f.transform_to(FK4()) f4_2 = f.transform_to(FK4(equinox=f.equinox)) # make sure attributes are copied over correctly assert f4.equinox == FK4().equinox assert f4_2.equinox == f.equinox # make sure self-transforms also work i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) i2 = i.transform_to(ICRS()) assert_allclose(i.ra, i2.ra) assert_allclose(i.dec, i2.dec) f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001')) f2 = f.transform_to(FK5()) # default equinox, so should be *different* assert f2.equinox == FK5().equinox with pytest.raises(AssertionError): assert_allclose(f.ra, f2.ra) with pytest.raises(AssertionError): assert_allclose(f.dec, f2.dec) # finally, check Galactic round-tripping i1 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) i2 = i1.transform_to(Galactic()).transform_to(ICRS()) assert_allclose(i1.ra, i2.ra) assert_allclose(i1.dec, i2.dec) def test_transform_to_nonscalar_nodata_frame(): # https://github.com/astropy/astropy/pull/5254#issuecomment-241592353 times = Time('2016-08-23') + np.linspace(0, 10, 12)*u.day coo1 = ICRS(ra=[[0.], [10.], [20.]]*u.deg, dec=[[-30.], [30.], [60.]]*u.deg) coo2 = coo1.transform_to(FK5(equinox=times)) assert coo2.shape == (3, 12) def test_setitem_no_velocity(): """Test different flavors of item setting for a Frame without a velocity. """ obstime = 'B1955' sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime) sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime) sc1 = sc0.copy() sc1_repr = repr(sc1) assert 'representation' in sc1.cache sc1[1] = sc2[0] assert sc1.cache == {} assert repr(sc2) != sc1_repr assert np.allclose(sc1.ra.to_value(u.deg), [1, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [3, 30]) assert sc1.obstime == sc2.obstime assert sc1.name == 'fk4' sc1 = sc0.copy() sc1[:] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [10, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 30]) sc1 = sc0.copy() sc1[:] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [10, 20]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 40]) sc1 = sc0.copy() sc1[[1, 0]] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [20, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [40, 30]) # Works for array-valued obstime so long as they are considered equivalent sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, obstime]) sc1[0] = sc2[0] # Multidimensional coordinates sc1 = FK4([[1, 2], [3, 4]] * u.deg, [[5, 6], [7, 8]] * u.deg) sc2 = FK4([[10, 20], [30, 40]] * u.deg, [[50, 60], [70, 80]] * u.deg) sc1[0] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [[10, 20], [3, 4]]) assert np.allclose(sc1.dec.to_value(u.deg), [[50, 60], [7, 8]]) def test_setitem_velocities(): """Test different flavors of item setting for a Frame with a velocity. 
""" sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s, obstime='B1950') sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, radial_velocity=[10, 20]*u.km/u.s, obstime='B1950') sc1 = sc0.copy() sc1[1] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [1, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [3, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [1, 10]) assert sc1.obstime == sc2.obstime assert sc1.name == 'fk4' sc1 = sc0.copy() sc1[:] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [10, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 10]) sc1 = sc0.copy() sc1[:] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [10, 20]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 40]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 20]) sc1 = sc0.copy() sc1[[1, 0]] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [20, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [40, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [20, 10]) def test_setitem_exceptions(): obstime = 'B1950' sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg) sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime) sc1 = Galactic(sc0.ra, sc0.dec) with pytest.raises(TypeError, match='can only set from object of same class: ' 'Galactic vs. FK4'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra, sc0.dec, obstime='B2001') with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra[0], sc0.dec[0], obstime=obstime) with pytest.raises(TypeError, match="scalar 'FK4' frame object does not support " 'item assignment'): sc1[0] = sc2[0] sc1 = FK4(obstime=obstime) with pytest.raises(ValueError, match='cannot set frame which has no data'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, 'B1980']) with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] # Wrong shape sc1 = FK4([sc0.ra], [sc0.dec], obstime=[obstime, 'B1980']) with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] def test_sep(): i1 = ICRS(ra=0*u.deg, dec=1*u.deg) i2 = ICRS(ra=0*u.deg, dec=2*u.deg) sep = i1.separation(i2) assert sep.deg == 1 i3 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) i4 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[4, 5]*u.kpc) sep3d = i3.separation_3d(i4) assert_allclose(sep3d.to(u.kpc), np.array([1, 1])*u.kpc) # check that it works even with velocities i5 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc, pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr, radial_velocity=[5, 6]*u.km/u.s) i6 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[7, 8]*u.kpc, pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr, radial_velocity=[5, 6]*u.km/u.s) sep3d = i5.separation_3d(i6) assert_allclose(sep3d.to(u.kpc), np.array([2, 2])*u.kpc) # 3d separations of dimensionless distances should still work i7 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.one) i8 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=4*u.one) sep3d = i7.separation_3d(i8) assert_allclose(sep3d, 1*u.one) # but should fail with non-dimensionless with pytest.raises(ValueError): i7.separation_3d(i3) def test_time_inputs(): """ Test validation and conversion of inputs for equinox and obstime attributes. 
""" c = FK4(1 * u.deg, 2 * u.deg, equinox='J2001.5', obstime='2000-01-01 12:00:00') assert c.equinox == Time('J2001.5') assert c.obstime == Time('2000-01-01 12:00:00') with pytest.raises(ValueError) as err: c = FK4(1 * u.deg, 2 * u.deg, equinox=1.5) assert 'Invalid time input' in str(err.value) with pytest.raises(ValueError) as err: c = FK4(1 * u.deg, 2 * u.deg, obstime='hello') assert 'Invalid time input' in str(err.value) # A vector time should work if the shapes match, but we don't automatically # broadcast the basic data (just like time). FK4([1, 2] * u.deg, [2, 3] * u.deg, obstime=['J2000', 'J2001']) with pytest.raises(ValueError) as err: FK4(1 * u.deg, 2 * u.deg, obstime=['J2000', 'J2001']) assert 'shape' in str(err.value) def test_is_frame_attr_default(): """ Check that the `is_frame_attr_default` machinery works as expected """ c1 = FK5(ra=1*u.deg, dec=1*u.deg) c2 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=FK5.get_frame_attr_names()['equinox']) c3 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=Time('J2001.5')) assert c1.equinox == c2.equinox assert c1.equinox != c3.equinox assert c1.is_frame_attr_default('equinox') assert not c2.is_frame_attr_default('equinox') assert not c3.is_frame_attr_default('equinox') c4 = c1.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) c5 = c2.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) assert c4.is_frame_attr_default('equinox') assert not c5.is_frame_attr_default('equinox') def test_altaz_attributes(): aa = AltAz(1*u.deg, 2*u.deg) assert aa.obstime is None assert aa.location is None aa2 = AltAz(1*u.deg, 2*u.deg, obstime='J2000') assert aa2.obstime == Time('J2000') aa3 = AltAz(1*u.deg, 2*u.deg, location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) assert isinstance(aa3.location, EarthLocation) def test_hadec_attributes(): hd = HADec(1*u.hourangle, 2*u.deg) assert hd.ha == 1.*u.hourangle assert hd.dec == 2*u.deg assert hd.obstime is None assert hd.location is None hd2 = HADec(23*u.hourangle, -2*u.deg, obstime='J2000', location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) assert_allclose(hd2.ha, -1*u.hourangle) assert hd2.dec == -2*u.deg assert hd2.obstime == Time('J2000') assert isinstance(hd2.location, EarthLocation) sr = hd2.represent_as(r.SphericalRepresentation) assert_allclose(sr.lon, -1*u.hourangle) def test_representation(): """ Test the getter and setter properties for `representation` """ # Create the frame object. icrs = ICRS(ra=1*u.deg, dec=1*u.deg) data = icrs.data # Create some representation objects. icrs_cart = icrs.cartesian icrs_spher = icrs.spherical icrs_cyl = icrs.cylindrical # Testing when `_representation` set to `CartesianRepresentation`. icrs.representation_type = r.CartesianRepresentation assert icrs.representation_type == r.CartesianRepresentation assert icrs_cart.x == icrs.x assert icrs_cart.y == icrs.y assert icrs_cart.z == icrs.z assert icrs.data == data # Testing that an ICRS object in CartesianRepresentation must not have spherical attributes. for attr in ('ra', 'dec', 'distance'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) # Testing when `_representation` set to `CylindricalRepresentation`. icrs.representation_type = r.CylindricalRepresentation assert icrs.representation_type == r.CylindricalRepresentation assert icrs.data == data # Testing setter input using text argument for spherical. 
icrs.representation_type = 'spherical' assert icrs.representation_type is r.SphericalRepresentation assert icrs_spher.lat == icrs.dec assert icrs_spher.lon == icrs.ra assert icrs_spher.distance == icrs.distance assert icrs.data == data # Testing that an ICRS object in SphericalRepresentation must not have cartesian attributes. for attr in ('x', 'y', 'z'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) # Testing setter input using text argument for cylindrical. icrs.representation_type = 'cylindrical' assert icrs.representation_type is r.CylindricalRepresentation assert icrs_cyl.rho == icrs.rho assert icrs_cyl.phi == icrs.phi assert icrs_cyl.z == icrs.z assert icrs.data == data # Testing that an ICRS object in CylindricalRepresentation must not have spherical attributes. for attr in ('ra', 'dec', 'distance'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) with pytest.raises(ValueError) as err: icrs.representation_type = 'WRONG' assert 'but must be a BaseRepresentation class' in str(err.value) with pytest.raises(ValueError) as err: icrs.representation_type = ICRS assert 'but must be a BaseRepresentation class' in str(err.value) def test_represent_as(): icrs = ICRS(ra=1*u.deg, dec=1*u.deg) cart1 = icrs.represent_as('cartesian') cart2 = icrs.represent_as(r.CartesianRepresentation) cart1.x == cart2.x cart1.y == cart2.y cart1.z == cart2.z # now try with velocities icrs = ICRS(ra=0*u.deg, dec=0*u.deg, distance=10*u.kpc, pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr, radial_velocity=1*u.km/u.s) # single string rep2 = icrs.represent_as('cylindrical') assert isinstance(rep2, r.CylindricalRepresentation) assert isinstance(rep2.differentials['s'], r.CylindricalDifferential) # single class with positional in_frame_units, verify that warning raised with pytest.warns(AstropyWarning, match='argument position') as w: icrs.represent_as(r.CylindricalRepresentation, False) assert len(w) == 1 # TODO: this should probably fail in the future once we figure out a better # workaround for dealing with UnitSphericalRepresentation's with # RadialDifferential's # two classes # rep2 = icrs.represent_as(r.CartesianRepresentation, # r.SphericalCosLatDifferential) # assert isinstance(rep2, r.CartesianRepresentation) # assert isinstance(rep2.differentials['s'], r.SphericalCosLatDifferential) with pytest.raises(ValueError): icrs.represent_as('odaigahara') def test_shorthand_representations(): rep = r.CartesianRepresentation([1, 2, 3]*u.pc) dif = r.CartesianDifferential([1, 2, 3]*u.km/u.s) rep = rep.with_differentials(dif) icrs = ICRS(rep) cyl = icrs.cylindrical assert isinstance(cyl, r.CylindricalRepresentation) assert isinstance(cyl.differentials['s'], r.CylindricalDifferential) sph = icrs.spherical assert isinstance(sph, r.SphericalRepresentation) assert isinstance(sph.differentials['s'], r.SphericalDifferential) sph = icrs.sphericalcoslat assert isinstance(sph, r.SphericalRepresentation) assert isinstance(sph.differentials['s'], r.SphericalCosLatDifferential) def test_equal(): obstime = 'B1955' sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime) sc2 = FK4([1, 20]*u.deg, [3, 4]*u.deg, obstime=obstime) # Compare arrays and scalars eq = sc1 == sc2 ne = sc1 != sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) assert (sc1[0] == sc2[0]) == True # noqa (numpy True not Python True) assert (sc1[0] != sc2[0]) == False # noqa # Broadcasting eq = sc1[0] == sc2 ne = sc1[0] 
!= sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) # With diff only in velocity sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s) sc2 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 20]*u.km/u.s) eq = sc1 == sc2 ne = sc1 != sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) assert (sc1[0] == sc2[0]) == True # noqa assert (sc1[0] != sc2[0]) == False # noqa assert (FK4() == ICRS()) is False assert (FK4() == FK4(obstime='J1999')) is False def test_equal_exceptions(): # Shape mismatch sc1 = FK4([1, 2, 3]*u.deg, [3, 4, 5]*u.deg) with pytest.raises(ValueError, match='cannot compare: shape mismatch'): sc1 == sc1[:2] # Different representation_type sc1 = FK4(1, 2, 3, representation_type='cartesian') sc2 = FK4(1*u.deg, 2*u.deg, 2, representation_type='spherical') with pytest.raises(TypeError, match='cannot compare: objects must have same ' 'class: CartesianRepresentation vs. SphericalRepresentation'): sc1 == sc2 # Different differential type sc1 = FK4(1*u.deg, 2*u.deg, radial_velocity=1*u.km/u.s) sc2 = FK4(1*u.deg, 2*u.deg, pm_ra_cosdec=1*u.mas/u.yr, pm_dec=1*u.mas/u.yr) with pytest.raises(TypeError, match='cannot compare: objects must have same ' 'class: RadialDifferential vs. UnitSphericalCosLatDifferential'): sc1 == sc2 # Different frame attribute sc1 = FK5(1*u.deg, 2*u.deg) sc2 = FK5(1*u.deg, 2*u.deg, equinox='J1999') with pytest.raises(TypeError, match=r'cannot compare: objects must have equivalent ' r'frames: <FK5 Frame \(equinox=J2000.000\)> ' r'vs. <FK5 Frame \(equinox=J1999.000\)>'): sc1 == sc2 # Different frame sc1 = FK4(1*u.deg, 2*u.deg) sc2 = FK5(1*u.deg, 2*u.deg, equinox='J2000') with pytest.raises(TypeError, match='cannot compare: objects must have equivalent ' r'frames: <FK4 Frame \(equinox=B1950.000, obstime=B1950.000\)> ' r'vs. 
<FK5 Frame \(equinox=J2000.000\)>'): sc1 == sc2 sc1 = FK4(1*u.deg, 2*u.deg) sc2 = FK4() with pytest.raises(ValueError, match='cannot compare: one frame has data and ' 'the other does not'): sc1 == sc2 with pytest.raises(ValueError, match='cannot compare: one frame has data and ' 'the other does not'): sc2 == sc1 def test_dynamic_attrs(): c = ICRS(1*u.deg, 2*u.deg) assert 'ra' in dir(c) assert 'dec' in dir(c) with pytest.raises(AttributeError) as err: c.blahblah assert "object has no attribute 'blahblah'" in str(err.value) with pytest.raises(AttributeError) as err: c.ra = 1 assert "Cannot set any frame attribute" in str(err.value) c.blahblah = 1 assert c.blahblah == 1 def test_nodata_error(): i = ICRS() with pytest.raises(ValueError) as excinfo: i.data assert 'does not have associated data' in str(excinfo.value) def test_len0_data(): i = ICRS([]*u.deg, []*u.deg) assert i.has_data repr(i) def test_quantity_attributes(): # make sure we can create a GCRS frame with valid inputs GCRS(obstime='J2002', obsgeoloc=[1, 2, 3]*u.km, obsgeovel=[4, 5, 6]*u.km/u.s) # make sure it fails for invalid lovs or vels with pytest.raises(TypeError): GCRS(obsgeoloc=[1, 2, 3]) # no unit with pytest.raises(u.UnitsError): GCRS(obsgeoloc=[1, 2, 3]*u.km/u.s) # incorrect unit with pytest.raises(ValueError): GCRS(obsgeoloc=[1, 3]*u.km) # incorrect shape def test_quantity_attribute_default(): # The default default (yes) is None: class MyCoord(BaseCoordinateFrame): someval = QuantityAttribute(unit=u.deg) frame = MyCoord() assert frame.someval is None frame = MyCoord(someval=15*u.deg) assert u.isclose(frame.someval, 15*u.deg) # This should work if we don't explicitly pass in a unit, but we pass in a # default value with a unit class MyCoord2(BaseCoordinateFrame): someval = QuantityAttribute(15*u.deg) frame = MyCoord2() assert u.isclose(frame.someval, 15*u.deg) # Since here no shape was given, we can set to any shape we like. frame = MyCoord2(someval=np.ones(3)*u.deg) assert frame.someval.shape == (3,) assert np.all(frame.someval == 1*u.deg) # We should also be able to insist on a given shape. class MyCoord3(BaseCoordinateFrame): someval = QuantityAttribute(unit=u.arcsec, shape=(3,)) frame = MyCoord3(someval=np.ones(3)*u.deg) assert frame.someval.shape == (3,) assert frame.someval.unit == u.arcsec assert u.allclose(frame.someval.value, 3600.) # The wrong shape raises. with pytest.raises(ValueError, match='shape'): MyCoord3(someval=1.*u.deg) # As does the wrong unit. with pytest.raises(u.UnitsError): MyCoord3(someval=np.ones(3)*u.m) # We are allowed a short-cut for zero. frame0 = MyCoord3(someval=0) assert frame0.someval.shape == (3,) assert frame0.someval.unit == u.arcsec assert np.all(frame0.someval.value == 0.) # But not if it has the wrong shape. 
with pytest.raises(ValueError, match='shape'): MyCoord3(someval=np.zeros(2)) # This should fail, if we don't pass in a default or a unit with pytest.raises(ValueError): class MyCoord(BaseCoordinateFrame): someval = QuantityAttribute() def test_eloc_attributes(): el = EarthLocation(lon=12.3*u.deg, lat=45.6*u.deg, height=1*u.km) it = ITRS(r.SphericalRepresentation(lon=12.3*u.deg, lat=45.6*u.deg, distance=1*u.km)) gc = GCRS(ra=12.3*u.deg, dec=45.6*u.deg, distance=6375*u.km) el1 = AltAz(location=el).location assert isinstance(el1, EarthLocation) # these should match *exactly* because the EarthLocation assert el1.lat == el.lat assert el1.lon == el.lon assert el1.height == el.height el2 = AltAz(location=it).location assert isinstance(el2, EarthLocation) # these should *not* match because giving something in Spherical ITRS is # *not* the same as giving it as an EarthLocation: EarthLocation is on an # elliptical geoid. So the longitude should match (because flattening is # only along the z-axis), but latitude should not. Also, height is relative # to the *surface* in EarthLocation, but the ITRS distance is relative to # the center of the Earth assert not allclose(el2.lat, it.spherical.lat) assert allclose(el2.lon, it.spherical.lon) assert el2.height < -6000*u.km el3 = AltAz(location=gc).location # GCRS inputs implicitly get transformed to ITRS and then onto # EarthLocation's elliptical geoid. So both lat and lon shouldn't match assert isinstance(el3, EarthLocation) assert not allclose(el3.lat, gc.dec) assert not allclose(el3.lon, gc.ra) assert np.abs(el3.height) < 500*u.km def test_equivalent_frames(): i = ICRS() i2 = ICRS(1*u.deg, 2*u.deg) assert i.is_equivalent_frame(i) assert i.is_equivalent_frame(i2) with pytest.raises(TypeError): assert i.is_equivalent_frame(10) with pytest.raises(TypeError): assert i2.is_equivalent_frame(SkyCoord(i2)) f0 = FK5() # this J2000 is TT f1 = FK5(equinox='J2000') f2 = FK5(1*u.deg, 2*u.deg, equinox='J2000') f3 = FK5(equinox='J2010') f4 = FK4(equinox='J2010') assert f1.is_equivalent_frame(f1) assert not i.is_equivalent_frame(f1) assert f0.is_equivalent_frame(f1) assert f1.is_equivalent_frame(f2) assert not f1.is_equivalent_frame(f3) assert not f3.is_equivalent_frame(f4) aa1 = AltAz() aa2 = AltAz(obstime='J2010') assert aa2.is_equivalent_frame(aa2) assert not aa1.is_equivalent_frame(i) assert not aa1.is_equivalent_frame(aa2) def test_equivalent_frame_coordinateattribute(): class FrameWithCoordinateAttribute(BaseCoordinateFrame): coord_attr = CoordinateAttribute(HCRS) # These frames should not be considered equivalent f0 = FrameWithCoordinateAttribute() f1 = FrameWithCoordinateAttribute(coord_attr=HCRS(1*u.deg, 2*u.deg, obstime='J2000')) f2 = FrameWithCoordinateAttribute(coord_attr=HCRS(3*u.deg, 4*u.deg, obstime='J2000')) f3 = FrameWithCoordinateAttribute(coord_attr=HCRS(1*u.deg, 2*u.deg, obstime='J2001')) assert not f0.is_equivalent_frame(f1) assert not f1.is_equivalent_frame(f0) assert not f1.is_equivalent_frame(f2) assert not f1.is_equivalent_frame(f3) assert not f2.is_equivalent_frame(f3) # They each should still be equivalent to a deep copy of themselves assert f0.is_equivalent_frame(deepcopy(f0)) assert f1.is_equivalent_frame(deepcopy(f1)) assert f2.is_equivalent_frame(deepcopy(f2)) assert f3.is_equivalent_frame(deepcopy(f3)) def test_equivalent_frame_locationattribute(): class FrameWithLocationAttribute(BaseCoordinateFrame): loc_attr = EarthLocationAttribute() # These frames should not be considered equivalent f0 = FrameWithLocationAttribute() location = 
EarthLocation(lat=-34, lon=19, height=300) f1 = FrameWithLocationAttribute(loc_attr=location) assert not f0.is_equivalent_frame(f1) assert not f1.is_equivalent_frame(f0) # They each should still be equivalent to a deep copy of themselves assert f0.is_equivalent_frame(deepcopy(f0)) assert f1.is_equivalent_frame(deepcopy(f1)) def test_representation_subclass(): # Regression test for #3354 # Normally when instantiating a frame without a distance the frame will try # and use UnitSphericalRepresentation internally instead of # SphericalRepresentation. frame = FK5(representation_type=r.SphericalRepresentation, ra=32 * u.deg, dec=20 * u.deg) assert type(frame._data) == r.UnitSphericalRepresentation assert frame.representation_type == r.SphericalRepresentation # If using a SphericalRepresentation class this used to not work, so we # test here that this is now fixed. class NewSphericalRepresentation(r.SphericalRepresentation): attr_classes = r.SphericalRepresentation.attr_classes frame = FK5(representation_type=NewSphericalRepresentation, lon=32 * u.deg, lat=20 * u.deg) assert type(frame._data) == r.UnitSphericalRepresentation assert frame.representation_type == NewSphericalRepresentation # A similar issue then happened in __repr__ with subclasses of # SphericalRepresentation. assert repr(frame) == ("<FK5 Coordinate (equinox=J2000.000): (lon, lat) in deg\n" " (32., 20.)>") # A more subtle issue is when specifying a custom # UnitSphericalRepresentation subclass for the data and # SphericalRepresentation or a subclass for the representation. class NewUnitSphericalRepresentation(r.UnitSphericalRepresentation): attr_classes = r.UnitSphericalRepresentation.attr_classes def __repr__(self): return "<NewUnitSphericalRepresentation: spam spam spam>" frame = FK5(NewUnitSphericalRepresentation(lon=32 * u.deg, lat=20 * u.deg), representation_type=NewSphericalRepresentation) assert repr(frame) == "<FK5 Coordinate (equinox=J2000.000): spam spam spam>" def test_getitem_representation(): """ Make sure current representation survives __getitem__ even if different from data representation. """ c = ICRS([1, 1] * u.deg, [2, 2] * u.deg) c.representation_type = 'cartesian' assert c[0].representation_type is r.CartesianRepresentation def test_component_error_useful(): """ Check that a data-less frame gives useful error messages about not having data when the attributes asked for are possible coordinate components """ i = ICRS() with pytest.raises(ValueError) as excinfo: i.ra assert 'does not have associated data' in str(excinfo.value) with pytest.raises(AttributeError) as excinfo1: i.foobar with pytest.raises(AttributeError) as excinfo2: i.lon # lon is *not* the component name despite being the underlying representation's name assert "object has no attribute 'foobar'" in str(excinfo1.value) assert "object has no attribute 'lon'" in str(excinfo2.value) def test_cache_clear(): i = ICRS(1*u.deg, 2*u.deg) # Add an in frame units version of the rep to the cache. repr(i) assert len(i.cache['representation']) == 2 i.cache.clear() assert len(i.cache['representation']) == 0 def test_inplace_array(): i = ICRS([[1, 2], [3, 4]]*u.deg, [[10, 20], [30, 40]]*u.deg) # Add an in frame units version of the rep to the cache. 
repr(i) # Check that repr() has added a rep to the cache assert len(i.cache['representation']) == 2 # Modify the data i.data.lon[:, 0] = [100, 200]*u.deg # Clear the cache i.cache.clear() # This will use a second (potentially cached rep) assert_allclose(i.ra, [[100, 2], [200, 4]]*u.deg) assert_allclose(i.dec, [[10, 20], [30, 40]]*u.deg) def test_inplace_change(): i = ICRS(1*u.deg, 2*u.deg) # Add an in frame units version of the rep to the cache. repr(i) # Check that repr() has added a rep to the cache assert len(i.cache['representation']) == 2 # Modify the data i.data.lon[()] = 10*u.deg # Clear the cache i.cache.clear() # This will use a second (potentially cached rep) assert i.ra == 10 * u.deg assert i.dec == 2 * u.deg def test_representation_with_multiple_differentials(): dif1 = r.CartesianDifferential([1, 2, 3]*u.km/u.s) dif2 = r.CartesianDifferential([1, 2, 3]*u.km/u.s**2) rep = r.CartesianRepresentation([1, 2, 3]*u.pc, differentials={'s': dif1, 's2': dif2}) # check warning is raised for a scalar with pytest.raises(ValueError): ICRS(rep) def test_representation_arg_backwards_compatibility(): # TODO: this test can be removed when the `representation` argument is # removed from the BaseCoordinateFrame initializer. c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type=r.CartesianRepresentation) c2 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type=r.CartesianRepresentation) c3 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type='cartesian') assert c1.x == c2.x assert c1.y == c2.y assert c1.z == c2.z assert c1.x == c3.x assert c1.y == c3.y assert c1.z == c3.z assert c1.representation_type == c1.representation_type with pytest.raises(ValueError): ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type='cartesian', representation='cartesian') def test_missing_component_error_names(): """ This test checks that the component names are frame component names, not representation or differential names, when referenced in an exception raised when not passing in enough data. 
For example: ICRS(ra=10*u.deg) should state: TypeError: __init__() missing 1 required positional argument: 'dec' """ with pytest.raises(TypeError) as e: ICRS(ra=150 * u.deg) assert "missing 1 required positional argument: 'dec'" in str(e.value) with pytest.raises(TypeError) as e: ICRS(ra=150*u.deg, dec=-11*u.deg, pm_ra=100*u.mas/u.yr, pm_dec=10*u.mas/u.yr) assert "pm_ra_cosdec" in str(e.value) def test_non_spherical_representation_unit_creation(unitphysics): class PhysicsICRS(ICRS): default_representation = r.PhysicsSphericalRepresentation pic = PhysicsICRS(phi=1*u.deg, theta=25*u.deg, r=1*u.kpc) assert isinstance(pic.data, r.PhysicsSphericalRepresentation) picu = PhysicsICRS(phi=1*u.deg, theta=25*u.deg) assert isinstance(picu.data, unitphysics) def test_attribute_repr(): class Spam: def _astropy_repr_in_frame(self): return "TEST REPR" class TestFrame(BaseCoordinateFrame): attrtest = Attribute(default=Spam()) assert "TEST REPR" in repr(TestFrame()) def test_component_names_repr(): # Frame class with new component names that includes a name swap class NameChangeFrame(BaseCoordinateFrame): default_representation = r.PhysicsSphericalRepresentation frame_specific_representation_info = { r.PhysicsSphericalRepresentation: [ RepresentationMapping('phi', 'theta', u.deg), RepresentationMapping('theta', 'phi', u.arcsec), RepresentationMapping('r', 'JUSTONCE', u.AU)] } frame = NameChangeFrame(0*u.deg, 0*u.arcsec, 0*u.AU) # Check for the new names in the Frame repr assert "(theta, phi, JUSTONCE)" in repr(frame) # Check that the letter "r" has not been replaced more than once in the Frame repr assert repr(frame).count("JUSTONCE") == 1 @pytest.fixture def reset_galactocentric_defaults(): # TODO: this can be removed, along with the "warning" test below, once we # switch the default to 'latest' in v4.1 # Resets before each test, and after (the yield is pytest magic) galactocentric_frame_defaults.set('v4.0') yield galactocentric_frame_defaults.set('v4.0') def test_galactocentric_defaults(reset_galactocentric_defaults): with galactocentric_frame_defaults.set('pre-v4.0'): galcen_pre40 = Galactocentric() with galactocentric_frame_defaults.set('v4.0'): galcen_40 = Galactocentric() with galactocentric_frame_defaults.set('latest'): galcen_latest = Galactocentric() # parameters that changed assert not u.allclose(galcen_pre40.galcen_distance, galcen_40.galcen_distance) assert not u.allclose(galcen_pre40.z_sun, galcen_40.z_sun) for k in galcen_40.get_frame_attr_names(): if isinstance(getattr(galcen_40, k), BaseCoordinateFrame): continue # skip coordinate comparison... 
elif isinstance(getattr(galcen_40, k), CartesianDifferential): assert u.allclose(getattr(galcen_40, k).d_xyz, getattr(galcen_latest, k).d_xyz) else: assert getattr(galcen_40, k) == getattr(galcen_latest, k) # test validate Galactocentric with galactocentric_frame_defaults.set('latest'): params = galactocentric_frame_defaults.validate(galcen_latest) references = galcen_latest.frame_attribute_references state = dict(parameters=params, references=references) assert galactocentric_frame_defaults.parameters == params assert galactocentric_frame_defaults.references == references assert galactocentric_frame_defaults._state == state # Test not one of accepted parameter types with pytest.raises(ValueError): galactocentric_frame_defaults.validate(ValueError) # test parameters property assert ( galactocentric_frame_defaults.parameters == galactocentric_frame_defaults.parameters ) def test_galactocentric_references(reset_galactocentric_defaults): # references in the "scientific paper"-sense with galactocentric_frame_defaults.set('pre-v4.0'): galcen_pre40 = Galactocentric() for k in galcen_pre40.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue assert k in galcen_pre40.frame_attribute_references with galactocentric_frame_defaults.set('v4.0'): galcen_40 = Galactocentric() for k in galcen_40.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue assert k in galcen_40.frame_attribute_references with galactocentric_frame_defaults.set('v4.0'): galcen_custom = Galactocentric(z_sun=15*u.pc) for k in galcen_custom.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue if k == 'z_sun': assert k not in galcen_custom.frame_attribute_references else: assert k in galcen_custom.frame_attribute_references def test_coordinateattribute_transformation(): class FrameWithCoordinateAttribute(BaseCoordinateFrame): coord_attr = CoordinateAttribute(HCRS) hcrs = HCRS(1*u.deg, 2*u.deg, 3*u.AU, obstime='2001-02-03') f1_frame = FrameWithCoordinateAttribute(coord_attr=hcrs) f1_skycoord = FrameWithCoordinateAttribute(coord_attr=SkyCoord(hcrs)) # The input is already HCRS, so the frame attribute should not change it assert f1_frame.coord_attr == hcrs # The output should not be different if a SkyCoord is provided assert f1_skycoord.coord_attr == f1_frame.coord_attr gcrs = GCRS(4*u.deg, 5*u.deg, 6*u.AU, obstime='2004-05-06') f2_frame = FrameWithCoordinateAttribute(coord_attr=gcrs) f2_skycoord = FrameWithCoordinateAttribute(coord_attr=SkyCoord(gcrs)) # The input needs to be converted from GCRS to HCRS assert isinstance(f2_frame.coord_attr, HCRS) # The `obstime` frame attribute should have been "merged" in a SkyCoord-style transformation assert f2_frame.coord_attr.obstime == gcrs.obstime # The output should not be different if a SkyCoord is provided assert f2_skycoord.coord_attr == f2_frame.coord_attr def test_realize_frame_accepts_kwargs(): c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type=r.CartesianRepresentation) new_data = r.CartesianRepresentation(x=11*u.pc, y=12*u.pc, z=13*u.pc) c2 = c1.realize_frame(new_data, representation_type="cartesian") c3 = c1.realize_frame(new_data, representation_type="cylindrical") assert c2.representation_type == r.CartesianRepresentation assert c3.representation_type == r.CylindricalRepresentation def test_nameless_frame_subclass(): """Note: this is a regression test for #11096""" class Test: pass # Subclass from a frame class and a non-frame class. # This subclassing is the test! 
class NewFrame(ICRS, Test):
    pass
astropy/astropy
astropy/coordinates/tests/test_frames.py
astropy/convolution/tests/test_convolve_speeds.py
# Licensed under a 3-clause BSD style license - see PYFITS.rst
import os
import shutil
import stat
import tempfile
import time

from astropy.io import fits


class FitsTestCase:
    def setup(self):
        self.data_dir = os.path.join(os.path.dirname(__file__), 'data')
        self.temp_dir = tempfile.mkdtemp(prefix='fits-test-')

        # Restore global settings to defaults
        # TODO: Replace this when there's a better way in the config API to
        # force config values to their defaults
        fits.conf.enable_record_valued_keyword_cards = True
        fits.conf.extension_name_case_sensitive = False
        fits.conf.strip_header_whitespace = True
        fits.conf.use_memmap = True

    def teardown(self):
        if hasattr(self, 'temp_dir') and os.path.exists(self.temp_dir):
            tries = 3
            while tries:
                try:
                    shutil.rmtree(self.temp_dir)
                    break
                except OSError:
                    # Probably couldn't delete the file because for whatever
                    # reason a handle to it is still open/hasn't been
                    # garbage-collected
                    time.sleep(0.5)
                    tries -= 1

        fits.conf.reset('enable_record_valued_keyword_cards')
        fits.conf.reset('extension_name_case_sensitive')
        fits.conf.reset('strip_header_whitespace')
        fits.conf.reset('use_memmap')

    def copy_file(self, filename):
        """Copies a backup of a test data file to the temp dir and sets its
        mode to writeable.
        """
        shutil.copy(self.data(filename), self.temp(filename))
        os.chmod(self.temp(filename), stat.S_IREAD | stat.S_IWRITE)

    def data(self, filename):
        """Returns the path to a test data file."""
        return os.path.join(self.data_dir, filename)

    def temp(self, filename):
        """Returns the full path to a file in the test temp dir."""
        return os.path.join(self.temp_dir, filename)
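For illustration only, here is a hypothetical test class (not in the original file) showing how FitsTestCase is meant to be used: pytest's xunit-style setup()/teardown() hooks give every test a fresh temp directory, and self.temp() builds paths inside it so nothing leaks onto disk. The class name and file name below are made up for this sketch.

import numpy as np

from astropy.io import fits


class TestRoundtrip(FitsTestCase):
    def test_roundtrip(self):
        # Write a small FITS file into the per-test temp dir...
        fits.PrimaryHDU(data=np.arange(3)).writeto(self.temp('example.fits'))
        # ...and read it back; teardown() removes the directory afterwards
        with fits.open(self.temp('example.fits')) as hdul:
            assert (hdul[0].data == np.arange(3)).all()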
""" sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s, obstime='B1950') sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, radial_velocity=[10, 20]*u.km/u.s, obstime='B1950') sc1 = sc0.copy() sc1[1] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [1, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [3, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [1, 10]) assert sc1.obstime == sc2.obstime assert sc1.name == 'fk4' sc1 = sc0.copy() sc1[:] = sc2[0] assert np.allclose(sc1.ra.to_value(u.deg), [10, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 10]) sc1 = sc0.copy() sc1[:] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [10, 20]) assert np.allclose(sc1.dec.to_value(u.deg), [30, 40]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [10, 20]) sc1 = sc0.copy() sc1[[1, 0]] = sc2[:] assert np.allclose(sc1.ra.to_value(u.deg), [20, 10]) assert np.allclose(sc1.dec.to_value(u.deg), [40, 30]) assert np.allclose(sc1.radial_velocity.to_value(u.km / u.s), [20, 10]) def test_setitem_exceptions(): obstime = 'B1950' sc0 = FK4([1, 2]*u.deg, [3, 4]*u.deg) sc2 = FK4([10, 20]*u.deg, [30, 40]*u.deg, obstime=obstime) sc1 = Galactic(sc0.ra, sc0.dec) with pytest.raises(TypeError, match='can only set from object of same class: ' 'Galactic vs. FK4'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra, sc0.dec, obstime='B2001') with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra[0], sc0.dec[0], obstime=obstime) with pytest.raises(TypeError, match="scalar 'FK4' frame object does not support " 'item assignment'): sc1[0] = sc2[0] sc1 = FK4(obstime=obstime) with pytest.raises(ValueError, match='cannot set frame which has no data'): sc1[0] = sc2[0] sc1 = FK4(sc0.ra, sc0.dec, obstime=[obstime, 'B1980']) with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] # Wrong shape sc1 = FK4([sc0.ra], [sc0.dec], obstime=[obstime, 'B1980']) with pytest.raises(ValueError, match='can only set frame item from an equivalent frame'): sc1[0] = sc2[0] def test_sep(): i1 = ICRS(ra=0*u.deg, dec=1*u.deg) i2 = ICRS(ra=0*u.deg, dec=2*u.deg) sep = i1.separation(i2) assert sep.deg == 1 i3 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) i4 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[4, 5]*u.kpc) sep3d = i3.separation_3d(i4) assert_allclose(sep3d.to(u.kpc), np.array([1, 1])*u.kpc) # check that it works even with velocities i5 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc, pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr, radial_velocity=[5, 6]*u.km/u.s) i6 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[7, 8]*u.kpc, pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr, radial_velocity=[5, 6]*u.km/u.s) sep3d = i5.separation_3d(i6) assert_allclose(sep3d.to(u.kpc), np.array([2, 2])*u.kpc) # 3d separations of dimensionless distances should still work i7 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.one) i8 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=4*u.one) sep3d = i7.separation_3d(i8) assert_allclose(sep3d, 1*u.one) # but should fail with non-dimensionless with pytest.raises(ValueError): i7.separation_3d(i3) def test_time_inputs(): """ Test validation and conversion of inputs for equinox and obstime attributes. 
""" c = FK4(1 * u.deg, 2 * u.deg, equinox='J2001.5', obstime='2000-01-01 12:00:00') assert c.equinox == Time('J2001.5') assert c.obstime == Time('2000-01-01 12:00:00') with pytest.raises(ValueError) as err: c = FK4(1 * u.deg, 2 * u.deg, equinox=1.5) assert 'Invalid time input' in str(err.value) with pytest.raises(ValueError) as err: c = FK4(1 * u.deg, 2 * u.deg, obstime='hello') assert 'Invalid time input' in str(err.value) # A vector time should work if the shapes match, but we don't automatically # broadcast the basic data (just like time). FK4([1, 2] * u.deg, [2, 3] * u.deg, obstime=['J2000', 'J2001']) with pytest.raises(ValueError) as err: FK4(1 * u.deg, 2 * u.deg, obstime=['J2000', 'J2001']) assert 'shape' in str(err.value) def test_is_frame_attr_default(): """ Check that the `is_frame_attr_default` machinery works as expected """ c1 = FK5(ra=1*u.deg, dec=1*u.deg) c2 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=FK5.get_frame_attr_names()['equinox']) c3 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=Time('J2001.5')) assert c1.equinox == c2.equinox assert c1.equinox != c3.equinox assert c1.is_frame_attr_default('equinox') assert not c2.is_frame_attr_default('equinox') assert not c3.is_frame_attr_default('equinox') c4 = c1.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) c5 = c2.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) assert c4.is_frame_attr_default('equinox') assert not c5.is_frame_attr_default('equinox') def test_altaz_attributes(): aa = AltAz(1*u.deg, 2*u.deg) assert aa.obstime is None assert aa.location is None aa2 = AltAz(1*u.deg, 2*u.deg, obstime='J2000') assert aa2.obstime == Time('J2000') aa3 = AltAz(1*u.deg, 2*u.deg, location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) assert isinstance(aa3.location, EarthLocation) def test_hadec_attributes(): hd = HADec(1*u.hourangle, 2*u.deg) assert hd.ha == 1.*u.hourangle assert hd.dec == 2*u.deg assert hd.obstime is None assert hd.location is None hd2 = HADec(23*u.hourangle, -2*u.deg, obstime='J2000', location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) assert_allclose(hd2.ha, -1*u.hourangle) assert hd2.dec == -2*u.deg assert hd2.obstime == Time('J2000') assert isinstance(hd2.location, EarthLocation) sr = hd2.represent_as(r.SphericalRepresentation) assert_allclose(sr.lon, -1*u.hourangle) def test_representation(): """ Test the getter and setter properties for `representation` """ # Create the frame object. icrs = ICRS(ra=1*u.deg, dec=1*u.deg) data = icrs.data # Create some representation objects. icrs_cart = icrs.cartesian icrs_spher = icrs.spherical icrs_cyl = icrs.cylindrical # Testing when `_representation` set to `CartesianRepresentation`. icrs.representation_type = r.CartesianRepresentation assert icrs.representation_type == r.CartesianRepresentation assert icrs_cart.x == icrs.x assert icrs_cart.y == icrs.y assert icrs_cart.z == icrs.z assert icrs.data == data # Testing that an ICRS object in CartesianRepresentation must not have spherical attributes. for attr in ('ra', 'dec', 'distance'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) # Testing when `_representation` set to `CylindricalRepresentation`. icrs.representation_type = r.CylindricalRepresentation assert icrs.representation_type == r.CylindricalRepresentation assert icrs.data == data # Testing setter input using text argument for spherical. 
icrs.representation_type = 'spherical' assert icrs.representation_type is r.SphericalRepresentation assert icrs_spher.lat == icrs.dec assert icrs_spher.lon == icrs.ra assert icrs_spher.distance == icrs.distance assert icrs.data == data # Testing that an ICRS object in SphericalRepresentation must not have cartesian attributes. for attr in ('x', 'y', 'z'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) # Testing setter input using text argument for cylindrical. icrs.representation_type = 'cylindrical' assert icrs.representation_type is r.CylindricalRepresentation assert icrs_cyl.rho == icrs.rho assert icrs_cyl.phi == icrs.phi assert icrs_cyl.z == icrs.z assert icrs.data == data # Testing that an ICRS object in CylindricalRepresentation must not have spherical attributes. for attr in ('ra', 'dec', 'distance'): with pytest.raises(AttributeError) as err: getattr(icrs, attr) assert 'object has no attribute' in str(err.value) with pytest.raises(ValueError) as err: icrs.representation_type = 'WRONG' assert 'but must be a BaseRepresentation class' in str(err.value) with pytest.raises(ValueError) as err: icrs.representation_type = ICRS assert 'but must be a BaseRepresentation class' in str(err.value) def test_represent_as(): icrs = ICRS(ra=1*u.deg, dec=1*u.deg) cart1 = icrs.represent_as('cartesian') cart2 = icrs.represent_as(r.CartesianRepresentation) cart1.x == cart2.x cart1.y == cart2.y cart1.z == cart2.z # now try with velocities icrs = ICRS(ra=0*u.deg, dec=0*u.deg, distance=10*u.kpc, pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr, radial_velocity=1*u.km/u.s) # single string rep2 = icrs.represent_as('cylindrical') assert isinstance(rep2, r.CylindricalRepresentation) assert isinstance(rep2.differentials['s'], r.CylindricalDifferential) # single class with positional in_frame_units, verify that warning raised with pytest.warns(AstropyWarning, match='argument position') as w: icrs.represent_as(r.CylindricalRepresentation, False) assert len(w) == 1 # TODO: this should probably fail in the future once we figure out a better # workaround for dealing with UnitSphericalRepresentation's with # RadialDifferential's # two classes # rep2 = icrs.represent_as(r.CartesianRepresentation, # r.SphericalCosLatDifferential) # assert isinstance(rep2, r.CartesianRepresentation) # assert isinstance(rep2.differentials['s'], r.SphericalCosLatDifferential) with pytest.raises(ValueError): icrs.represent_as('odaigahara') def test_shorthand_representations(): rep = r.CartesianRepresentation([1, 2, 3]*u.pc) dif = r.CartesianDifferential([1, 2, 3]*u.km/u.s) rep = rep.with_differentials(dif) icrs = ICRS(rep) cyl = icrs.cylindrical assert isinstance(cyl, r.CylindricalRepresentation) assert isinstance(cyl.differentials['s'], r.CylindricalDifferential) sph = icrs.spherical assert isinstance(sph, r.SphericalRepresentation) assert isinstance(sph.differentials['s'], r.SphericalDifferential) sph = icrs.sphericalcoslat assert isinstance(sph, r.SphericalRepresentation) assert isinstance(sph.differentials['s'], r.SphericalCosLatDifferential) def test_equal(): obstime = 'B1955' sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, obstime=obstime) sc2 = FK4([1, 20]*u.deg, [3, 4]*u.deg, obstime=obstime) # Compare arrays and scalars eq = sc1 == sc2 ne = sc1 != sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) assert (sc1[0] == sc2[0]) == True # noqa (numpy True not Python True) assert (sc1[0] != sc2[0]) == False # noqa # Broadcasting eq = sc1[0] == sc2 ne = sc1[0] 
!= sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) # With diff only in velocity sc1 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 2]*u.km/u.s) sc2 = FK4([1, 2]*u.deg, [3, 4]*u.deg, radial_velocity=[1, 20]*u.km/u.s) eq = sc1 == sc2 ne = sc1 != sc2 assert np.all(eq == [True, False]) assert np.all(ne == [False, True]) assert (sc1[0] == sc2[0]) == True # noqa assert (sc1[0] != sc2[0]) == False # noqa assert (FK4() == ICRS()) is False assert (FK4() == FK4(obstime='J1999')) is False def test_equal_exceptions(): # Shape mismatch sc1 = FK4([1, 2, 3]*u.deg, [3, 4, 5]*u.deg) with pytest.raises(ValueError, match='cannot compare: shape mismatch'): sc1 == sc1[:2] # Different representation_type sc1 = FK4(1, 2, 3, representation_type='cartesian') sc2 = FK4(1*u.deg, 2*u.deg, 2, representation_type='spherical') with pytest.raises(TypeError, match='cannot compare: objects must have same ' 'class: CartesianRepresentation vs. SphericalRepresentation'): sc1 == sc2 # Different differential type sc1 = FK4(1*u.deg, 2*u.deg, radial_velocity=1*u.km/u.s) sc2 = FK4(1*u.deg, 2*u.deg, pm_ra_cosdec=1*u.mas/u.yr, pm_dec=1*u.mas/u.yr) with pytest.raises(TypeError, match='cannot compare: objects must have same ' 'class: RadialDifferential vs. UnitSphericalCosLatDifferential'): sc1 == sc2 # Different frame attribute sc1 = FK5(1*u.deg, 2*u.deg) sc2 = FK5(1*u.deg, 2*u.deg, equinox='J1999') with pytest.raises(TypeError, match=r'cannot compare: objects must have equivalent ' r'frames: <FK5 Frame \(equinox=J2000.000\)> ' r'vs. <FK5 Frame \(equinox=J1999.000\)>'): sc1 == sc2 # Different frame sc1 = FK4(1*u.deg, 2*u.deg) sc2 = FK5(1*u.deg, 2*u.deg, equinox='J2000') with pytest.raises(TypeError, match='cannot compare: objects must have equivalent ' r'frames: <FK4 Frame \(equinox=B1950.000, obstime=B1950.000\)> ' r'vs. 
<FK5 Frame \(equinox=J2000.000\)>'): sc1 == sc2 sc1 = FK4(1*u.deg, 2*u.deg) sc2 = FK4() with pytest.raises(ValueError, match='cannot compare: one frame has data and ' 'the other does not'): sc1 == sc2 with pytest.raises(ValueError, match='cannot compare: one frame has data and ' 'the other does not'): sc2 == sc1 def test_dynamic_attrs(): c = ICRS(1*u.deg, 2*u.deg) assert 'ra' in dir(c) assert 'dec' in dir(c) with pytest.raises(AttributeError) as err: c.blahblah assert "object has no attribute 'blahblah'" in str(err.value) with pytest.raises(AttributeError) as err: c.ra = 1 assert "Cannot set any frame attribute" in str(err.value) c.blahblah = 1 assert c.blahblah == 1 def test_nodata_error(): i = ICRS() with pytest.raises(ValueError) as excinfo: i.data assert 'does not have associated data' in str(excinfo.value) def test_len0_data(): i = ICRS([]*u.deg, []*u.deg) assert i.has_data repr(i) def test_quantity_attributes(): # make sure we can create a GCRS frame with valid inputs GCRS(obstime='J2002', obsgeoloc=[1, 2, 3]*u.km, obsgeovel=[4, 5, 6]*u.km/u.s) # make sure it fails for invalid lovs or vels with pytest.raises(TypeError): GCRS(obsgeoloc=[1, 2, 3]) # no unit with pytest.raises(u.UnitsError): GCRS(obsgeoloc=[1, 2, 3]*u.km/u.s) # incorrect unit with pytest.raises(ValueError): GCRS(obsgeoloc=[1, 3]*u.km) # incorrect shape def test_quantity_attribute_default(): # The default default (yes) is None: class MyCoord(BaseCoordinateFrame): someval = QuantityAttribute(unit=u.deg) frame = MyCoord() assert frame.someval is None frame = MyCoord(someval=15*u.deg) assert u.isclose(frame.someval, 15*u.deg) # This should work if we don't explicitly pass in a unit, but we pass in a # default value with a unit class MyCoord2(BaseCoordinateFrame): someval = QuantityAttribute(15*u.deg) frame = MyCoord2() assert u.isclose(frame.someval, 15*u.deg) # Since here no shape was given, we can set to any shape we like. frame = MyCoord2(someval=np.ones(3)*u.deg) assert frame.someval.shape == (3,) assert np.all(frame.someval == 1*u.deg) # We should also be able to insist on a given shape. class MyCoord3(BaseCoordinateFrame): someval = QuantityAttribute(unit=u.arcsec, shape=(3,)) frame = MyCoord3(someval=np.ones(3)*u.deg) assert frame.someval.shape == (3,) assert frame.someval.unit == u.arcsec assert u.allclose(frame.someval.value, 3600.) # The wrong shape raises. with pytest.raises(ValueError, match='shape'): MyCoord3(someval=1.*u.deg) # As does the wrong unit. with pytest.raises(u.UnitsError): MyCoord3(someval=np.ones(3)*u.m) # We are allowed a short-cut for zero. frame0 = MyCoord3(someval=0) assert frame0.someval.shape == (3,) assert frame0.someval.unit == u.arcsec assert np.all(frame0.someval.value == 0.) # But not if it has the wrong shape. 
with pytest.raises(ValueError, match='shape'): MyCoord3(someval=np.zeros(2)) # This should fail, if we don't pass in a default or a unit with pytest.raises(ValueError): class MyCoord(BaseCoordinateFrame): someval = QuantityAttribute() def test_eloc_attributes(): el = EarthLocation(lon=12.3*u.deg, lat=45.6*u.deg, height=1*u.km) it = ITRS(r.SphericalRepresentation(lon=12.3*u.deg, lat=45.6*u.deg, distance=1*u.km)) gc = GCRS(ra=12.3*u.deg, dec=45.6*u.deg, distance=6375*u.km) el1 = AltAz(location=el).location assert isinstance(el1, EarthLocation) # these should match *exactly* because the EarthLocation assert el1.lat == el.lat assert el1.lon == el.lon assert el1.height == el.height el2 = AltAz(location=it).location assert isinstance(el2, EarthLocation) # these should *not* match because giving something in Spherical ITRS is # *not* the same as giving it as an EarthLocation: EarthLocation is on an # elliptical geoid. So the longitude should match (because flattening is # only along the z-axis), but latitude should not. Also, height is relative # to the *surface* in EarthLocation, but the ITRS distance is relative to # the center of the Earth assert not allclose(el2.lat, it.spherical.lat) assert allclose(el2.lon, it.spherical.lon) assert el2.height < -6000*u.km el3 = AltAz(location=gc).location # GCRS inputs implicitly get transformed to ITRS and then onto # EarthLocation's elliptical geoid. So both lat and lon shouldn't match assert isinstance(el3, EarthLocation) assert not allclose(el3.lat, gc.dec) assert not allclose(el3.lon, gc.ra) assert np.abs(el3.height) < 500*u.km def test_equivalent_frames(): i = ICRS() i2 = ICRS(1*u.deg, 2*u.deg) assert i.is_equivalent_frame(i) assert i.is_equivalent_frame(i2) with pytest.raises(TypeError): assert i.is_equivalent_frame(10) with pytest.raises(TypeError): assert i2.is_equivalent_frame(SkyCoord(i2)) f0 = FK5() # this J2000 is TT f1 = FK5(equinox='J2000') f2 = FK5(1*u.deg, 2*u.deg, equinox='J2000') f3 = FK5(equinox='J2010') f4 = FK4(equinox='J2010') assert f1.is_equivalent_frame(f1) assert not i.is_equivalent_frame(f1) assert f0.is_equivalent_frame(f1) assert f1.is_equivalent_frame(f2) assert not f1.is_equivalent_frame(f3) assert not f3.is_equivalent_frame(f4) aa1 = AltAz() aa2 = AltAz(obstime='J2010') assert aa2.is_equivalent_frame(aa2) assert not aa1.is_equivalent_frame(i) assert not aa1.is_equivalent_frame(aa2) def test_equivalent_frame_coordinateattribute(): class FrameWithCoordinateAttribute(BaseCoordinateFrame): coord_attr = CoordinateAttribute(HCRS) # These frames should not be considered equivalent f0 = FrameWithCoordinateAttribute() f1 = FrameWithCoordinateAttribute(coord_attr=HCRS(1*u.deg, 2*u.deg, obstime='J2000')) f2 = FrameWithCoordinateAttribute(coord_attr=HCRS(3*u.deg, 4*u.deg, obstime='J2000')) f3 = FrameWithCoordinateAttribute(coord_attr=HCRS(1*u.deg, 2*u.deg, obstime='J2001')) assert not f0.is_equivalent_frame(f1) assert not f1.is_equivalent_frame(f0) assert not f1.is_equivalent_frame(f2) assert not f1.is_equivalent_frame(f3) assert not f2.is_equivalent_frame(f3) # They each should still be equivalent to a deep copy of themselves assert f0.is_equivalent_frame(deepcopy(f0)) assert f1.is_equivalent_frame(deepcopy(f1)) assert f2.is_equivalent_frame(deepcopy(f2)) assert f3.is_equivalent_frame(deepcopy(f3)) def test_equivalent_frame_locationattribute(): class FrameWithLocationAttribute(BaseCoordinateFrame): loc_attr = EarthLocationAttribute() # These frames should not be considered equivalent f0 = FrameWithLocationAttribute() location = 
EarthLocation(lat=-34, lon=19, height=300) f1 = FrameWithLocationAttribute(loc_attr=location) assert not f0.is_equivalent_frame(f1) assert not f1.is_equivalent_frame(f0) # They each should still be equivalent to a deep copy of themselves assert f0.is_equivalent_frame(deepcopy(f0)) assert f1.is_equivalent_frame(deepcopy(f1)) def test_representation_subclass(): # Regression test for #3354 # Normally when instantiating a frame without a distance the frame will try # and use UnitSphericalRepresentation internally instead of # SphericalRepresentation. frame = FK5(representation_type=r.SphericalRepresentation, ra=32 * u.deg, dec=20 * u.deg) assert type(frame._data) == r.UnitSphericalRepresentation assert frame.representation_type == r.SphericalRepresentation # If using a SphericalRepresentation class this used to not work, so we # test here that this is now fixed. class NewSphericalRepresentation(r.SphericalRepresentation): attr_classes = r.SphericalRepresentation.attr_classes frame = FK5(representation_type=NewSphericalRepresentation, lon=32 * u.deg, lat=20 * u.deg) assert type(frame._data) == r.UnitSphericalRepresentation assert frame.representation_type == NewSphericalRepresentation # A similar issue then happened in __repr__ with subclasses of # SphericalRepresentation. assert repr(frame) == ("<FK5 Coordinate (equinox=J2000.000): (lon, lat) in deg\n" " (32., 20.)>") # A more subtle issue is when specifying a custom # UnitSphericalRepresentation subclass for the data and # SphericalRepresentation or a subclass for the representation. class NewUnitSphericalRepresentation(r.UnitSphericalRepresentation): attr_classes = r.UnitSphericalRepresentation.attr_classes def __repr__(self): return "<NewUnitSphericalRepresentation: spam spam spam>" frame = FK5(NewUnitSphericalRepresentation(lon=32 * u.deg, lat=20 * u.deg), representation_type=NewSphericalRepresentation) assert repr(frame) == "<FK5 Coordinate (equinox=J2000.000): spam spam spam>" def test_getitem_representation(): """ Make sure current representation survives __getitem__ even if different from data representation. """ c = ICRS([1, 1] * u.deg, [2, 2] * u.deg) c.representation_type = 'cartesian' assert c[0].representation_type is r.CartesianRepresentation def test_component_error_useful(): """ Check that a data-less frame gives useful error messages about not having data when the attributes asked for are possible coordinate components """ i = ICRS() with pytest.raises(ValueError) as excinfo: i.ra assert 'does not have associated data' in str(excinfo.value) with pytest.raises(AttributeError) as excinfo1: i.foobar with pytest.raises(AttributeError) as excinfo2: i.lon # lon is *not* the component name despite being the underlying representation's name assert "object has no attribute 'foobar'" in str(excinfo1.value) assert "object has no attribute 'lon'" in str(excinfo2.value) def test_cache_clear(): i = ICRS(1*u.deg, 2*u.deg) # Add an in frame units version of the rep to the cache. repr(i) assert len(i.cache['representation']) == 2 i.cache.clear() assert len(i.cache['representation']) == 0 def test_inplace_array(): i = ICRS([[1, 2], [3, 4]]*u.deg, [[10, 20], [30, 40]]*u.deg) # Add an in frame units version of the rep to the cache. 
repr(i) # Check that repr() has added a rep to the cache assert len(i.cache['representation']) == 2 # Modify the data i.data.lon[:, 0] = [100, 200]*u.deg # Clear the cache i.cache.clear() # This will use a second (potentially cached rep) assert_allclose(i.ra, [[100, 2], [200, 4]]*u.deg) assert_allclose(i.dec, [[10, 20], [30, 40]]*u.deg) def test_inplace_change(): i = ICRS(1*u.deg, 2*u.deg) # Add an in frame units version of the rep to the cache. repr(i) # Check that repr() has added a rep to the cache assert len(i.cache['representation']) == 2 # Modify the data i.data.lon[()] = 10*u.deg # Clear the cache i.cache.clear() # This will use a second (potentially cached rep) assert i.ra == 10 * u.deg assert i.dec == 2 * u.deg def test_representation_with_multiple_differentials(): dif1 = r.CartesianDifferential([1, 2, 3]*u.km/u.s) dif2 = r.CartesianDifferential([1, 2, 3]*u.km/u.s**2) rep = r.CartesianRepresentation([1, 2, 3]*u.pc, differentials={'s': dif1, 's2': dif2}) # check warning is raised for a scalar with pytest.raises(ValueError): ICRS(rep) def test_representation_arg_backwards_compatibility(): # TODO: this test can be removed when the `representation` argument is # removed from the BaseCoordinateFrame initializer. c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type=r.CartesianRepresentation) c2 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type=r.CartesianRepresentation) c3 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type='cartesian') assert c1.x == c2.x assert c1.y == c2.y assert c1.z == c2.z assert c1.x == c3.x assert c1.y == c3.y assert c1.z == c3.z assert c1.representation_type == c1.representation_type with pytest.raises(ValueError): ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type='cartesian', representation='cartesian') def test_missing_component_error_names(): """ This test checks that the component names are frame component names, not representation or differential names, when referenced in an exception raised when not passing in enough data. 
For example: ICRS(ra=10*u.deg) should state: TypeError: __init__() missing 1 required positional argument: 'dec' """ with pytest.raises(TypeError) as e: ICRS(ra=150 * u.deg) assert "missing 1 required positional argument: 'dec'" in str(e.value) with pytest.raises(TypeError) as e: ICRS(ra=150*u.deg, dec=-11*u.deg, pm_ra=100*u.mas/u.yr, pm_dec=10*u.mas/u.yr) assert "pm_ra_cosdec" in str(e.value) def test_non_spherical_representation_unit_creation(unitphysics): class PhysicsICRS(ICRS): default_representation = r.PhysicsSphericalRepresentation pic = PhysicsICRS(phi=1*u.deg, theta=25*u.deg, r=1*u.kpc) assert isinstance(pic.data, r.PhysicsSphericalRepresentation) picu = PhysicsICRS(phi=1*u.deg, theta=25*u.deg) assert isinstance(picu.data, unitphysics) def test_attribute_repr(): class Spam: def _astropy_repr_in_frame(self): return "TEST REPR" class TestFrame(BaseCoordinateFrame): attrtest = Attribute(default=Spam()) assert "TEST REPR" in repr(TestFrame()) def test_component_names_repr(): # Frame class with new component names that includes a name swap class NameChangeFrame(BaseCoordinateFrame): default_representation = r.PhysicsSphericalRepresentation frame_specific_representation_info = { r.PhysicsSphericalRepresentation: [ RepresentationMapping('phi', 'theta', u.deg), RepresentationMapping('theta', 'phi', u.arcsec), RepresentationMapping('r', 'JUSTONCE', u.AU)] } frame = NameChangeFrame(0*u.deg, 0*u.arcsec, 0*u.AU) # Check for the new names in the Frame repr assert "(theta, phi, JUSTONCE)" in repr(frame) # Check that the letter "r" has not been replaced more than once in the Frame repr assert repr(frame).count("JUSTONCE") == 1 @pytest.fixture def reset_galactocentric_defaults(): # TODO: this can be removed, along with the "warning" test below, once we # switch the default to 'latest' in v4.1 # Resets before each test, and after (the yield is pytest magic) galactocentric_frame_defaults.set('v4.0') yield galactocentric_frame_defaults.set('v4.0') def test_galactocentric_defaults(reset_galactocentric_defaults): with galactocentric_frame_defaults.set('pre-v4.0'): galcen_pre40 = Galactocentric() with galactocentric_frame_defaults.set('v4.0'): galcen_40 = Galactocentric() with galactocentric_frame_defaults.set('latest'): galcen_latest = Galactocentric() # parameters that changed assert not u.allclose(galcen_pre40.galcen_distance, galcen_40.galcen_distance) assert not u.allclose(galcen_pre40.z_sun, galcen_40.z_sun) for k in galcen_40.get_frame_attr_names(): if isinstance(getattr(galcen_40, k), BaseCoordinateFrame): continue # skip coordinate comparison... 
elif isinstance(getattr(galcen_40, k), CartesianDifferential): assert u.allclose(getattr(galcen_40, k).d_xyz, getattr(galcen_latest, k).d_xyz) else: assert getattr(galcen_40, k) == getattr(galcen_latest, k) # test validate Galactocentric with galactocentric_frame_defaults.set('latest'): params = galactocentric_frame_defaults.validate(galcen_latest) references = galcen_latest.frame_attribute_references state = dict(parameters=params, references=references) assert galactocentric_frame_defaults.parameters == params assert galactocentric_frame_defaults.references == references assert galactocentric_frame_defaults._state == state # Test not one of accepted parameter types with pytest.raises(ValueError): galactocentric_frame_defaults.validate(ValueError) # test parameters property assert ( galactocentric_frame_defaults.parameters == galactocentric_frame_defaults.parameters ) def test_galactocentric_references(reset_galactocentric_defaults): # references in the "scientific paper"-sense with galactocentric_frame_defaults.set('pre-v4.0'): galcen_pre40 = Galactocentric() for k in galcen_pre40.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue assert k in galcen_pre40.frame_attribute_references with galactocentric_frame_defaults.set('v4.0'): galcen_40 = Galactocentric() for k in galcen_40.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue assert k in galcen_40.frame_attribute_references with galactocentric_frame_defaults.set('v4.0'): galcen_custom = Galactocentric(z_sun=15*u.pc) for k in galcen_custom.get_frame_attr_names(): if k == 'roll': # no reference for this parameter continue if k == 'z_sun': assert k not in galcen_custom.frame_attribute_references else: assert k in galcen_custom.frame_attribute_references def test_coordinateattribute_transformation(): class FrameWithCoordinateAttribute(BaseCoordinateFrame): coord_attr = CoordinateAttribute(HCRS) hcrs = HCRS(1*u.deg, 2*u.deg, 3*u.AU, obstime='2001-02-03') f1_frame = FrameWithCoordinateAttribute(coord_attr=hcrs) f1_skycoord = FrameWithCoordinateAttribute(coord_attr=SkyCoord(hcrs)) # The input is already HCRS, so the frame attribute should not change it assert f1_frame.coord_attr == hcrs # The output should not be different if a SkyCoord is provided assert f1_skycoord.coord_attr == f1_frame.coord_attr gcrs = GCRS(4*u.deg, 5*u.deg, 6*u.AU, obstime='2004-05-06') f2_frame = FrameWithCoordinateAttribute(coord_attr=gcrs) f2_skycoord = FrameWithCoordinateAttribute(coord_attr=SkyCoord(gcrs)) # The input needs to be converted from GCRS to HCRS assert isinstance(f2_frame.coord_attr, HCRS) # The `obstime` frame attribute should have been "merged" in a SkyCoord-style transformation assert f2_frame.coord_attr.obstime == gcrs.obstime # The output should not be different if a SkyCoord is provided assert f2_skycoord.coord_attr == f2_frame.coord_attr def test_realize_frame_accepts_kwargs(): c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, representation_type=r.CartesianRepresentation) new_data = r.CartesianRepresentation(x=11*u.pc, y=12*u.pc, z=13*u.pc) c2 = c1.realize_frame(new_data, representation_type="cartesian") c3 = c1.realize_frame(new_data, representation_type="cylindrical") assert c2.representation_type == r.CartesianRepresentation assert c3.representation_type == r.CylindricalRepresentation def test_nameless_frame_subclass(): """Note: this is a regression test for #11096""" class Test: pass # Subclass from a frame class and a non-frame class. # This subclassing is the test! 
class NewFrame(ICRS, Test): pass
astropy/astropy
astropy/coordinates/tests/test_frames.py
astropy/io/fits/tests/__init__.py
"""Support for monitoring juicenet/juicepoint/juicebox based EVSE switches.""" from homeassistant.components.switch import SwitchEntity from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR from .entity import JuiceNetDevice async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the JuiceNet switches.""" entities = [] juicenet_data = hass.data[DOMAIN][config_entry.entry_id] api = juicenet_data[JUICENET_API] coordinator = juicenet_data[JUICENET_COORDINATOR] for device in api.devices: entities.append(JuiceNetChargeNowSwitch(device, coordinator)) async_add_entities(entities) class JuiceNetChargeNowSwitch(JuiceNetDevice, SwitchEntity): """Implementation of a JuiceNet switch.""" def __init__(self, device, coordinator): """Initialise the switch.""" super().__init__(device, "charge_now", coordinator) @property def name(self): """Return the name of the device.""" return f"{self.device.name} Charge Now" @property def is_on(self): """Return true if switch is on.""" return self.device.override_time != 0 async def async_turn_on(self, **kwargs): """Charge now.""" await self.device.set_override(True) async def async_turn_off(self, **kwargs): """Don't charge now.""" await self.device.set_override(False)
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/juicenet/switch.py
"""Support for Melissa Climate A/C.""" import logging from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( FAN_AUTO, FAN_HIGH, FAN_LOW, FAN_MEDIUM, HVAC_MODE_AUTO, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_HEAT, HVAC_MODE_OFF, SUPPORT_FAN_MODE, SUPPORT_TARGET_TEMPERATURE, ) from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, TEMP_CELSIUS from . import DATA_MELISSA _LOGGER = logging.getLogger(__name__) SUPPORT_FLAGS = SUPPORT_FAN_MODE | SUPPORT_TARGET_TEMPERATURE OP_MODES = [ HVAC_MODE_HEAT, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_OFF, ] FAN_MODES = [FAN_AUTO, FAN_HIGH, FAN_MEDIUM, FAN_LOW] async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Iterate through and add all Melissa devices.""" api = hass.data[DATA_MELISSA] devices = (await api.async_fetch_devices()).values() all_devices = [] for device in devices: if device["type"] == "melissa": all_devices.append(MelissaClimate(api, device["serial_number"], device)) async_add_entities(all_devices) class MelissaClimate(ClimateEntity): """Representation of a Melissa Climate device.""" def __init__(self, api, serial_number, init_data): """Initialize the climate device.""" self._name = init_data["name"] self._api = api self._serial_number = serial_number self._data = init_data["controller_log"] self._state = None self._cur_settings = None @property def name(self): """Return the name of the thermostat, if any.""" return self._name @property def fan_mode(self): """Return the current fan mode.""" if self._cur_settings is not None: return self.melissa_fan_to_hass(self._cur_settings[self._api.FAN]) @property def current_temperature(self): """Return the current temperature.""" if self._data: return self._data[self._api.TEMP] @property def current_humidity(self): """Return the current humidity value.""" if self._data: return self._data[self._api.HUMIDITY] @property def target_temperature_step(self): """Return the supported step of target temperature.""" return PRECISION_WHOLE @property def hvac_mode(self): """Return the current operation mode.""" if self._cur_settings is None: return None is_on = self._cur_settings[self._api.STATE] in ( self._api.STATE_ON, self._api.STATE_IDLE, ) if not is_on: return HVAC_MODE_OFF return self.melissa_op_to_hass(self._cur_settings[self._api.MODE]) @property def hvac_modes(self): """Return the list of available operation modes.""" return OP_MODES @property def fan_modes(self): """List of available fan modes.""" return FAN_MODES @property def target_temperature(self): """Return the temperature we try to reach.""" if self._cur_settings is None: return None return self._cur_settings[self._api.TEMP] @property def temperature_unit(self): """Return the unit of measurement which this thermostat uses.""" return TEMP_CELSIUS @property def min_temp(self): """Return the minimum supported temperature for the thermostat.""" return 16 @property def max_temp(self): """Return the maximum supported temperature for the thermostat.""" return 30 @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS async def async_set_temperature(self, **kwargs): """Set new target temperature.""" temp = kwargs.get(ATTR_TEMPERATURE) await self.async_send({self._api.TEMP: temp}) async def async_set_fan_mode(self, fan_mode): """Set fan mode.""" melissa_fan_mode = self.hass_fan_to_melissa(fan_mode) await self.async_send({self._api.FAN: melissa_fan_mode}) async def 
async_set_hvac_mode(self, hvac_mode): """Set operation mode.""" if hvac_mode == HVAC_MODE_OFF: await self.async_send({self._api.STATE: self._api.STATE_OFF}) return mode = self.hass_mode_to_melissa(hvac_mode) await self.async_send( {self._api.MODE: mode, self._api.STATE: self._api.STATE_ON} ) async def async_send(self, value): """Send action to service.""" try: old_value = self._cur_settings.copy() self._cur_settings.update(value) except AttributeError: old_value = None if not await self._api.async_send( self._serial_number, "melissa", self._cur_settings ): self._cur_settings = old_value async def async_update(self): """Get latest data from Melissa.""" try: self._data = (await self._api.async_status(cached=True))[ self._serial_number ] self._cur_settings = ( await self._api.async_cur_settings(self._serial_number) )["controller"]["_relation"]["command_log"] except KeyError: _LOGGER.warning("Unable to update entity %s", self.entity_id) def melissa_op_to_hass(self, mode): """Translate Melissa modes to hass states.""" if mode == self._api.MODE_HEAT: return HVAC_MODE_HEAT if mode == self._api.MODE_COOL: return HVAC_MODE_COOL if mode == self._api.MODE_DRY: return HVAC_MODE_DRY if mode == self._api.MODE_FAN: return HVAC_MODE_FAN_ONLY _LOGGER.warning("Operation mode %s could not be mapped to hass", mode) return None def melissa_fan_to_hass(self, fan): """Translate Melissa fan modes to hass modes.""" if fan == self._api.FAN_AUTO: return HVAC_MODE_AUTO if fan == self._api.FAN_LOW: return FAN_LOW if fan == self._api.FAN_MEDIUM: return FAN_MEDIUM if fan == self._api.FAN_HIGH: return FAN_HIGH _LOGGER.warning("Fan mode %s could not be mapped to hass", fan) return None def hass_mode_to_melissa(self, mode): """Translate hass states to melissa modes.""" if mode == HVAC_MODE_HEAT: return self._api.MODE_HEAT if mode == HVAC_MODE_COOL: return self._api.MODE_COOL if mode == HVAC_MODE_DRY: return self._api.MODE_DRY if mode == HVAC_MODE_FAN_ONLY: return self._api.MODE_FAN _LOGGER.warning("Melissa have no setting for %s mode", mode) def hass_fan_to_melissa(self, fan): """Translate hass fan modes to melissa modes.""" if fan == HVAC_MODE_AUTO: return self._api.FAN_AUTO if fan == FAN_LOW: return self._api.FAN_LOW if fan == FAN_MEDIUM: return self._api.FAN_MEDIUM if fan == FAN_HIGH: return self._api.FAN_HIGH _LOGGER.warning("Melissa have no setting for %s fan mode", fan)
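The mode-translation helpers above are pure lookups over the API client's constants, so they can be exercised without a running hass instance. A minimal pytest sketch, assuming a hypothetical FakeMelissaApi stub (its constant values are invented for the example, and importing the component assumes the usual Home Assistant test environment where component requirements are installed):

import pytest

from homeassistant.components.climate.const import (
    FAN_AUTO,
    FAN_HIGH,
    FAN_LOW,
    FAN_MEDIUM,
)
from homeassistant.components.melissa.climate import MelissaClimate


class FakeMelissaApi:
    """Hypothetical stand-in for the melissa API client, not the real library."""

    FAN = "fan"
    FAN_AUTO = 0
    FAN_LOW = 1
    FAN_MEDIUM = 2
    FAN_HIGH = 3


@pytest.mark.parametrize(
    "melissa_fan,hass_fan",
    [
        (FakeMelissaApi.FAN_AUTO, FAN_AUTO),
        (FakeMelissaApi.FAN_LOW, FAN_LOW),
        (FakeMelissaApi.FAN_MEDIUM, FAN_MEDIUM),
        (FakeMelissaApi.FAN_HIGH, FAN_HIGH),
    ],
)
def test_fan_translation_round_trip(melissa_fan, hass_fan):
    """Translating to hass and back should be lossless for every fan mode."""
    entity = MelissaClimate(
        FakeMelissaApi(), "12345678", {"name": "living room", "controller_log": {}}
    )
    assert entity.melissa_fan_to_hass(melissa_fan) == hass_fan
    assert entity.hass_fan_to_melissa(hass_fan) == melissa_fan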
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
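These tests repeat one idiom throughout: several unittest.mock.patch context managers stacked in a single with statement, so each external boundary (the HTTP ping, the WebSocket connect, the setup hooks) can independently be driven to succeed, fail, or be call-counted. A minimal self-contained sketch of that idiom, assuming toy stand-ins (ping, setup, and flow are invented for the example and are not Kodi or Home Assistant APIs):

import pytest

from unittest.mock import patch


def ping():
    """Stand-in for a network call; always replaced in tests."""
    raise RuntimeError("no network in tests")


def setup():
    """Stand-in for a setup hook; replaced so calls can be counted."""
    raise RuntimeError("no setup in tests")


def flow():
    """Toy flow: validate connectivity, then set up once."""
    ping()
    setup()
    return "create_entry"


def test_flow_success():
    # Stack one patch per boundary, exactly as the Kodi tests do.
    with patch(f"{__name__}.ping", return_value=True), patch(
        f"{__name__}.setup", return_value=True
    ) as mock_setup:
        assert flow() == "create_entry"
    assert len(mock_setup.mock_calls) == 1


def test_flow_cannot_connect():
    # side_effect raises instead of returning, modeling a dead host.
    with patch(f"{__name__}.ping", side_effect=ConnectionError):
        with pytest.raises(ConnectionError):
            flow()

The return_value/side_effect split is the whole trick: the same patch target models a healthy host in one test and a failing one in the next, without any change to the code under test.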
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/melissa/climate.py
"""Support for VELUX KLF 200 devices.""" import logging from pyvlx import PyVLX, PyVLXException import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv DOMAIN = "velux" DATA_VELUX = "data_velux" SUPPORTED_DOMAINS = ["cover", "scene"] _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( {vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string} ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the velux component.""" try: hass.data[DATA_VELUX] = VeluxModule(hass, config[DOMAIN]) hass.data[DATA_VELUX].setup() await hass.data[DATA_VELUX].async_start() except PyVLXException as ex: _LOGGER.exception("Can't connect to velux interface: %s", ex) return False for component in SUPPORTED_DOMAINS: hass.async_create_task( discovery.async_load_platform(hass, component, DOMAIN, {}, config) ) return True class VeluxModule: """Abstraction for velux component.""" def __init__(self, hass, domain_config): """Initialize for velux component.""" self.pyvlx = None self._hass = hass self._domain_config = domain_config def setup(self): """Velux component setup.""" async def on_hass_stop(event): """Close connection when hass stops.""" _LOGGER.debug("Velux interface terminated") await self.pyvlx.disconnect() async def async_reboot_gateway(service_call): await self.pyvlx.reboot_gateway() self._hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) host = self._domain_config.get(CONF_HOST) password = self._domain_config.get(CONF_PASSWORD) self.pyvlx = PyVLX(host=host, password=password) self._hass.services.async_register( DOMAIN, "reboot_gateway", async_reboot_gateway ) async def async_start(self): """Start velux component.""" _LOGGER.debug("Velux interface started") await self.pyvlx.load_scenes() await self.pyvlx.load_nodes()
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/velux/__init__.py
"""Insteon base entity.""" import functools import logging from pyinsteon import devices from homeassistant.core import callback from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from .const import ( DOMAIN, SIGNAL_ADD_DEFAULT_LINKS, SIGNAL_LOAD_ALDB, SIGNAL_PRINT_ALDB, SIGNAL_REMOVE_ENTITY, SIGNAL_SAVE_DEVICES, STATE_NAME_LABEL_MAP, ) from .utils import print_aldb_to_log _LOGGER = logging.getLogger(__name__) class InsteonEntity(Entity): """INSTEON abstract base entity.""" def __init__(self, device, group): """Initialize the INSTEON binary sensor.""" self._insteon_device_group = device.groups[group] self._insteon_device = device def __hash__(self): """Return the hash of the Insteon Entity.""" return hash(self._insteon_device) @property def should_poll(self): """No polling needed.""" return False @property def address(self): """Return the address of the node.""" return str(self._insteon_device.address) @property def group(self): """Return the INSTEON group that the entity responds to.""" return self._insteon_device_group.group @property def unique_id(self) -> str: """Return a unique ID.""" if self._insteon_device_group.group == 0x01: uid = self._insteon_device.id else: uid = f"{self._insteon_device.id}_{self._insteon_device_group.group}" return uid @property def name(self): """Return the name of the node (used for Entity_ID).""" # Set a base description description = self._insteon_device.description if description is None: description = "Unknown Device" # Get an extension label if there is one extension = self._get_label() if extension: extension = f" {extension}" return f"{description} {self._insteon_device.address}{extension}" @property def device_state_attributes(self): """Provide attributes for display on device card.""" return {"insteon_address": self.address, "insteon_group": self.group} @property def device_info(self): """Return device information.""" return { "identifiers": {(DOMAIN, str(self._insteon_device.address))}, "name": f"{self._insteon_device.description} {self._insteon_device.address}", "model": f"{self._insteon_device.model} ({self._insteon_device.cat!r}, 0x{self._insteon_device.subcat:02x})", "sw_version": f"{self._insteon_device.firmware:02x} Engine Version: {self._insteon_device.engine_version}", "manufacturer": "Smart Home", "via_device": (DOMAIN, str(devices.modem.address)), } @callback def async_entity_update(self, name, address, value, group): """Receive notification from transport that new data exists.""" _LOGGER.debug( "Received update for device %s group %d value %s", address, group, value, ) self.async_write_ha_state() async def async_added_to_hass(self): """Register INSTEON update events.""" _LOGGER.debug( "Tracking updates for device %s group %d name %s", self.address, self.group, self._insteon_device_group.name, ) self._insteon_device_group.subscribe(self.async_entity_update) load_signal = f"{self.entity_id}_{SIGNAL_LOAD_ALDB}" self.async_on_remove( async_dispatcher_connect(self.hass, load_signal, self._async_read_aldb) ) print_signal = f"{self.entity_id}_{SIGNAL_PRINT_ALDB}" async_dispatcher_connect(self.hass, print_signal, self._print_aldb) default_links_signal = f"{self.entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}" async_dispatcher_connect( self.hass, default_links_signal, self._async_add_default_links ) remove_signal = f"{self._insteon_device.address.id}_{SIGNAL_REMOVE_ENTITY}" self.async_on_remove( async_dispatcher_connect( self.hass, remove_signal, 
functools.partial(self.async_remove, force_remove=True), ) ) async def async_will_remove_from_hass(self): """Unsubscribe to INSTEON update events.""" _LOGGER.debug( "Remove tracking updates for device %s group %d name %s", self.address, self.group, self._insteon_device_group.name, ) self._insteon_device_group.unsubscribe(self.async_entity_update) async def _async_read_aldb(self, reload): """Call device load process and print to log.""" await self._insteon_device.aldb.async_load(refresh=reload) self._print_aldb() async_dispatcher_send(self.hass, SIGNAL_SAVE_DEVICES) def _print_aldb(self): """Print the device ALDB to the log file.""" print_aldb_to_log(self._insteon_device.aldb) def _get_label(self): """Get the device label for grouped devices.""" label = "" if len(self._insteon_device.groups) > 1: if self._insteon_device_group.name in STATE_NAME_LABEL_MAP: label = STATE_NAME_LABEL_MAP[self._insteon_device_group.name] else: label = f"Group {self.group:d}" return label async def _async_add_default_links(self): """Add default links between the device and the modem.""" await self._insteon_device.async_add_default_links()
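The unique_id property above encodes one small rule: group 0x01 is a device's primary group and gets the bare device id, while any other group gets a suffixed id so several entities from one physical device stay distinct. A standalone sketch of that rule, using types.SimpleNamespace as a hypothetical stand-in for pyinsteon's device and group objects:

from types import SimpleNamespace


def insteon_unique_id(device, device_group):
    """Mirror InsteonEntity.unique_id: bare id for group 0x01, suffixed otherwise."""
    if device_group.group == 0x01:
        return device.id
    return f"{device.id}_{device_group.group}"


device = SimpleNamespace(id="1a2b3c")
primary = SimpleNamespace(group=0x01)
secondary = SimpleNamespace(group=0x02)

assert insteon_unique_id(device, primary) == "1a2b3c"
assert insteon_unique_id(device, secondary) == "1a2b3c_2"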
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {"base": "unknown"}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}


async def test_form_cannot_connect_http(hass, user_flow):
    """Test we handle cannot connect over HTTP error."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "cannot_connect"}


async def test_form_exception_http(hass, user_flow):
    """Test we handle generic exception over HTTP."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "unknown"}


async def test_form_cannot_connect_ws(hass, user_flow):
    """Test we handle cannot connect over WebSocket error."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connected", new_callable=PropertyMock(return_value=False)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "cannot_connect"}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "cannot_connect"}


async def test_form_exception_ws(hass, user_flow):
    """Test we handle generic exception over WebSocket."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connect", AsyncMock(side_effect=Exception)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "unknown"}


async def test_discovery(hass):
    """Test discovery flow works."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "discovery_confirm"

    with patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(
            flow_id=result["flow_id"], user_input={}
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == "hostname"
    assert result["data"] == {
        **TEST_HOST,
        **TEST_WS_PORT,
        "password": None,
        "username": None,
        "name": "hostname",
        "timeout": DEFAULT_TIMEOUT,
    }

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1


async def test_discovery_cannot_connect_http(hass):
    """Test discovery aborts if cannot connect."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"


async def test_discovery_cannot_connect_ws(hass):
    """Test discovery aborts if cannot connect to websocket."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}


async def test_discovery_exception_http(hass, user_flow):
    """Test we handle generic exception during discovery validation."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "abort"
    assert result["reason"] == "unknown"


async def test_discovery_invalid_auth(hass):
    """Test we handle invalid auth during discovery."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {}


async def test_discovery_duplicate_data(hass):
    """Test discovery aborts if same mDNS packet arrives."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "discovery_confirm"

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )
    assert result["type"] == "abort"
    assert result["reason"] == "already_in_progress"


async def test_discovery_updates_unique_id(hass):
    """Test a duplicate discovery id aborts and updates existing entry."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id=UUID,
        data={"host": "dummy", "port": 11, "name": "dummy.local."},
    )
    entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )

    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"

    assert entry.data["host"] == "1.1.1.1"
    assert entry.data["port"] == 8080
    assert entry.data["name"] == "hostname"


async def test_discovery_without_unique_id(hass):
    """Test a discovery flow with no unique id aborts."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID
    )

    assert result["type"] == "abort"
    assert result["reason"] == "no_uuid"


async def test_form_import(hass):
    """Test we get the form with import source."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ), patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == TEST_IMPORT["name"]
    assert result["data"] == TEST_IMPORT

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1


async def test_form_import_invalid_auth(hass):
    """Test we handle invalid auth on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "invalid_auth"


async def test_form_import_cannot_connect(hass):
    """Test we handle cannot connect on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"


async def test_form_import_exception(hass):
    """Test we handle unknown exception on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "unknown"
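Every test above re-declares the same pair of `patch(...)` calls for `Kodi.ping` and `get_kodi_connection`. As a minimal refactoring sketch (hypothetical; `patched_kodi` is not part of the test file above), that boilerplate could be bundled into one context manager:

from contextlib import contextmanager
from unittest.mock import patch


@contextmanager
def patched_kodi(ping_result, connection):
    """Bundle the two patches every Kodi flow test repeats.

    ``ping_result`` is either a value to return from ``Kodi.ping`` or an
    exception class to raise from it.
    """
    if isinstance(ping_result, type) and issubclass(ping_result, Exception):
        ping_kwargs = {"side_effect": ping_result}
    else:
        ping_kwargs = {"return_value": ping_result}
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping", **ping_kwargs
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=connection,
    ):
        yield

A test body would then shrink to `with patched_kodi(CannotConnectError, MockConnection()): ...` while exercising exactly the same code paths.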
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/insteon/insteon_entity.py
"""Config flow to configure the Toon component.""" import logging from typing import Any, Dict, List, Optional from toonapi import Agreement, Toon, ToonError import voluptuous as vol from homeassistant import config_entries from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler from .const import CONF_AGREEMENT, CONF_AGREEMENT_ID, CONF_MIGRATE, DOMAIN class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle a Toon config flow.""" CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH DOMAIN = DOMAIN VERSION = 2 agreements: Optional[List[Agreement]] = None data: Optional[Dict[str, Any]] = None @property def logger(self) -> logging.Logger: """Return logger.""" return logging.getLogger(__name__) async def async_oauth_create_entry(self, data: Dict[str, Any]) -> Dict[str, Any]: """Test connection and load up agreements.""" self.data = data toon = Toon( token=self.data["token"]["access_token"], session=async_get_clientsession(self.hass), ) try: self.agreements = await toon.agreements() except ToonError: return self.async_abort(reason="connection_error") if not self.agreements: return self.async_abort(reason="no_agreements") return await self.async_step_agreement() async def async_step_import( self, config: Optional[Dict[str, Any]] = None ) -> Dict[str, Any]: """Start a configuration flow based on imported data. This step is merely here to trigger "discovery" when the `toon` integration is listed in the user configuration, or when migrating from the version 1 schema. """ if config is not None and CONF_MIGRATE in config: self.context.update({CONF_MIGRATE: config[CONF_MIGRATE]}) else: await self._async_handle_discovery_without_unique_id() return await self.async_step_user() async def async_step_agreement( self, user_input: Dict[str, Any] = None ) -> Dict[str, Any]: """Select Toon agreement to add.""" if len(self.agreements) == 1: return await self._create_entry(self.agreements[0]) agreements_list = [ f"{agreement.street} {agreement.house_number}, {agreement.city}" for agreement in self.agreements ] if user_input is None: return self.async_show_form( step_id="agreement", data_schema=vol.Schema( {vol.Required(CONF_AGREEMENT): vol.In(agreements_list)} ), ) agreement_index = agreements_list.index(user_input[CONF_AGREEMENT]) return await self._create_entry(self.agreements[agreement_index]) async def _create_entry(self, agreement: Agreement) -> Dict[str, Any]: if CONF_MIGRATE in self.context: await self.hass.config_entries.async_remove(self.context[CONF_MIGRATE]) await self.async_set_unique_id(agreement.agreement_id) self._abort_if_unique_id_configured() self.data[CONF_AGREEMENT_ID] = agreement.agreement_id return self.async_create_entry( title=f"{agreement.street} {agreement.house_number}, {agreement.city}", data=self.data, )
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/toon/config_flow.py
"""Support for Bond fans.""" import logging import math from typing import Any, Callable, List, Optional, Tuple from bond_api import Action, BPUPSubscriptions, DeviceType, Direction from homeassistant.components.fan import ( DIRECTION_FORWARD, DIRECTION_REVERSE, SUPPORT_DIRECTION, SUPPORT_SET_SPEED, FanEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.util.percentage import ( percentage_to_ranged_value, ranged_value_to_percentage, ) from .const import BPUP_SUBS, DOMAIN, HUB from .entity import BondEntity from .utils import BondDevice, BondHub _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up Bond fan devices.""" data = hass.data[DOMAIN][entry.entry_id] hub: BondHub = data[HUB] bpup_subs: BPUPSubscriptions = data[BPUP_SUBS] fans = [ BondFan(hub, device, bpup_subs) for device in hub.devices if DeviceType.is_fan(device.type) ] async_add_entities(fans, True) class BondFan(BondEntity, FanEntity): """Representation of a Bond fan.""" def __init__(self, hub: BondHub, device: BondDevice, bpup_subs: BPUPSubscriptions): """Create HA entity representing Bond fan.""" super().__init__(hub, device, bpup_subs) self._power: Optional[bool] = None self._speed: Optional[int] = None self._direction: Optional[int] = None def _apply_state(self, state: dict): self._power = state.get("power") self._speed = state.get("speed") self._direction = state.get("direction") @property def supported_features(self) -> int: """Flag supported features.""" features = 0 if self._device.supports_speed(): features |= SUPPORT_SET_SPEED if self._device.supports_direction(): features |= SUPPORT_DIRECTION return features @property def _speed_range(self) -> Tuple[int, int]: """Return the range of speeds.""" return (1, self._device.props.get("max_speed", 3)) @property def percentage(self) -> Optional[str]: """Return the current speed percentage for the fan.""" if not self._speed or not self._power: return 0 return ranged_value_to_percentage(self._speed_range, self._speed) @property def current_direction(self) -> Optional[str]: """Return fan rotation direction.""" direction = None if self._direction == Direction.FORWARD: direction = DIRECTION_FORWARD elif self._direction == Direction.REVERSE: direction = DIRECTION_REVERSE return direction async def async_set_percentage(self, percentage: int) -> None: """Set the desired speed for the fan.""" _LOGGER.debug("async_set_percentage called with percentage %s", percentage) if percentage == 0: await self.async_turn_off() return bond_speed = math.ceil( percentage_to_ranged_value(self._speed_range, percentage) ) _LOGGER.debug( "async_set_percentage converted percentage %s to bond speed %s", percentage, bond_speed, ) await self._hub.bond.action( self._device.device_id, Action.set_speed(bond_speed) ) async def async_turn_on( self, speed: Optional[str] = None, percentage: Optional[int] = None, preset_mode: Optional[str] = None, **kwargs, ) -> None: """Turn on the fan.""" _LOGGER.debug("Fan async_turn_on called with percentage %s", percentage) if percentage is not None: await self.async_set_percentage(percentage) else: await self._hub.bond.action(self._device.device_id, Action.turn_on()) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the fan off.""" await self._hub.bond.action(self._device.device_id, Action.turn_off()) async def 
async_set_direction(self, direction: str): """Set fan rotation direction.""" bond_direction = ( Direction.REVERSE if direction == DIRECTION_REVERSE else Direction.FORWARD ) await self._hub.bond.action( self._device.device_id, Action.set_direction(bond_direction) )
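The speed handling above round-trips between Bond's integer speed range and Home Assistant's 1-100 percentage scale via `homeassistant.util.percentage`. Below is a self-contained sketch of that math with stand-in helpers assuming plain linear scaling; the real helpers may handle range offsets and rounding differently:

import math

# Stand-ins for homeassistant.util.percentage helpers (assumption:
# plain linear scaling over an inclusive (1, max_speed) range).
def percentage_to_ranged_value(speed_range, percentage):
    _low, high = speed_range
    return high * percentage / 100


def ranged_value_to_percentage(speed_range, value):
    _low, high = speed_range
    return int(value * 100 / high)


speed_range = (1, 3)  # matches the max_speed=3 default in the entity above
for percentage in (1, 34, 67, 100):
    # math.ceil ensures any nonzero percentage maps to at least speed 1,
    # mirroring async_set_percentage above.
    bond_speed = math.ceil(percentage_to_ranged_value(speed_range, percentage))
    print(f"{percentage}% -> speed {bond_speed} -> "
          f"{ranged_value_to_percentage(speed_range, bond_speed)}%")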
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/bond/fan.py
"""Support for Netgear LTE binary sensors.""" from homeassistant.components.binary_sensor import DOMAIN, BinarySensorEntity from homeassistant.exceptions import PlatformNotReady from . import CONF_MONITORED_CONDITIONS, DATA_KEY, LTEEntity from .sensor_types import BINARY_SENSOR_CLASSES async def async_setup_platform(hass, config, async_add_entities, discovery_info): """Set up Netgear LTE binary sensor devices.""" if discovery_info is None: return modem_data = hass.data[DATA_KEY].get_modem_data(discovery_info) if not modem_data or not modem_data.data: raise PlatformNotReady binary_sensor_conf = discovery_info[DOMAIN] monitored_conditions = binary_sensor_conf[CONF_MONITORED_CONDITIONS] binary_sensors = [] for sensor_type in monitored_conditions: binary_sensors.append(LTEBinarySensor(modem_data, sensor_type)) async_add_entities(binary_sensors) class LTEBinarySensor(LTEEntity, BinarySensorEntity): """Netgear LTE binary sensor entity.""" @property def is_on(self): """Return true if the binary sensor is on.""" return getattr(self.modem_data.data, self.sensor_type) @property def device_class(self): """Return the class of binary sensor.""" return BINARY_SENSOR_CLASSES[self.sensor_type]
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {"base": "unknown"}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}


async def test_form_cannot_connect_http(hass, user_flow):
    """Test we handle cannot connect over HTTP error."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "cannot_connect"}


async def test_form_exception_http(hass, user_flow):
    """Test we handle generic exception over HTTP."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "unknown"}


async def test_form_cannot_connect_ws(hass, user_flow):
    """Test we handle cannot connect over WebSocket error."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connected", new_callable=PropertyMock(return_value=False)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "cannot_connect"}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "cannot_connect"}


async def test_form_exception_ws(hass, user_flow):
    """Test we handle generic exception over WebSocket."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connect", AsyncMock(side_effect=Exception)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "unknown"}


async def test_discovery(hass):
    """Test discovery flow works."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "discovery_confirm"

    with patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(
            flow_id=result["flow_id"], user_input={}
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == "hostname"
    assert result["data"] == {
        **TEST_HOST,
        **TEST_WS_PORT,
        "password": None,
        "username": None,
        "name": "hostname",
        "timeout": DEFAULT_TIMEOUT,
    }

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1


async def test_discovery_cannot_connect_http(hass):
    """Test discovery aborts if cannot connect."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"


async def test_discovery_cannot_connect_ws(hass):
    """Test discovery aborts if cannot connect to websocket."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}


async def test_discovery_exception_http(hass, user_flow):
    """Test we handle generic exception during discovery validation."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "abort"
    assert result["reason"] == "unknown"


async def test_discovery_invalid_auth(hass):
    """Test we handle invalid auth during discovery."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {}


async def test_discovery_duplicate_data(hass):
    """Test discovery aborts if same mDNS packet arrives."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

        assert result["type"] == "form"
        assert result["step_id"] == "discovery_confirm"

        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )
        assert result["type"] == "abort"
        assert result["reason"] == "already_in_progress"


async def test_discovery_updates_unique_id(hass):
    """Test a duplicate discovery id aborts and updates existing entry."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id=UUID,
        data={"host": "dummy", "port": 11, "namename": "dummy.local."},
    )

    entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )

    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"

    assert entry.data["host"] == "1.1.1.1"
    assert entry.data["port"] == 8080
    assert entry.data["name"] == "hostname"


async def test_discovery_without_unique_id(hass):
    """Test a discovery flow with no unique id aborts."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID
    )

    assert result["type"] == "abort"
    assert result["reason"] == "no_uuid"


async def test_form_import(hass):
    """Test we get the form with import source."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ), patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == TEST_IMPORT["name"]
    assert result["data"] == TEST_IMPORT

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1


async def test_form_import_invalid_auth(hass):
    """Test we handle invalid auth on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "invalid_auth"


async def test_form_import_cannot_connect(hass):
    """Test we handle cannot connect on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"


async def test_form_import_exception(hass):
    """Test we handle unknown exception on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "unknown"
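A note on the idiom used throughout these tests: several patch()/patch.object() context managers are stacked in a single with statement so that the HTTP ping and the WebSocket connection can be faked independently. Below is a minimal standalone sketch of the same pattern; Client and validate are hypothetical stand-ins for the Kodi connection and the flow's validation step, not part of the test suite above.

import asyncio
from unittest.mock import AsyncMock, patch


class Client:
    """Hypothetical stand-in for the Kodi HTTP/WS client."""

    async def ping(self):
        raise ConnectionError("real network call")


async def validate(client):
    return await client.ping()


async def main():
    # Each patch targets one seam; the with-block scopes them all together,
    # exactly as the tests above stack Kodi.ping and get_kodi_connection.
    with patch.object(Client, "ping", AsyncMock(return_value=True)):
        assert await validate(Client()) is True


asyncio.run(main())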
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/netgear_lte/binary_sensor.py
"""Event parser and human readable log generator.""" from datetime import timedelta from itertools import groupby import json import re import sqlalchemy from sqlalchemy.orm import aliased from sqlalchemy.sql.expression import literal import voluptuous as vol from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED from homeassistant.components.history import sqlalchemy_filter_from_include_exclude_conf from homeassistant.components.http import HomeAssistantView from homeassistant.components.recorder.models import ( Events, States, process_timestamp_to_utc_isoformat, ) from homeassistant.components.recorder.util import session_scope from homeassistant.components.script import EVENT_SCRIPT_STARTED from homeassistant.const import ( ATTR_DOMAIN, ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_NAME, ATTR_SERVICE, EVENT_CALL_SERVICE, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_LOGBOOK_ENTRY, EVENT_STATE_CHANGED, HTTP_BAD_REQUEST, ) from homeassistant.core import DOMAIN as HA_DOMAIN, callback, split_entity_id from homeassistant.exceptions import InvalidEntityFormatError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entityfilter import ( INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, convert_include_exclude_filter, generate_filter, ) from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) from homeassistant.loader import bind_hass import homeassistant.util.dt as dt_util ENTITY_ID_JSON_TEMPLATE = '"entity_id": "{}"' ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": "([^"]+)"') DOMAIN_JSON_EXTRACT = re.compile('"domain": "([^"]+)"') ICON_JSON_EXTRACT = re.compile('"icon": "([^"]+)"') ATTR_MESSAGE = "message" CONTINUOUS_DOMAINS = ["proximity", "sensor"] DOMAIN = "logbook" GROUP_BY_MINUTES = 15 EMPTY_JSON_OBJECT = "{}" UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":' HA_DOMAIN_ENTITY_ID = f"{HA_DOMAIN}." 
CONFIG_SCHEMA = vol.Schema( {DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA ) HOMEASSISTANT_EVENTS = [ EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ] ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = [ EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE, *HOMEASSISTANT_EVENTS, ] ALL_EVENT_TYPES = [ EVENT_STATE_CHANGED, *ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, ] EVENT_COLUMNS = [ Events.event_type, Events.event_data, Events.time_fired, Events.context_id, Events.context_user_id, Events.context_parent_id, ] SCRIPT_AUTOMATION_EVENTS = [EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED] LOG_MESSAGE_SCHEMA = vol.Schema( { vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_MESSAGE): cv.template, vol.Optional(ATTR_DOMAIN): cv.slug, vol.Optional(ATTR_ENTITY_ID): cv.entity_id, } ) @bind_hass def log_entry(hass, name, message, domain=None, entity_id=None, context=None): """Add an entry to the logbook.""" hass.add_job(async_log_entry, hass, name, message, domain, entity_id, context) @bind_hass def async_log_entry(hass, name, message, domain=None, entity_id=None, context=None): """Add an entry to the logbook.""" data = {ATTR_NAME: name, ATTR_MESSAGE: message} if domain is not None: data[ATTR_DOMAIN] = domain if entity_id is not None: data[ATTR_ENTITY_ID] = entity_id hass.bus.async_fire(EVENT_LOGBOOK_ENTRY, data, context=context) async def async_setup(hass, config): """Logbook setup.""" hass.data[DOMAIN] = {} @callback def log_message(service): """Handle sending notification message service calls.""" message = service.data[ATTR_MESSAGE] name = service.data[ATTR_NAME] domain = service.data.get(ATTR_DOMAIN) entity_id = service.data.get(ATTR_ENTITY_ID) if entity_id is None and domain is None: # If there is no entity_id or # domain, the event will get filtered # away so we use the "logbook" domain domain = DOMAIN message.hass = hass message = message.async_render(parse_result=False) async_log_entry(hass, name, message, domain, entity_id) hass.components.frontend.async_register_built_in_panel( "logbook", "logbook", "hass:format-list-bulleted-type" ) conf = config.get(DOMAIN, {}) if conf: filters = sqlalchemy_filter_from_include_exclude_conf(conf) entities_filter = convert_include_exclude_filter(conf) else: filters = None entities_filter = None hass.http.register_view(LogbookView(conf, filters, entities_filter)) hass.services.async_register(DOMAIN, "log", log_message, schema=LOG_MESSAGE_SCHEMA) await async_process_integration_platforms(hass, DOMAIN, _process_logbook_platform) return True async def _process_logbook_platform(hass, domain, platform): """Process a logbook platform.""" @callback def _async_describe_event(domain, event_name, describe_callback): """Teach logbook how to describe a new event.""" hass.data[DOMAIN][event_name] = (domain, describe_callback) platform.async_describe_events(hass, _async_describe_event) class LogbookView(HomeAssistantView): """Handle logbook view requests.""" url = "/api/logbook" name = "api:logbook" extra_urls = ["/api/logbook/{datetime}"] def __init__(self, config, filters, entities_filter): """Initialize the logbook view.""" self.config = config self.filters = filters self.entities_filter = entities_filter async def get(self, request, datetime=None): """Retrieve logbook entries.""" if datetime: datetime = dt_util.parse_datetime(datetime) if datetime is None: return self.json_message("Invalid datetime", HTTP_BAD_REQUEST) else: datetime = dt_util.start_of_local_day() period = request.query.get("period") if period is None: period = 1 else: period = int(period) 
        entity_ids = request.query.get("entity")
        if entity_ids:
            try:
                entity_ids = cv.entity_ids(entity_ids)
            except vol.Invalid:
                raise InvalidEntityFormatError(
                    f"Invalid entity id(s) encountered: {entity_ids}. "
                    "Format should be <domain>.<object_id>"
                ) from vol.Invalid

        end_time = request.query.get("end_time")
        if end_time is None:
            start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
            end_day = start_day + timedelta(days=period)
        else:
            start_day = datetime
            end_day = dt_util.parse_datetime(end_time)
            if end_day is None:
                return self.json_message("Invalid end_time", HTTP_BAD_REQUEST)

        hass = request.app["hass"]

        entity_matches_only = "entity_matches_only" in request.query

        def json_events():
            """Fetch events and generate JSON."""
            return self.json(
                _get_events(
                    hass,
                    start_day,
                    end_day,
                    entity_ids,
                    self.filters,
                    self.entities_filter,
                    entity_matches_only,
                )
            )

        return await hass.async_add_executor_job(json_events)


def humanify(hass, events, entity_attr_cache, context_lookup):
    """Generate a converted list of events into Entry objects.

    Will try to group events if possible:
    - if 2+ sensor updates in GROUP_BY_MINUTES, show last
    - if Home Assistant stop and start happen in same minute call it restarted
    """
    external_events = hass.data.get(DOMAIN, {})

    # Group events in batches of GROUP_BY_MINUTES
    for _, g_events in groupby(
        events, lambda event: event.time_fired_minute // GROUP_BY_MINUTES
    ):

        events_batch = list(g_events)

        # Keep track of last sensor states
        last_sensor_event = {}

        # Group HA start/stop events
        # Maps minute of event to 1: stop, 2: stop + start
        start_stop_events = {}

        # Process events
        for event in events_batch:
            if event.event_type == EVENT_STATE_CHANGED:
                if event.domain in CONTINUOUS_DOMAINS:
                    last_sensor_event[event.entity_id] = event

            elif event.event_type == EVENT_HOMEASSISTANT_STOP:
                if event.time_fired_minute in start_stop_events:
                    continue

                start_stop_events[event.time_fired_minute] = 1

            elif event.event_type == EVENT_HOMEASSISTANT_START:
                if event.time_fired_minute not in start_stop_events:
                    continue

                start_stop_events[event.time_fired_minute] = 2

        # Yield entries
        for event in events_batch:
            if event.event_type == EVENT_STATE_CHANGED:
                entity_id = event.entity_id
                domain = event.domain

                if (
                    domain in CONTINUOUS_DOMAINS
                    and event != last_sensor_event[entity_id]
                ):
                    # Skip all but the last sensor state
                    continue

                data = {
                    "when": event.time_fired_isoformat,
                    "name": _entity_name_from_event(
                        entity_id, event, entity_attr_cache
                    ),
                    "state": event.state,
                    "entity_id": entity_id,
                }

                icon = event.attributes_icon
                if icon:
                    data["icon"] = icon

                if event.context_user_id:
                    data["context_user_id"] = event.context_user_id

                _augment_data_with_context(
                    data,
                    entity_id,
                    event,
                    context_lookup,
                    entity_attr_cache,
                    external_events,
                )

                yield data

            elif event.event_type in external_events:
                domain, describe_event = external_events[event.event_type]
                data = describe_event(event)
                data["when"] = event.time_fired_isoformat
                data["domain"] = domain
                if event.context_user_id:
                    data["context_user_id"] = event.context_user_id

                _augment_data_with_context(
                    data,
                    data.get(ATTR_ENTITY_ID),
                    event,
                    context_lookup,
                    entity_attr_cache,
                    external_events,
                )
                yield data

            elif event.event_type == EVENT_HOMEASSISTANT_START:
                if start_stop_events.get(event.time_fired_minute) == 2:
                    continue

                yield {
                    "when": event.time_fired_isoformat,
                    "name": "Home Assistant",
                    "message": "started",
                    "domain": HA_DOMAIN,
                }

            elif event.event_type == EVENT_HOMEASSISTANT_STOP:
                if start_stop_events.get(event.time_fired_minute) == 2:
                    action = "restarted"
                else:
                    action = "stopped"

                yield {
                    "when": event.time_fired_isoformat,
                    "name": "Home Assistant",
                    "message": action,
                    "domain": HA_DOMAIN,
                }

            elif event.event_type == EVENT_LOGBOOK_ENTRY:
                event_data = event.data
                domain = event_data.get(ATTR_DOMAIN)
                entity_id = event_data.get(ATTR_ENTITY_ID)
                if domain is None and entity_id is not None:
                    try:
                        domain = split_entity_id(str(entity_id))[0]
                    except IndexError:
                        pass

                data = {
                    "when": event.time_fired_isoformat,
                    "name": event_data.get(ATTR_NAME),
                    "message": event_data.get(ATTR_MESSAGE),
                    "domain": domain,
                    "entity_id": entity_id,
                }

                if event.context_user_id:
                    data["context_user_id"] = event.context_user_id

                _augment_data_with_context(
                    data,
                    entity_id,
                    event,
                    context_lookup,
                    entity_attr_cache,
                    external_events,
                )

                yield data


def _get_events(
    hass,
    start_day,
    end_day,
    entity_ids=None,
    filters=None,
    entities_filter=None,
    entity_matches_only=False,
):
    """Get events for a period of time."""
    entity_attr_cache = EntityAttributeCache(hass)
    context_lookup = {None: None}

    def yield_events(query):
        """Yield Events that are not filtered away."""
        for row in query.yield_per(1000):
            event = LazyEventPartialState(row)
            context_lookup.setdefault(event.context_id, event)
            if event.event_type == EVENT_CALL_SERVICE:
                continue
            if event.event_type == EVENT_STATE_CHANGED or _keep_event(
                hass, event, entities_filter
            ):
                yield event

    if entity_ids is not None:
        entities_filter = generate_filter([], entity_ids, [], [])

    with session_scope(hass=hass) as session:
        old_state = aliased(States, name="old_state")

        if entity_ids is not None:
            query = _generate_events_query_without_states(session)
            query = _apply_event_time_filter(query, start_day, end_day)
            query = _apply_event_types_filter(
                hass, query, ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED
            )
            if entity_matches_only:
                # When entity_matches_only is provided, contexts and events that do not
                # contain the entity_ids are not included in the logbook response.
                query = _apply_event_entity_id_matchers(query, entity_ids)

            query = query.union_all(
                _generate_states_query(
                    session, start_day, end_day, old_state, entity_ids
                )
            )
        else:
            query = _generate_events_query(session)
            query = _apply_event_time_filter(query, start_day, end_day)
            query = _apply_events_types_and_states_filter(
                hass, query, old_state
            ).filter(
                (States.last_updated == States.last_changed)
                | (Events.event_type != EVENT_STATE_CHANGED)
            )

            if filters:
                query = query.filter(
                    filters.entity_filter() | (Events.event_type != EVENT_STATE_CHANGED)
                )

        query = query.order_by(Events.time_fired)

        return list(
            humanify(hass, yield_events(query), entity_attr_cache, context_lookup)
        )


def _generate_events_query(session):
    return session.query(
        *EVENT_COLUMNS,
        States.state,
        States.entity_id,
        States.domain,
        States.attributes,
    )


def _generate_events_query_without_states(session):
    return session.query(
        *EVENT_COLUMNS,
        literal(None).label("state"),
        literal(None).label("entity_id"),
        literal(None).label("domain"),
        literal(None).label("attributes"),
    )


def _generate_states_query(session, start_day, end_day, old_state, entity_ids):
    return (
        _generate_events_query(session)
        .outerjoin(Events, (States.event_id == Events.event_id))
        .outerjoin(old_state, (States.old_state_id == old_state.state_id))
        .filter(_missing_state_matcher(old_state))
        .filter(_continuous_entity_matcher())
        .filter((States.last_updated > start_day) & (States.last_updated < end_day))
        .filter(
            (States.last_updated == States.last_changed)
            & States.entity_id.in_(entity_ids)
        )
    )


def _apply_events_types_and_states_filter(hass, query, old_state):
    events_query = (
        query.outerjoin(States, (Events.event_id == States.event_id))
        .outerjoin(old_state, (States.old_state_id == old_state.state_id))
        .filter(
            (Events.event_type != EVENT_STATE_CHANGED)
            | _missing_state_matcher(old_state)
        )
        .filter(
            (Events.event_type != EVENT_STATE_CHANGED) | _continuous_entity_matcher()
        )
    )
    return _apply_event_types_filter(hass, events_query, ALL_EVENT_TYPES)


def _missing_state_matcher(old_state):
    # The below removes state change events that do not have
    # an old_state, or where the old_state is missing (newly added entities)
    # or the new_state is missing (removed entities)
    return sqlalchemy.and_(
        old_state.state_id.isnot(None),
        (States.state != old_state.state),
        States.state.isnot(None),
    )


def _continuous_entity_matcher():
    #
    # Prefilter out continuous domains that have
    # ATTR_UNIT_OF_MEASUREMENT, as it's much faster to do in SQL.
    #
    return sqlalchemy.or_(
        sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS)),
        sqlalchemy.not_(States.attributes.contains(UNIT_OF_MEASUREMENT_JSON)),
    )


def _apply_event_time_filter(events_query, start_day, end_day):
    return events_query.filter(
        (Events.time_fired > start_day) & (Events.time_fired < end_day)
    )


def _apply_event_types_filter(hass, query, event_types):
    return query.filter(
        Events.event_type.in_(event_types + list(hass.data.get(DOMAIN, {})))
    )


def _apply_event_entity_id_matchers(events_query, entity_ids):
    return events_query.filter(
        sqlalchemy.or_(
            *[
                Events.event_data.contains(ENTITY_ID_JSON_TEMPLATE.format(entity_id))
                for entity_id in entity_ids
            ]
        )
    )


def _keep_event(hass, event, entities_filter):
    if event.event_type in HOMEASSISTANT_EVENTS:
        return entities_filter is None or entities_filter(HA_DOMAIN_ENTITY_ID)

    entity_id = event.data_entity_id
    if entity_id:
        return entities_filter is None or entities_filter(entity_id)

    if event.event_type in hass.data[DOMAIN]:
        # If the entity_id isn't described, use the domain that describes
        # the event for filtering.
        domain = hass.data[DOMAIN][event.event_type][0]
    else:
        domain = event.data_domain

    if domain is None:
        return False

    return entities_filter is None or entities_filter(f"{domain}.")


def _augment_data_with_context(
    data, entity_id, event, context_lookup, entity_attr_cache, external_events
):
    context_event = context_lookup.get(event.context_id)

    if not context_event:
        return

    if event == context_event:
        # This is the first event with the given ID. Was it directly caused by
        # a parent event?
        if event.context_parent_id:
            context_event = context_lookup.get(event.context_parent_id)
        # Ensure the (parent) context_event exists and is not the root cause of
        # this log entry.
        if not context_event or event == context_event:
            return

    event_type = context_event.event_type
    context_entity_id = context_event.entity_id

    # State change
    if context_entity_id:
        data["context_entity_id"] = context_entity_id
        data["context_entity_id_name"] = _entity_name_from_event(
            context_entity_id, context_event, entity_attr_cache
        )
        data["context_event_type"] = event_type
        return

    event_data = context_event.data

    # Call service
    if event_type == EVENT_CALL_SERVICE:
        event_data = context_event.data
        data["context_domain"] = event_data.get(ATTR_DOMAIN)
        data["context_service"] = event_data.get(ATTR_SERVICE)
        data["context_event_type"] = event_type
        return

    if not entity_id:
        return

    attr_entity_id = event_data.get(ATTR_ENTITY_ID)
    if not attr_entity_id or (
        event_type in SCRIPT_AUTOMATION_EVENTS and attr_entity_id == entity_id
    ):
        return

    if context_event == event:
        return

    data["context_entity_id"] = attr_entity_id
    data["context_entity_id_name"] = _entity_name_from_event(
        attr_entity_id, context_event, entity_attr_cache
    )
    data["context_event_type"] = event_type

    if event_type in external_events:
        domain, describe_event = external_events[event_type]
        data["context_domain"] = domain
        name = describe_event(context_event).get(ATTR_NAME)
        if name:
            data["context_name"] = name


def _entity_name_from_event(entity_id, event, entity_attr_cache):
    """Extract the entity name from the event using the cache if possible."""
    return entity_attr_cache.get(
        entity_id, ATTR_FRIENDLY_NAME, event
    ) or split_entity_id(entity_id)[1].replace("_", " ")


class LazyEventPartialState:
    """A lazy version of core Event with limited State joined in."""

    __slots__ = [
        "_row",
        "_event_data",
        "_time_fired_isoformat",
        "_attributes",
        "event_type",
        "entity_id",
        "state",
        "domain",
        "context_id",
        "context_user_id",
        "context_parent_id",
        "time_fired_minute",
    ]

    def __init__(self, row):
        """Init the lazy event."""
        self._row = row
        self._event_data = None
        self._time_fired_isoformat = None
        self._attributes = None
        self.event_type = self._row.event_type
        self.entity_id = self._row.entity_id
        self.state = self._row.state
        self.domain = self._row.domain
        self.context_id = self._row.context_id
        self.context_user_id = self._row.context_user_id
        self.context_parent_id = self._row.context_parent_id
        self.time_fired_minute = self._row.time_fired.minute

    @property
    def attributes_icon(self):
        """Extract the icon from the decoded attributes or json."""
        if self._attributes:
            return self._attributes.get(ATTR_ICON)

        result = ICON_JSON_EXTRACT.search(self._row.attributes)
        return result and result.group(1)

    @property
    def data_entity_id(self):
        """Extract the entity id from the decoded data or json."""
        if self._event_data:
            return self._event_data.get(ATTR_ENTITY_ID)

        result = ENTITY_ID_JSON_EXTRACT.search(self._row.event_data)
        return result and result.group(1)

    @property
    def data_domain(self):
        """Extract the domain from the decoded data or json."""
        if self._event_data:
            return self._event_data.get(ATTR_DOMAIN)

        result = DOMAIN_JSON_EXTRACT.search(self._row.event_data)
        return result and result.group(1)

    @property
    def attributes(self):
        """State attributes."""
        if not self._attributes:
            if (
                self._row.attributes is None
                or self._row.attributes == EMPTY_JSON_OBJECT
            ):
                self._attributes = {}
            else:
                self._attributes = json.loads(self._row.attributes)
        return self._attributes

    @property
    def data(self):
        """Event data."""
        if not self._event_data:
            if self._row.event_data == EMPTY_JSON_OBJECT:
                self._event_data = {}
            else:
                self._event_data = json.loads(self._row.event_data)
        return self._event_data

    @property
    def time_fired_isoformat(self):
        """Time event was fired in utc isoformat."""
        if not self._time_fired_isoformat:
            self._time_fired_isoformat = process_timestamp_to_utc_isoformat(
                self._row.time_fired or dt_util.utcnow()
            )
        return self._time_fired_isoformat


class EntityAttributeCache:
    """A cache to look up static entity_id attributes.

    This class should not be used to look up attributes
    that are expected to change state.
    """

    def __init__(self, hass):
        """Init the cache."""
        self._hass = hass
        self._cache = {}

    def get(self, entity_id, attribute, event):
        """Lookup an attribute for an entity or get it from the cache."""
        if entity_id in self._cache:
            if attribute in self._cache[entity_id]:
                return self._cache[entity_id][attribute]
        else:
            self._cache[entity_id] = {}

        current_state = self._hass.states.get(entity_id)
        if current_state:
            # Try the current state, as it's faster than decoding the
            # attributes
            self._cache[entity_id][attribute] = current_state.attributes.get(attribute)
        else:
            # If the entity has been removed, decode the attributes
            # instead
            self._cache[entity_id][attribute] = event.attributes.get(attribute)

        return self._cache[entity_id][attribute]
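For reference, the batching at the top of humanify leans on itertools.groupby, which only groups consecutive items; that works here because the query orders events by time_fired. Below is a minimal standalone sketch of the same grouping, with hypothetical (minute, name) tuples standing in for LazyEventPartialState objects.

from itertools import groupby

GROUP_BY_MINUTES = 15

# Hypothetical events, already sorted by fire time; only the minute
# matters for batching.
events = [(0, "a"), (5, "b"), (14, "c"), (16, "d"), (31, "e")]

for _, batch in groupby(events, lambda event: event[0] // GROUP_BY_MINUTES):
    print(list(batch))
# [(0, 'a'), (5, 'b'), (14, 'c')]
# [(16, 'd')]
# [(31, 'e')]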
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/logbook/__init__.py
"""Support for Envisalink-based alarm control panels (Honeywell/DSC).""" import logging import voluptuous as vol from homeassistant.components.alarm_control_panel import ( FORMAT_NUMBER, AlarmControlPanelEntity, ) from homeassistant.components.alarm_control_panel.const import ( SUPPORT_ALARM_ARM_AWAY, SUPPORT_ALARM_ARM_HOME, SUPPORT_ALARM_ARM_NIGHT, SUPPORT_ALARM_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, CONF_CODE, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import ( CONF_PANIC, CONF_PARTITIONNAME, DATA_EVL, DOMAIN, PARTITION_SCHEMA, SIGNAL_KEYPAD_UPDATE, SIGNAL_PARTITION_UPDATE, EnvisalinkDevice, ) _LOGGER = logging.getLogger(__name__) SERVICE_ALARM_KEYPRESS = "alarm_keypress" ATTR_KEYPRESS = "keypress" ALARM_KEYPRESS_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(ATTR_KEYPRESS): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Perform the setup for Envisalink alarm panels.""" configured_partitions = discovery_info["partitions"] code = discovery_info[CONF_CODE] panic_type = discovery_info[CONF_PANIC] devices = [] for part_num in configured_partitions: device_config_data = PARTITION_SCHEMA(configured_partitions[part_num]) device = EnvisalinkAlarm( hass, part_num, device_config_data[CONF_PARTITIONNAME], code, panic_type, hass.data[DATA_EVL].alarm_state["partition"][part_num], hass.data[DATA_EVL], ) devices.append(device) async_add_entities(devices) @callback def alarm_keypress_handler(service): """Map services to methods on Alarm.""" entity_ids = service.data.get(ATTR_ENTITY_ID) keypress = service.data.get(ATTR_KEYPRESS) target_devices = [ device for device in devices if device.entity_id in entity_ids ] for device in target_devices: device.async_alarm_keypress(keypress) hass.services.async_register( DOMAIN, SERVICE_ALARM_KEYPRESS, alarm_keypress_handler, schema=ALARM_KEYPRESS_SCHEMA, ) return True class EnvisalinkAlarm(EnvisalinkDevice, AlarmControlPanelEntity): """Representation of an Envisalink-based alarm panel.""" def __init__( self, hass, partition_number, alarm_name, code, panic_type, info, controller ): """Initialize the alarm panel.""" self._partition_number = partition_number self._code = code self._panic_type = panic_type _LOGGER.debug("Setting up alarm: %s", alarm_name) super().__init__(alarm_name, info, controller) async def async_added_to_hass(self): """Register callbacks.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_KEYPAD_UPDATE, self._update_callback ) ) self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_PARTITION_UPDATE, self._update_callback ) ) @callback def _update_callback(self, partition): """Update Home Assistant state, if needed.""" if partition is None or int(partition) == self._partition_number: self.async_write_ha_state() @property def code_format(self): """Regex for code format or None if no code is required.""" if self._code: return None return FORMAT_NUMBER @property def state(self): """Return the state of the device.""" state = STATE_UNKNOWN if self._info["status"]["alarm"]: state = STATE_ALARM_TRIGGERED elif self._info["status"]["armed_zero_entry_delay"]: state = STATE_ALARM_ARMED_NIGHT elif self._info["status"]["armed_away"]: state = 
STATE_ALARM_ARMED_AWAY elif self._info["status"]["armed_stay"]: state = STATE_ALARM_ARMED_HOME elif self._info["status"]["exit_delay"]: state = STATE_ALARM_PENDING elif self._info["status"]["entry_delay"]: state = STATE_ALARM_PENDING elif self._info["status"]["alpha"]: state = STATE_ALARM_DISARMED return state @property def supported_features(self) -> int: """Return the list of supported features.""" return ( SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_NIGHT | SUPPORT_ALARM_TRIGGER ) async def async_alarm_disarm(self, code=None): """Send disarm command.""" if code: self.hass.data[DATA_EVL].disarm_partition(str(code), self._partition_number) else: self.hass.data[DATA_EVL].disarm_partition( str(self._code), self._partition_number ) async def async_alarm_arm_home(self, code=None): """Send arm home command.""" if code: self.hass.data[DATA_EVL].arm_stay_partition( str(code), self._partition_number ) else: self.hass.data[DATA_EVL].arm_stay_partition( str(self._code), self._partition_number ) async def async_alarm_arm_away(self, code=None): """Send arm away command.""" if code: self.hass.data[DATA_EVL].arm_away_partition( str(code), self._partition_number ) else: self.hass.data[DATA_EVL].arm_away_partition( str(self._code), self._partition_number ) async def async_alarm_trigger(self, code=None): """Alarm trigger command. Will be used to trigger a panic alarm.""" self.hass.data[DATA_EVL].panic_alarm(self._panic_type) async def async_alarm_arm_night(self, code=None): """Send arm night command.""" self.hass.data[DATA_EVL].arm_night_partition( str(code) if code else str(self._code), self._partition_number ) @callback def async_alarm_keypress(self, keypress=None): """Send custom keypress.""" if keypress: self.hass.data[DATA_EVL].keypresses_to_partition( self._partition_number, keypress )
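The supported_features property above composes a bitmask, and callers test membership with a bitwise AND. A small standalone sketch with hypothetical flag values (the real constants are imported from homeassistant.components.alarm_control_panel.const in the module above):

# Hypothetical flag values for illustration only.
SUPPORT_ALARM_ARM_HOME = 1
SUPPORT_ALARM_ARM_AWAY = 2
SUPPORT_ALARM_ARM_NIGHT = 4
SUPPORT_ALARM_TRIGGER = 8

features = (
    SUPPORT_ALARM_ARM_HOME
    | SUPPORT_ALARM_ARM_AWAY
    | SUPPORT_ALARM_ARM_NIGHT
    | SUPPORT_ALARM_TRIGGER
)

assert features & SUPPORT_ALARM_ARM_NIGHT  # night arming is advertised
assert not features & 16  # an undeclared feature tests false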
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/envisalink/alarm_control_panel.py
"""Support for file notification.""" import os import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) from homeassistant.const import CONF_FILENAME import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util CONF_TIMESTAMP = "timestamp" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_FILENAME): cv.string, vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean, } ) def get_service(hass, config, discovery_info=None): """Get the file notification service.""" filename = config[CONF_FILENAME] timestamp = config[CONF_TIMESTAMP] return FileNotificationService(hass, filename, timestamp) class FileNotificationService(BaseNotificationService): """Implement the notification service for the File service.""" def __init__(self, hass, filename, add_timestamp): """Initialize the service.""" self.filepath = os.path.join(hass.config.config_dir, filename) self.add_timestamp = add_timestamp def send_message(self, message="", **kwargs): """Send a message to a file.""" with open(self.filepath, "a") as file: if os.stat(self.filepath).st_size == 0: title = f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" file.write(title) if self.add_timestamp: text = f"{dt_util.utcnow().isoformat()} {message}\n" else: text = f"{message}\n" file.write(text)
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
homeassistant/components/file/notify.py
"""Config flow for Islamic Prayer Times integration.""" import voluptuous as vol from homeassistant import config_entries from homeassistant.core import callback # pylint: disable=unused-import from .const import CALC_METHODS, CONF_CALC_METHOD, DEFAULT_CALC_METHOD, DOMAIN, NAME class IslamicPrayerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle the Islamic Prayer config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return IslamicPrayerOptionsFlowHandler(config_entry) async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") if user_input is None: return self.async_show_form(step_id="user") return self.async_create_entry(title=NAME, data=user_input) async def async_step_import(self, import_config): """Import from config.""" return await self.async_step_user(user_input=import_config) class IslamicPrayerOptionsFlowHandler(config_entries.OptionsFlow): """Handle Islamic Prayer client options.""" def __init__(self, config_entry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Manage options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) options = { vol.Optional( CONF_CALC_METHOD, default=self.config_entry.options.get( CONF_CALC_METHOD, DEFAULT_CALC_METHOD ), ): vol.In(CALC_METHODS) } return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
homeassistant/components/islamic_prayer_times/config_flow.py
"""ONVIF event abstraction.""" import asyncio import datetime as dt from typing import Callable, Dict, List, Optional, Set from httpx import RemoteProtocolError, TransportError from onvif import ONVIFCamera, ONVIFService from zeep.exceptions import Fault from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback from homeassistant.helpers.event import async_call_later from homeassistant.util import dt as dt_util from .const import LOGGER from .models import Event from .parsers import PARSERS UNHANDLED_TOPICS = set() SUBSCRIPTION_ERRORS = ( Fault, asyncio.TimeoutError, TransportError, ) class EventManager: """ONVIF Event Manager.""" def __init__(self, hass: HomeAssistant, device: ONVIFCamera, unique_id: str): """Initialize event manager.""" self.hass: HomeAssistant = hass self.device: ONVIFCamera = device self.unique_id: str = unique_id self.started: bool = False self._subscription: ONVIFService = None self._events: Dict[str, Event] = {} self._listeners: List[CALLBACK_TYPE] = [] self._unsub_refresh: Optional[CALLBACK_TYPE] = None super().__init__() @property def platforms(self) -> Set[str]: """Return platforms to setup.""" return {event.platform for event in self._events.values()} @callback def async_add_listener(self, update_callback: CALLBACK_TYPE) -> Callable[[], None]: """Listen for data updates.""" # This is the first listener, set up polling. if not self._listeners: self.async_schedule_pull() self._listeners.append(update_callback) @callback def remove_listener() -> None: """Remove update listener.""" self.async_remove_listener(update_callback) return remove_listener @callback def async_remove_listener(self, update_callback: CALLBACK_TYPE) -> None: """Remove data update.""" if update_callback in self._listeners: self._listeners.remove(update_callback) if not self._listeners and self._unsub_refresh: self._unsub_refresh() self._unsub_refresh = None async def async_start(self) -> bool: """Start polling events.""" if await self.device.create_pullpoint_subscription(): # Create subscription manager self._subscription = self.device.create_subscription_service( "PullPointSubscription" ) # Renew immediately await self.async_renew() # Initialize events pullpoint = self.device.create_pullpoint_service() try: await pullpoint.SetSynchronizationPoint() except SUBSCRIPTION_ERRORS: pass response = await pullpoint.PullMessages( {"MessageLimit": 100, "Timeout": dt.timedelta(seconds=5)} ) # Parse event initialization await self.async_parse_messages(response.NotificationMessage) self.started = True return True return False async def async_stop(self) -> None: """Unsubscribe from events.""" self._listeners = [] self.started = False if not self._subscription: return await self._subscription.Unsubscribe() self._subscription = None async def async_restart(self, _now: dt = None) -> None: """Restart the subscription assuming the camera rebooted.""" if not self.started: return if self._subscription: try: await self._subscription.Unsubscribe() except SUBSCRIPTION_ERRORS: pass # Ignored. The subscription may no longer exist. self._subscription = None try: restarted = await self.async_start() except SUBSCRIPTION_ERRORS: restarted = False if not restarted: LOGGER.warning( "Failed to restart ONVIF PullPoint subscription for '%s'. 
Retrying...", self.unique_id, ) # Try again in a minute self._unsub_refresh = async_call_later(self.hass, 60, self.async_restart) elif self._listeners: LOGGER.debug( "Restarted ONVIF PullPoint subscription for '%s'", self.unique_id ) self.async_schedule_pull() async def async_renew(self) -> None: """Renew subscription.""" if not self._subscription: return termination_time = ( (dt_util.utcnow() + dt.timedelta(days=1)) .isoformat(timespec="seconds") .replace("+00:00", "Z") ) await self._subscription.Renew(termination_time) def async_schedule_pull(self) -> None: """Schedule async_pull_messages to run.""" self._unsub_refresh = async_call_later(self.hass, 1, self.async_pull_messages) async def async_pull_messages(self, _now: dt = None) -> None: """Pull messages from device.""" if self.hass.state == CoreState.running: try: pullpoint = self.device.create_pullpoint_service() response = await pullpoint.PullMessages( {"MessageLimit": 100, "Timeout": dt.timedelta(seconds=60)} ) # Renew subscription if less than two hours is left if ( dt_util.as_utc(response.TerminationTime) - dt_util.utcnow() ).total_seconds() < 7200: await self.async_renew() except RemoteProtocolError: # Likley a shutdown event, nothing to see here return except SUBSCRIPTION_ERRORS as err: LOGGER.warning( "Failed to fetch ONVIF PullPoint subscription messages for '%s': %s", self.unique_id, err, ) # Treat errors as if the camera restarted. Assume that the pullpoint # subscription is no longer valid. self._unsub_refresh = None await self.async_restart() return # Parse response await self.async_parse_messages(response.NotificationMessage) # Update entities for update_callback in self._listeners: update_callback() # Reschedule another pull if self._listeners: self.async_schedule_pull() # pylint: disable=protected-access async def async_parse_messages(self, messages) -> None: """Parse notification message.""" for msg in messages: # Guard against empty message if not msg.Topic: continue topic = msg.Topic._value_1 parser = PARSERS.get(topic) if not parser: if topic not in UNHANDLED_TOPICS: LOGGER.info( "No registered handler for event from %s: %s", self.unique_id, msg, ) UNHANDLED_TOPICS.add(topic) continue event = await parser(self.unique_id, msg) if not event: LOGGER.warning("Unable to parse event from %s: %s", self.unique_id, msg) return self._events[event.uid] = event def get_uid(self, uid) -> Event: """Retrieve event for given id.""" return self._events[uid] def get_platform(self, platform) -> List[Event]: """Retrieve events for given platform.""" return [event for event in self._events.values() if event.platform == platform]
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
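The tests above stack the same patch targets over and over; a small helper along these lines (the name patched_kodi is mine, not the repository's) could remove much of the repetition:

from contextlib import ExitStack, contextmanager
from unittest.mock import patch


@contextmanager
def patched_kodi(ping=True, connection=None):
    """Patch Kodi.ping and get_kodi_connection in one place.

    Pass an exception class as `ping` to simulate that failure mode,
    e.g. patched_kodi(ping=CannotConnectError).
    """
    if isinstance(ping, type) and issubclass(ping, Exception):
        ping_kwargs = {"side_effect": ping}
    else:
        ping_kwargs = {"return_value": ping}
    with ExitStack() as stack:
        stack.enter_context(
            patch("homeassistant.components.kodi.config_flow.Kodi.ping", **ping_kwargs)
        )
        stack.enter_context(
            patch(
                "homeassistant.components.kodi.config_flow.get_kodi_connection",
                return_value=connection,
            )
        )
        yield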
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/onvif/event.py
"""Support for USCIS Case Status.""" from datetime import timedelta import logging import uscisstatus import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "USCIS" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Required("case_id"): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the platform in Home Assistant and Case Information.""" uscis = UscisSensor(config["case_id"], config[CONF_NAME]) uscis.update() if uscis.valid_case_id: add_entities([uscis]) else: _LOGGER.error("Setup USCIS Sensor Fail check if your Case ID is Valid") class UscisSensor(Entity): """USCIS Sensor will check case status on daily basis.""" MIN_TIME_BETWEEN_UPDATES = timedelta(hours=24) CURRENT_STATUS = "current_status" LAST_CASE_UPDATE = "last_update_date" def __init__(self, case, name): """Initialize the sensor.""" self._state = None self._case_id = case self._attributes = None self.valid_case_id = None self._name = name @property def name(self): """Return the name.""" return self._name @property def state(self): """Return the state.""" return self._state @property def device_state_attributes(self): """Return the state attributes.""" return self._attributes @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Fetch data from the USCIS website and update state attributes.""" try: status = uscisstatus.get_case_status(self._case_id) self._attributes = {self.CURRENT_STATUS: status["status"]} self._state = status["date"] self.valid_case_id = True except ValueError: _LOGGER("Please Check that you have valid USCIS case id") self.valid_case_id = False
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/uscis/sensor.py
"""Config flow for Rollease Acmeda Automate Pulse Hub.""" import asyncio from typing import Dict, Optional import aiopulse import async_timeout import voluptuous as vol from homeassistant import config_entries from .const import DOMAIN # pylint: disable=unused-import class AcmedaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a Acmeda config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the config flow.""" self.discovered_hubs: Optional[Dict[str, aiopulse.Hub]] = None async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if ( user_input is not None and self.discovered_hubs is not None and user_input["id"] in self.discovered_hubs ): return await self.async_create(self.discovered_hubs[user_input["id"]]) # Already configured hosts already_configured = { entry.unique_id for entry in self._async_current_entries() } hubs = [] try: with async_timeout.timeout(5): async for hub in aiopulse.Hub.discover(): if hub.id not in already_configured: hubs.append(hub) except asyncio.TimeoutError: pass if len(hubs) == 0: return self.async_abort(reason="no_devices_found") if len(hubs) == 1: return await self.async_create(hubs[0]) self.discovered_hubs = {hub.id: hub for hub in hubs} return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required("id"): vol.In( {hub.id: f"{hub.id} {hub.host}" for hub in hubs} ) } ), ) async def async_create(self, hub): """Create the Acmeda Hub entry.""" await self.async_set_unique_id(hub.id, raise_on_progress=False) return self.async_create_entry(title=hub.id, data={"host": hub.host})
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connect", AsyncMock(side_effect=Exception)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "unknown"}


async def test_discovery(hass):
    """Test discovery flow works."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "discovery_confirm"

    with patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(
            flow_id=result["flow_id"], user_input={}
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == "hostname"
    assert result["data"] == {
        **TEST_HOST,
        **TEST_WS_PORT,
        "password": None,
        "username": None,
        "name": "hostname",
        "timeout": DEFAULT_TIMEOUT,
    }

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1


async def test_discovery_cannot_connect_http(hass):
    """Test discovery aborts if cannot connect."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"


async def test_discovery_cannot_connect_ws(hass):
    """Test discovery aborts if cannot connect to websocket."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}


async def test_discovery_exception_http(hass, user_flow):
    """Test we handle generic exception during discovery validation."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "abort"
    assert result["reason"] == "unknown"


async def test_discovery_invalid_auth(hass):
    """Test we handle invalid auth during discovery."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {}


async def test_discovery_duplicate_data(hass):
    """Test discovery aborts if same mDNS packet arrives."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "discovery_confirm"

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )
    assert result["type"] == "abort"
    assert result["reason"] == "already_in_progress"


async def test_discovery_updates_unique_id(hass):
    """Test a duplicate discovery id aborts and updates existing entry."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id=UUID,
        data={"host": "dummy", "port": 11, "name": "dummy.local."},
    )
    entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
    )

    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"

    assert entry.data["host"] == "1.1.1.1"
    assert entry.data["port"] == 8080
    assert entry.data["name"] == "hostname"


async def test_discovery_without_unique_id(hass):
    """Test a discovery flow with no unique id aborts."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID
    )

    assert result["type"] == "abort"
    assert result["reason"] == "no_uuid"


async def test_form_import(hass):
    """Test we get the form with import source."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ), patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == TEST_IMPORT["name"]
    assert result["data"] == TEST_IMPORT

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1


async def test_form_import_invalid_auth(hass):
    """Test we handle invalid auth on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "invalid_auth"


async def test_form_import_cannot_connect(hass):
    """Test we handle cannot connect on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"


async def test_form_import_exception(hass):
    """Test we handle unknown exception on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/acmeda/config_flow.py
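The helpers the Kodi tests import from .util (TEST_HOST, the TEST_* payloads, MockConnection, MockWSConnection, get_kodi_connection) are not included in this dump. Below is a minimal sketch of what such helpers could look like; every name and value here is inferred from the assertions in the tests above, not taken from the real module.

# Hypothetical sketch of the .util test helpers; all values are assumptions
# inferred from how the tests use them (e.g. the discovery test asserts
# host "1.1.1.1", port 8080, name "hostname").
UUID = "11111111-1111-1111-1111-111111111111"  # placeholder unique id

TEST_HOST = {"host": "1.1.1.1", "port": 8080, "ssl": False}
TEST_CREDENTIALS = {"username": "username", "password": "password"}
TEST_WS_PORT = {"ws_port": 9090}


class MockConnection:
    """HTTP-only connection stand-in that always reports connected."""

    def __init__(self, connected=True):
        self._connected = connected

    async def connect(self):
        """Pretend to connect successfully."""

    async def close(self):
        """Pretend to close the connection."""

    @property
    def connected(self):
        """Report the canned connection status."""
        return self._connected

    @property
    def can_subscribe(self):
        """HTTP connections cannot subscribe to events."""
        return False


class MockWSConnection(MockConnection):
    """WebSocket stand-in whose connect/connected the tests patch."""

    @property
    def can_subscribe(self):
        """WebSocket connections can subscribe to events."""
        return True


def get_kodi_connection(
    host, port, ws_port, username, password, ssl=False, timeout=5, session=None
):
    """Mirror the real factory: WebSocket when ws_port is set, else HTTP."""
    if ws_port is None:
        return MockConnection()
    return MockWSConnection()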
"""A sensor platform that give you information about the next space launch.""" from datetime import timedelta import logging from typing import Optional from pylaunches import PyLaunches, PyLaunchesException import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .const import ( ATTR_AGENCY, ATTR_AGENCY_COUNTRY_CODE, ATTR_LAUNCH_TIME, ATTR_STREAM, ATTRIBUTION, DEFAULT_NAME, ) _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(hours=1) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Create the launch sensor.""" name = config[CONF_NAME] session = async_get_clientsession(hass) launches = PyLaunches(session) async_add_entities([LaunchLibrarySensor(launches, name)], True) class LaunchLibrarySensor(Entity): """Representation of a launch_library Sensor.""" def __init__(self, launches: PyLaunches, name: str) -> None: """Initialize the sensor.""" self.launches = launches self.next_launch = None self._name = name async def async_update(self) -> None: """Get the latest data.""" try: launches = await self.launches.upcoming_launches() except PyLaunchesException as exception: _LOGGER.error("Error getting data, %s", exception) else: if launches: self.next_launch = launches[0] @property def name(self) -> str: """Return the name of the sensor.""" return self._name @property def state(self) -> Optional[str]: """Return the state of the sensor.""" if self.next_launch: return self.next_launch.name return None @property def icon(self) -> str: """Return the icon of the sensor.""" return "mdi:rocket" @property def device_state_attributes(self) -> Optional[dict]: """Return attributes for the sensor.""" if self.next_launch: return { ATTR_LAUNCH_TIME: self.next_launch.net, ATTR_AGENCY: self.next_launch.launch_service_provider.name, ATTR_AGENCY_COUNTRY_CODE: self.next_launch.pad.location.country_code, ATTR_STREAM: self.next_launch.webcast_live, ATTR_ATTRIBUTION: ATTRIBUTION, } return None
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/launch_library/sensor.py
"""Reproduce an Timer state.""" import asyncio import logging from typing import Any, Dict, Iterable, Optional from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import Context, State from homeassistant.helpers.typing import HomeAssistantType from . import ( ATTR_DURATION, DOMAIN, SERVICE_CANCEL, SERVICE_PAUSE, SERVICE_START, STATUS_ACTIVE, STATUS_IDLE, STATUS_PAUSED, ) _LOGGER = logging.getLogger(__name__) VALID_STATES = {STATUS_IDLE, STATUS_ACTIVE, STATUS_PAUSED} async def _async_reproduce_state( hass: HomeAssistantType, state: State, *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce a single state.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return if state.state not in VALID_STATES: _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return # Return if we are already at the right state. if cur_state.state == state.state and cur_state.attributes.get( ATTR_DURATION ) == state.attributes.get(ATTR_DURATION): return service_data = {ATTR_ENTITY_ID: state.entity_id} if state.state == STATUS_ACTIVE: service = SERVICE_START if ATTR_DURATION in state.attributes: service_data[ATTR_DURATION] = state.attributes[ATTR_DURATION] elif state.state == STATUS_PAUSED: service = SERVICE_PAUSE elif state.state == STATUS_IDLE: service = SERVICE_CANCEL await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) async def async_reproduce_states( hass: HomeAssistantType, states: Iterable[State], *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce Timer states.""" await asyncio.gather( *( _async_reproduce_state( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/timer/reproduce_state.py
"""Support for Neato sensors.""" from datetime import timedelta import logging from pybotvac.exceptions import NeatoRobotException from homeassistant.components.sensor import DEVICE_CLASS_BATTERY from homeassistant.const import PERCENTAGE from homeassistant.helpers.entity import Entity from .const import NEATO_DOMAIN, NEATO_LOGIN, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES) BATTERY = "Battery" async def async_setup_entry(hass, entry, async_add_entities): """Set up the Neato sensor using config entry.""" dev = [] neato = hass.data.get(NEATO_LOGIN) for robot in hass.data[NEATO_ROBOTS]: dev.append(NeatoSensor(neato, robot)) if not dev: return _LOGGER.debug("Adding robots for sensors %s", dev) async_add_entities(dev, True) class NeatoSensor(Entity): """Neato sensor.""" def __init__(self, neato, robot): """Initialize Neato sensor.""" self.robot = robot self._available = False self._robot_name = f"{self.robot.name} {BATTERY}" self._robot_serial = self.robot.serial self._state = None def update(self): """Update Neato Sensor.""" try: self._state = self.robot.state except NeatoRobotException as ex: if self._available: _LOGGER.error( "Neato sensor connection error for '%s': %s", self.entity_id, ex ) self._state = None self._available = False return self._available = True _LOGGER.debug("self._state=%s", self._state) @property def name(self): """Return the name of this sensor.""" return self._robot_name @property def unique_id(self): """Return unique ID.""" return self._robot_serial @property def device_class(self): """Return the device class.""" return DEVICE_CLASS_BATTERY @property def available(self): """Return availability.""" return self._available @property def state(self): """Return the state.""" return self._state["details"]["charge"] @property def unit_of_measurement(self): """Return unit of measurement.""" return PERCENTAGE @property def device_info(self): """Device info for neato robot.""" return {"identifiers": {(NEATO_DOMAIN, self._robot_serial)}}
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/neato/sensor.py
"""Support for Synology DSM binary sensors.""" from typing import Dict from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DISKS from homeassistant.helpers.typing import HomeAssistantType from . import SynologyDSMDeviceEntity, SynologyDSMDispatcherEntity from .const import ( DOMAIN, SECURITY_BINARY_SENSORS, STORAGE_DISK_BINARY_SENSORS, SYNO_API, UPGRADE_BINARY_SENSORS, ) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the Synology NAS binary sensor.""" api = hass.data[DOMAIN][entry.unique_id][SYNO_API] entities = [ SynoDSMSecurityBinarySensor( api, sensor_type, SECURITY_BINARY_SENSORS[sensor_type] ) for sensor_type in SECURITY_BINARY_SENSORS ] entities += [ SynoDSMUpgradeBinarySensor( api, sensor_type, UPGRADE_BINARY_SENSORS[sensor_type] ) for sensor_type in UPGRADE_BINARY_SENSORS ] # Handle all disks if api.storage.disks_ids: for disk in entry.data.get(CONF_DISKS, api.storage.disks_ids): entities += [ SynoDSMStorageBinarySensor( api, sensor_type, STORAGE_DISK_BINARY_SENSORS[sensor_type], disk ) for sensor_type in STORAGE_DISK_BINARY_SENSORS ] async_add_entities(entities) class SynoDSMSecurityBinarySensor(SynologyDSMDispatcherEntity, BinarySensorEntity): """Representation a Synology Security binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.security, self.entity_type) != "safe" @property def available(self) -> bool: """Return True if entity is available.""" return bool(self._api.security) @property def device_state_attributes(self) -> Dict[str, str]: """Return security checks details.""" return self._api.security.status_by_check class SynoDSMStorageBinarySensor(SynologyDSMDeviceEntity, BinarySensorEntity): """Representation a Synology Storage binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.storage, self.entity_type)(self._device_id) class SynoDSMUpgradeBinarySensor(SynologyDSMDispatcherEntity, BinarySensorEntity): """Representation a Synology Upgrade binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.upgrade, self.entity_type) @property def available(self) -> bool: """Return True if entity is available.""" return bool(self._api.upgrade)
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/synology_dsm/binary_sensor.py
"""Support for Powerview scenes from a Powerview hub.""" from typing import Any from aiopvapi.resources.scene import Scene as PvScene import voluptuous as vol from homeassistant.components.scene import Scene from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST, CONF_PLATFORM import homeassistant.helpers.config_validation as cv from .const import ( COORDINATOR, DEVICE_INFO, DOMAIN, HUB_ADDRESS, PV_API, PV_ROOM_DATA, PV_SCENE_DATA, ROOM_NAME_UNICODE, STATE_ATTRIBUTE_ROOM_NAME, ) from .entity import HDEntity PLATFORM_SCHEMA = vol.Schema( {vol.Required(CONF_PLATFORM): DOMAIN, vol.Required(HUB_ADDRESS): cv.string} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Import platform from yaml.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: config[HUB_ADDRESS]}, ) ) async def async_setup_entry(hass, entry, async_add_entities): """Set up powerview scene entries.""" pv_data = hass.data[DOMAIN][entry.entry_id] room_data = pv_data[PV_ROOM_DATA] scene_data = pv_data[PV_SCENE_DATA] pv_request = pv_data[PV_API] coordinator = pv_data[COORDINATOR] device_info = pv_data[DEVICE_INFO] pvscenes = ( PowerViewScene( PvScene(raw_scene, pv_request), room_data, coordinator, device_info ) for scene_id, raw_scene in scene_data.items() ) async_add_entities(pvscenes) class PowerViewScene(HDEntity, Scene): """Representation of a Powerview scene.""" def __init__(self, scene, room_data, coordinator, device_info): """Initialize the scene.""" super().__init__(coordinator, device_info, scene.id) self._scene = scene self._room_name = room_data.get(scene.room_id, {}).get(ROOM_NAME_UNICODE, "") @property def name(self): """Return the name of the scene.""" return self._scene.name @property def device_state_attributes(self): """Return the state attributes.""" return {STATE_ATTRIBUTE_ROOM_NAME: self._room_name} @property def icon(self): """Icon to use in the frontend.""" return "mdi:blinds" async def async_activate(self, **kwargs: Any) -> None: """Activate scene. Try to get entities into requested state.""" await self._scene.activate()
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/hunterdouglas_powerview/scene.py
"""The Global Disaster Alert and Coordination System (GDACS) integration.""" import asyncio from datetime import timedelta import logging from aio_georss_gdacs import GdacsFeedManager import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_SCAN_INTERVAL, CONF_UNIT_SYSTEM_IMPERIAL, LENGTH_MILES, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.unit_system import METRIC_SYSTEM from .const import ( CONF_CATEGORIES, DEFAULT_RADIUS, DEFAULT_SCAN_INTERVAL, DOMAIN, FEED, PLATFORMS, VALID_CATEGORIES, ) _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude, vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude, vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.Coerce(float), vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL ): cv.time_period, vol.Optional(CONF_CATEGORIES, default=[]): vol.All( cv.ensure_list, [vol.In(VALID_CATEGORIES)] ), } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the GDACS component.""" if DOMAIN not in config: return True conf = config[DOMAIN] latitude = conf.get(CONF_LATITUDE, hass.config.latitude) longitude = conf.get(CONF_LONGITUDE, hass.config.longitude) scan_interval = conf[CONF_SCAN_INTERVAL] categories = conf[CONF_CATEGORIES] hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={ CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude, CONF_RADIUS: conf[CONF_RADIUS], CONF_SCAN_INTERVAL: scan_interval, CONF_CATEGORIES: categories, }, ) ) return True async def async_setup_entry(hass, config_entry): """Set up the GDACS component as config entry.""" hass.data.setdefault(DOMAIN, {}) feeds = hass.data[DOMAIN].setdefault(FEED, {}) radius = config_entry.data[CONF_RADIUS] if hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL: radius = METRIC_SYSTEM.length(radius, LENGTH_MILES) # Create feed entity manager for all platforms. 
manager = GdacsFeedEntityManager(hass, config_entry, radius) feeds[config_entry.entry_id] = manager _LOGGER.debug("Feed entity manager added for %s", config_entry.entry_id) await manager.async_init() return True async def async_unload_entry(hass, config_entry): """Unload a GDACS component config entry.""" manager = hass.data[DOMAIN][FEED].pop(config_entry.entry_id) await manager.async_stop() await asyncio.wait( [ hass.config_entries.async_forward_entry_unload(config_entry, domain) for domain in PLATFORMS ] ) return True class GdacsFeedEntityManager: """Feed Entity Manager for GDACS feed.""" def __init__(self, hass, config_entry, radius_in_km): """Initialize the Feed Entity Manager.""" self._hass = hass self._config_entry = config_entry coordinates = ( config_entry.data[CONF_LATITUDE], config_entry.data[CONF_LONGITUDE], ) categories = config_entry.data[CONF_CATEGORIES] websession = aiohttp_client.async_get_clientsession(hass) self._feed_manager = GdacsFeedManager( websession, self._generate_entity, self._update_entity, self._remove_entity, coordinates, filter_radius=radius_in_km, filter_categories=categories, status_async_callback=self._status_update, ) self._config_entry_id = config_entry.entry_id self._scan_interval = timedelta(seconds=config_entry.data[CONF_SCAN_INTERVAL]) self._track_time_remove_callback = None self._status_info = None self.listeners = [] async def async_init(self): """Schedule initial and regular updates based on configured time interval.""" for domain in PLATFORMS: self._hass.async_create_task( self._hass.config_entries.async_forward_entry_setup( self._config_entry, domain ) ) async def update(event_time): """Update.""" await self.async_update() # Trigger updates at regular intervals. self._track_time_remove_callback = async_track_time_interval( self._hass, update, self._scan_interval ) _LOGGER.debug("Feed entity manager initialized") async def async_update(self): """Refresh data.""" await self._feed_manager.update() _LOGGER.debug("Feed entity manager updated") async def async_stop(self): """Stop this feed entity manager from refreshing.""" for unsub_dispatcher in self.listeners: unsub_dispatcher() self.listeners = [] if self._track_time_remove_callback: self._track_time_remove_callback() _LOGGER.debug("Feed entity manager stopped") @callback def async_event_new_entity(self): """Return manager specific event to signal new entity.""" return f"gdacs_new_geolocation_{self._config_entry_id}" def get_entry(self, external_id): """Get feed entry by external id.""" return self._feed_manager.feed_entries.get(external_id) def status_info(self): """Return latest status update info received.""" return self._status_info async def _generate_entity(self, external_id): """Generate new entity.""" async_dispatcher_send( self._hass, self.async_event_new_entity(), self, self._config_entry.unique_id, external_id, ) async def _update_entity(self, external_id): """Update entity.""" async_dispatcher_send(self._hass, f"gdacs_update_{external_id}") async def _remove_entity(self, external_id): """Remove entity.""" async_dispatcher_send(self._hass, f"gdacs_delete_{external_id}") async def _status_update(self, status_info): """Propagate status update.""" _LOGGER.debug("Status update received: %s", status_info) self._status_info = status_info async_dispatcher_send(self._hass, f"gdacs_status_{self._config_entry_id}")
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/gdacs/__init__.py
"""Support for Nexia / Trane XL Thermostats.""" from homeassistant.components.binary_sensor import BinarySensorEntity from .const import DOMAIN, NEXIA_DEVICE, UPDATE_COORDINATOR from .entity import NexiaThermostatEntity async def async_setup_entry(hass, config_entry, async_add_entities): """Set up sensors for a Nexia device.""" nexia_data = hass.data[DOMAIN][config_entry.entry_id] nexia_home = nexia_data[NEXIA_DEVICE] coordinator = nexia_data[UPDATE_COORDINATOR] entities = [] for thermostat_id in nexia_home.get_thermostat_ids(): thermostat = nexia_home.get_thermostat_by_id(thermostat_id) entities.append( NexiaBinarySensor( coordinator, thermostat, "is_blower_active", "Blower Active" ) ) if thermostat.has_emergency_heat(): entities.append( NexiaBinarySensor( coordinator, thermostat, "is_emergency_heat_active", "Emergency Heat Active", ) ) async_add_entities(entities, True) class NexiaBinarySensor(NexiaThermostatEntity, BinarySensorEntity): """Provices Nexia BinarySensor support.""" def __init__(self, coordinator, thermostat, sensor_call, sensor_name): """Initialize the nexia sensor.""" super().__init__( coordinator, thermostat, name=f"{thermostat.get_name()} {sensor_name}", unique_id=f"{thermostat.thermostat_id}_{sensor_call}", ) self._call = sensor_call self._state = None @property def is_on(self): """Return the status of the sensor.""" return getattr(self._thermostat, self._call)()
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
homeassistant/components/nexia/binary_sensor.py
"""Support for Twilio.""" from twilio.rest import Client from twilio.twiml import TwiML import voluptuous as vol from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.helpers import config_entry_flow import homeassistant.helpers.config_validation as cv from .const import DOMAIN CONF_ACCOUNT_SID = "account_sid" CONF_AUTH_TOKEN = "auth_token" DATA_TWILIO = DOMAIN RECEIVED_DATA = f"{DOMAIN}_data_received" CONFIG_SCHEMA = vol.Schema( { vol.Optional(DOMAIN): vol.Schema( { vol.Required(CONF_ACCOUNT_SID): cv.string, vol.Required(CONF_AUTH_TOKEN): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the Twilio component.""" if DOMAIN not in config: return True conf = config[DOMAIN] hass.data[DATA_TWILIO] = Client( conf.get(CONF_ACCOUNT_SID), conf.get(CONF_AUTH_TOKEN) ) return True async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook from Twilio for inbound messages and calls.""" data = dict(await request.post()) data["webhook_id"] = webhook_id hass.bus.async_fire(RECEIVED_DATA, dict(data)) return TwiML().to_xml() async def async_setup_entry(hass, entry): """Configure based on config entry.""" hass.components.webhook.async_register( DOMAIN, "Twilio", entry.data[CONF_WEBHOOK_ID], handle_webhook ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID]) return True async_remove_entry = config_entry_flow.webhook_async_remove_entry
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
homeassistant/components/twilio/__init__.py
"""Support for Lupusec Security System switches.""" from datetime import timedelta import lupupy.constants as CONST from homeassistant.components.switch import SwitchEntity from . import DOMAIN as LUPUSEC_DOMAIN, LupusecDevice SCAN_INTERVAL = timedelta(seconds=2) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Lupusec switch devices.""" if discovery_info is None: return data = hass.data[LUPUSEC_DOMAIN] devices = [] for device in data.lupusec.get_devices(generic_type=CONST.TYPE_SWITCH): devices.append(LupusecSwitch(data, device)) add_entities(devices) class LupusecSwitch(LupusecDevice, SwitchEntity): """Representation of a Lupusec switch.""" def turn_on(self, **kwargs): """Turn on the device.""" self._device.switch_on() def turn_off(self, **kwargs): """Turn off the device.""" self._device.switch_off() @property def is_on(self): """Return true if device is on.""" return self._device.is_on
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
repo_name: turbokongen/home-assistant
test_path: tests/components/kodi/test_config_flow.py
code_path: homeassistant/components/lupusec/switch.py
"""Reproduce an input boolean state.""" import asyncio import logging from typing import Any, Dict, Iterable, Optional from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, State from homeassistant.helpers.typing import HomeAssistantType from . import DOMAIN _LOGGER = logging.getLogger(__name__) async def _async_reproduce_states( hass: HomeAssistantType, state: State, *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce input boolean states.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return if state.state not in (STATE_ON, STATE_OFF): _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return if cur_state.state == state.state: return service = SERVICE_TURN_ON if state.state == STATE_ON else SERVICE_TURN_OFF await hass.services.async_call( DOMAIN, service, {ATTR_ENTITY_ID: state.entity_id}, context=context, blocking=True, ) async def async_reproduce_states( hass: HomeAssistantType, states: Iterable[State], *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce component states.""" await asyncio.gather( *( _async_reproduce_states( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
repo_name: turbokongen/home-assistant
test_path: tests/components/kodi/test_config_flow.py
code_path: homeassistant/components/input_boolean/reproduce_state.py
"""The ATAG Integration.""" from datetime import timedelta import logging import async_timeout from pyatag import AtagException, AtagOne from homeassistant.components.climate import DOMAIN as CLIMATE from homeassistant.components.sensor import DOMAIN as SENSOR from homeassistant.components.water_heater import DOMAIN as WATER_HEATER from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, asyncio from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) _LOGGER = logging.getLogger(__name__) DOMAIN = "atag" PLATFORMS = [CLIMATE, WATER_HEATER, SENSOR] async def async_setup(hass: HomeAssistant, config): """Set up the Atag component.""" return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up Atag integration from a config entry.""" session = async_get_clientsession(hass) coordinator = AtagDataUpdateCoordinator(hass, session, entry) await coordinator.async_refresh() if not coordinator.last_update_success: raise ConfigEntryNotReady hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = coordinator if entry.unique_id is None: hass.config_entries.async_update_entry(entry, unique_id=coordinator.atag.id) for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) return True class AtagDataUpdateCoordinator(DataUpdateCoordinator): """Define an object to hold Atag data.""" def __init__(self, hass, session, entry): """Initialize.""" self.atag = AtagOne(session=session, **entry.data) super().__init__( hass, _LOGGER, name=DOMAIN, update_interval=timedelta(seconds=30) ) async def _async_update_data(self): """Update data via library.""" with async_timeout.timeout(20): try: if not await self.atag.update(): raise UpdateFailed("No data received") except AtagException as error: raise UpdateFailed(error) from error return self.atag.report async def async_unload_entry(hass, entry): """Unload Atag config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok class AtagEntity(CoordinatorEntity): """Defines a base Atag entity.""" def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None: """Initialize the Atag entity.""" super().__init__(coordinator) self._id = atag_id self._name = DOMAIN.title() @property def device_info(self) -> dict: """Return info for device registry.""" device = self.coordinator.atag.id version = self.coordinator.atag.apiversion return { "identifiers": {(DOMAIN, device)}, "name": "Atag Thermostat", "model": "Atag One", "sw_version": version, "manufacturer": "Atag", } @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def unique_id(self): """Return a unique ID to use for this entity.""" return f"{self.coordinator.atag.id}-{self._id}"
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
repo_name: turbokongen/home-assistant
test_path: tests/components/kodi/test_config_flow.py
code_path: homeassistant/components/atag/__init__.py
"""The Dune HD component.""" import asyncio from pdunehd import DuneHDPlayer from homeassistant.const import CONF_HOST from .const import DOMAIN PLATFORMS = ["media_player"] async def async_setup(hass, config): """Set up the Dune HD component.""" return True async def async_setup_entry(hass, config_entry): """Set up a config entry.""" host = config_entry.data[CONF_HOST] player = DuneHDPlayer(host) hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][config_entry.entry_id] = player for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, component) ) return True async def async_unload_entry(hass, config_entry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, component) for component in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(config_entry.entry_id) return unload_ok
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "credentials"
    assert result["errors"] == {"base": "unknown"}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_CREDENTIALS
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}


async def test_form_cannot_connect_http(hass, user_flow):
    """Test we handle cannot connect over HTTP error."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "cannot_connect"}


async def test_form_exception_http(hass, user_flow):
    """Test we handle generic exception over HTTP."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "unknown"}


async def test_form_cannot_connect_ws(hass, user_flow):
    """Test we handle cannot connect over WebSocket error."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connected", new_callable=PropertyMock(return_value=False)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "cannot_connect"}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "cannot_connect"}


async def test_form_exception_ws(hass, user_flow):
    """Test we handle generic exception over WebSocket."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST)

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection, "connect", AsyncMock(side_effect=Exception)
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], TEST_WS_PORT
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {"base": "unknown"}


async def test_discovery(hass):
    """Test discovery flow works."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "discovery_confirm"

    with patch(
        "homeassistant.components.kodi.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.kodi.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(
            flow_id=result["flow_id"], user_input={}
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == "hostname"
    assert result["data"] == {
        **TEST_HOST,
        **TEST_WS_PORT,
        "password": None,
        "username": None,
        "name": "hostname",
        "timeout": DEFAULT_TIMEOUT,
    }

    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1


async def test_discovery_cannot_connect_http(hass):
    """Test discovery aborts if cannot connect."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=CannotConnectError,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"


async def test_discovery_cannot_connect_ws(hass):
    """Test discovery aborts if cannot connect to websocket."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        return_value=True,
    ), patch.object(
        MockWSConnection,
        "connect",
        AsyncMock(side_effect=CannotConnectError),
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        new=get_kodi_connection,
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "form"
    assert result["step_id"] == "ws_port"
    assert result["errors"] == {}


async def test_discovery_exception_http(hass, user_flow):
    """Test we handle generic exception during discovery validation."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
        )

    assert result["type"] == "abort"
    assert result["reason"] == "unknown"


async def test_discovery_invalid_auth(hass):
    """Test we handle invalid auth during discovery."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=InvalidAuthError,
    ), patch(
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"


async def test_form_import_exception(hass):
    """Test we handle unknown exception on import."""
    with patch(
        "homeassistant.components.kodi.config_flow.Kodi.ping",
        side_effect=Exception,
    ), patch(
        "homeassistant.components.kodi.config_flow.get_kodi_connection",
        return_value=MockConnection(),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=TEST_IMPORT,
        )

    assert result["type"] == "abort"
    assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/dunehd/__init__.py