Dataset columns: input (string, lengths 53 to 297k), output (string, 604 distinct values), repo_name (string, 376 distinct values), test_path (string, 583 distinct values), code_path (string, lengths 7 to 116).
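Each record below pairs a source file (input) with the repository test file that exercises it (output), followed by the repo_name, test_path, and code_path fields. For orientation, a minimal sketch of iterating such a dump, assuming it is published as a Hugging Face `datasets` dataset; the identifier "example/code-to-test" is a hypothetical placeholder, not the real dataset name.

# Minimal sketch: iterate the columns described above.
# Assumes the Hugging Face `datasets` package; the dataset id is a placeholder.
from datasets import load_dataset

ds = load_dataset("example/code-to-test", split="train")  # hypothetical id
for record in ds.select(range(3)):
    print(record["repo_name"], record["code_path"], "->", record["test_path"])
    print(record["input"][:72])   # start of the source file under test
    print(record["output"][:72])  # start of the paired test file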
""" Support for Epson projector. For more details about this component, please refer to the documentation at https://home-assistant.io/components/media_player.epson/ """ import logging import voluptuous as vol from homeassistant.components.media_player import ( MediaPlayerDevice, MEDIA_PLAYER_SCHEMA, PLATFORM_SCHEMA) from homeassistant.components.media_player.const import ( DOMAIN, SUPPORT_NEXT_TRACK, SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP) from homeassistant.const import ( ATTR_ENTITY_ID, CONF_HOST, CONF_NAME, CONF_PORT, CONF_SSL, STATE_OFF, STATE_ON) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['epson-projector==0.1.3'] _LOGGER = logging.getLogger(__name__) ATTR_CMODE = 'cmode' DATA_EPSON = 'epson' DEFAULT_NAME = 'EPSON Projector' SERVICE_SELECT_CMODE = 'epson_select_cmode' SUPPORT_CMODE = 33001 SUPPORT_EPSON = SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE |\ SUPPORT_CMODE | SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | \ SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=80): cv.port, vol.Optional(CONF_SSL, default=False): cv.boolean, }) async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up the Epson media player platform.""" from epson_projector.const import (CMODE_LIST_SET) if DATA_EPSON not in hass.data: hass.data[DATA_EPSON] = [] name = config.get(CONF_NAME) host = config.get(CONF_HOST) port = config.get(CONF_PORT) ssl = config.get(CONF_SSL) epson = EpsonProjector(async_get_clientsession( hass, verify_ssl=False), name, host, port, ssl) hass.data[DATA_EPSON].append(epson) async_add_entities([epson], update_before_add=True) async def async_service_handler(service): """Handle for services.""" entity_ids = service.data.get(ATTR_ENTITY_ID) if entity_ids: devices = [device for device in hass.data[DATA_EPSON] if device.entity_id in entity_ids] else: devices = hass.data[DATA_EPSON] for device in devices: if service.service == SERVICE_SELECT_CMODE: cmode = service.data.get(ATTR_CMODE) await device.select_cmode(cmode) device.async_schedule_update_ha_state(True) epson_schema = MEDIA_PLAYER_SCHEMA.extend({ vol.Required(ATTR_CMODE): vol.All(cv.string, vol.Any(*CMODE_LIST_SET)) }) hass.services.async_register( DOMAIN, SERVICE_SELECT_CMODE, async_service_handler, schema=epson_schema) class EpsonProjector(MediaPlayerDevice): """Representation of Epson Projector Device.""" def __init__(self, websession, name, host, port, encryption): """Initialize entity to control Epson projector.""" import epson_projector as epson from epson_projector.const import DEFAULT_SOURCES self._name = name self._projector = epson.Projector( host, websession=websession, port=port) self._cmode = None self._source_list = list(DEFAULT_SOURCES.values()) self._source = None self._volume = None self._state = None async def async_update(self): """Update state of device.""" from epson_projector.const import ( EPSON_CODES, POWER, CMODE, CMODE_LIST, SOURCE, VOLUME, BUSY, SOURCE_LIST) is_turned_on = await self._projector.get_property(POWER) _LOGGER.debug("Project turn on/off status: %s", is_turned_on) if is_turned_on and is_turned_on == EPSON_CODES[POWER]: self._state = STATE_ON cmode = await self._projector.get_property(CMODE) self._cmode = 
CMODE_LIST.get(cmode, self._cmode) source = await self._projector.get_property(SOURCE) self._source = SOURCE_LIST.get(source, self._source) volume = await self._projector.get_property(VOLUME) if volume: self._volume = volume elif is_turned_on == BUSY: self._state = STATE_ON else: self._state = STATE_OFF @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the state of the device.""" return self._state @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_EPSON async def async_turn_on(self): """Turn on epson.""" from epson_projector.const import TURN_ON await self._projector.send_command(TURN_ON) async def async_turn_off(self): """Turn off epson.""" from epson_projector.const import TURN_OFF await self._projector.send_command(TURN_OFF) @property def source_list(self): """List of available input sources.""" return self._source_list @property def source(self): """Get current input sources.""" return self._source @property def volume_level(self): """Return the volume level of the media player (0..1).""" return self._volume async def select_cmode(self, cmode): """Set color mode in Epson.""" from epson_projector.const import (CMODE_LIST_SET) await self._projector.send_command(CMODE_LIST_SET[cmode]) async def async_select_source(self, source): """Select input source.""" from epson_projector.const import INV_SOURCES selected_source = INV_SOURCES[source] await self._projector.send_command(selected_source) async def async_mute_volume(self, mute): """Mute (true) or unmute (false) sound.""" from epson_projector.const import MUTE await self._projector.send_command(MUTE) async def async_volume_up(self): """Increase volume.""" from epson_projector.const import VOL_UP await self._projector.send_command(VOL_UP) async def async_volume_down(self): """Decrease volume.""" from epson_projector.const import VOL_DOWN await self._projector.send_command(VOL_DOWN) async def async_media_play(self): """Play media via Epson.""" from epson_projector.const import PLAY await self._projector.send_command(PLAY) async def async_media_pause(self): """Pause media via Epson.""" from epson_projector.const import PAUSE await self._projector.send_command(PAUSE) async def async_media_next_track(self): """Skip to next.""" from epson_projector.const import FAST await self._projector.send_command(FAST) async def async_media_previous_track(self): """Skip to previous.""" from epson_projector.const import BACK await self._projector.send_command(BACK) @property def device_state_attributes(self): """Return device specific state attributes.""" attributes = {} if self._cmode is not None: attributes[ATTR_CMODE] = self._cmode return attributes
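The platform above registers an epson_select_cmode service that takes an entity id plus a color-mode key. A short sketch of invoking it from an async context inside Home Assistant; the entity id and the 'cinema' key are illustrative assumptions (valid keys come from epson_projector.const.CMODE_LIST_SET).

# Illustrative service call; entity id and cmode value are assumptions.
await hass.services.async_call(
    'media_player', 'epson_select_cmode',
    {'entity_id': 'media_player.epson_projector', 'cmode': 'cinema'},
    blocking=True)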
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
repo_name: PetePriority/home-assistant
test_path: tests/components/automation/test_litejet.py
code_path: homeassistant/components/media_player/epson.py
""" Support for Mikrotik routers as device tracker. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/device_tracker.mikrotik/ """ import logging import ssl import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_PORT, CONF_SSL, CONF_METHOD) REQUIREMENTS = ['librouteros==2.2.0'] _LOGGER = logging.getLogger(__name__) MTK_DEFAULT_API_PORT = '8728' MTK_DEFAULT_API_SSL_PORT = '8729' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_METHOD): cv.string, vol.Optional(CONF_PORT): cv.port, vol.Optional(CONF_SSL, default=False): cv.boolean }) def get_scanner(hass, config): """Validate the configuration and return MTikScanner.""" scanner = MikrotikScanner(config[DOMAIN]) return scanner if scanner.success_init else None class MikrotikScanner(DeviceScanner): """This class queries a Mikrotik router.""" def __init__(self, config): """Initialize the scanner.""" self.last_results = {} self.host = config[CONF_HOST] self.ssl = config[CONF_SSL] try: self.port = config[CONF_PORT] except KeyError: if self.ssl: self.port = MTK_DEFAULT_API_SSL_PORT else: self.port = MTK_DEFAULT_API_PORT self.username = config[CONF_USERNAME] self.password = config[CONF_PASSWORD] self.method = config.get(CONF_METHOD) self.connected = False self.success_init = False self.client = None self.wireless_exist = None self.success_init = self.connect_to_device() if self.success_init: _LOGGER.info("Start polling Mikrotik (%s) router...", self.host) self._update_info() else: _LOGGER.error("Connection to Mikrotik (%s) failed", self.host) def connect_to_device(self): """Connect to Mikrotik method.""" import librouteros try: kwargs = { 'port': self.port, 'encoding': 'utf-8' } if self.ssl: ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE kwargs['ssl_wrapper'] = ssl_context.wrap_socket self.client = librouteros.connect( self.host, self.username, self.password, **kwargs ) try: routerboard_info = self.client( cmd='/system/routerboard/getall') except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError): routerboard_info = None raise if routerboard_info: _LOGGER.info( "Connected to Mikrotik %s with IP %s", routerboard_info[0].get('model', 'Router'), self.host) self.connected = True try: self.capsman_exist = self.client( cmd='/caps-man/interface/getall') except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError): self.capsman_exist = False if not self.capsman_exist: _LOGGER.info( "Mikrotik %s: Not a CAPSman controller. Trying " "local interfaces", self.host) try: self.wireless_exist = self.client( cmd='/interface/wireless/getall') except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError): self.wireless_exist = False if not self.wireless_exist and not self.capsman_exist \ or self.method == 'ip': _LOGGER.info( "Mikrotik %s: Wireless adapters not found. Try to " "use DHCP lease table as presence tracker source. 
" "Please decrease lease time as much as possible", self.host) if self.method: _LOGGER.info( "Mikrotik %s: Manually selected polling method %s", self.host, self.method) except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError) as api_error: _LOGGER.error("Connection error: %s", api_error) return self.connected def scan_devices(self): """Scan for new devices and return a list with found device MACs.""" import librouteros try: self._update_info() except (librouteros.exceptions.TrapError, librouteros.exceptions.MultiTrapError, librouteros.exceptions.ConnectionError) as api_error: _LOGGER.error("Connection error: %s", api_error) self.connect_to_device() return [device for device in self.last_results] def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" return self.last_results.get(device) def _update_info(self): """Retrieve latest information from the Mikrotik box.""" if self.method: devices_tracker = self.method else: if self.capsman_exist: devices_tracker = 'capsman' elif self.wireless_exist: devices_tracker = 'wireless' else: devices_tracker = 'ip' _LOGGER.debug( "Loading %s devices from Mikrotik (%s) ...", devices_tracker, self.host) device_names = self.client(cmd='/ip/dhcp-server/lease/getall') if devices_tracker == 'capsman': devices = self.client( cmd='/caps-man/registration-table/getall') elif devices_tracker == 'wireless': devices = self.client( cmd='/interface/wireless/registration-table/getall') else: devices = device_names if device_names is None and devices is None: return False mac_names = {device.get('mac-address'): device.get('host-name') for device in device_names if device.get('mac-address')} if devices_tracker in ('wireless', 'capsman'): self.last_results = { device.get('mac-address'): mac_names.get(device.get('mac-address')) for device in devices} else: self.last_results = { device.get('mac-address'): mac_names.get(device.get('mac-address')) for device in device_names if device.get('active-address')} return True
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
repo_name: PetePriority/home-assistant
test_path: tests/components/automation/test_litejet.py
code_path: homeassistant/components/device_tracker/mikrotik.py
""" Interfaces with iAlarm control panels. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/alarm_control_panel.ialarm/ """ import logging import re import voluptuous as vol import homeassistant.components.alarm_control_panel as alarm from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA from homeassistant.const import ( CONF_CODE, CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['pyialarm==0.3'] _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'iAlarm' def no_application_protocol(value): """Validate that value is without the application protocol.""" protocol_separator = "://" if not value or protocol_separator in value: raise vol.Invalid( 'Invalid host, {} is not allowed'.format(protocol_separator)) return value PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): vol.All(cv.string, no_application_protocol), vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_CODE): cv.positive_int, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up an iAlarm control panel.""" name = config.get(CONF_NAME) code = config.get(CONF_CODE) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) host = config.get(CONF_HOST) url = 'http://{}'.format(host) ialarm = IAlarmPanel(name, code, username, password, url) add_entities([ialarm], True) class IAlarmPanel(alarm.AlarmControlPanel): """Representation of an iAlarm status.""" def __init__(self, name, code, username, password, url): """Initialize the iAlarm status.""" from pyialarm import IAlarm self._name = name self._code = str(code) if code else None self._username = username self._password = password self._url = url self._state = None self._client = IAlarm(username, password, url) @property def name(self): """Return the name of the device.""" return self._name @property def code_format(self): """Return one or more digits/characters.""" if self._code is None: return None if isinstance(self._code, str) and re.search('^\\d+$', self._code): return alarm.FORMAT_NUMBER return alarm.FORMAT_TEXT @property def state(self): """Return the state of the device.""" return self._state def update(self): """Return the state of the device.""" status = self._client.get_status() _LOGGER.debug('iAlarm status: %s', status) if status: status = int(status) if status == self._client.DISARMED: state = STATE_ALARM_DISARMED elif status == self._client.ARMED_AWAY: state = STATE_ALARM_ARMED_AWAY elif status == self._client.ARMED_STAY: state = STATE_ALARM_ARMED_HOME elif status == self._client.TRIGGERED: state = STATE_ALARM_TRIGGERED else: state = None self._state = state def alarm_disarm(self, code=None): """Send disarm command.""" if self._validate_code(code): self._client.disarm() def alarm_arm_away(self, code=None): """Send arm away command.""" if self._validate_code(code): self._client.arm_away() def alarm_arm_home(self, code=None): """Send arm home command.""" if self._validate_code(code): self._client.arm_stay() def _validate_code(self, code): """Validate given code.""" check = self._code is None or code == self._code if not check: _LOGGER.warning("Wrong code entered") return check
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
repo_name: PetePriority/home-assistant
test_path: tests/components/automation/test_litejet.py
code_path: homeassistant/components/alarm_control_panel/ialarm.py
""" Module with location helpers. detect_location_info and elevation are mocked by default during tests. """ import collections import math from typing import Any, Optional, Tuple, Dict import requests ELEVATION_URL = 'http://maps.googleapis.com/maps/api/elevation/json' IP_API = 'http://ip-api.com/json' IPAPI = 'https://ipapi.co/json/' # Constants from https://github.com/maurycyp/vincenty # Earth ellipsoid according to WGS 84 # Axis a of the ellipsoid (Radius of the earth in meters) AXIS_A = 6378137 # Flattening f = (a-b) / a FLATTENING = 1 / 298.257223563 # Axis b of the ellipsoid in meters. AXIS_B = 6356752.314245 MILES_PER_KILOMETER = 0.621371 MAX_ITERATIONS = 200 CONVERGENCE_THRESHOLD = 1e-12 LocationInfo = collections.namedtuple( "LocationInfo", ['ip', 'country_code', 'country_name', 'region_code', 'region_name', 'city', 'zip_code', 'time_zone', 'latitude', 'longitude', 'use_metric']) def detect_location_info() -> Optional[LocationInfo]: """Detect location information.""" data = _get_ipapi() if data is None: data = _get_ip_api() if data is None: return None data['use_metric'] = data['country_code'] not in ( 'US', 'MM', 'LR') return LocationInfo(**data) def distance(lat1: Optional[float], lon1: Optional[float], lat2: float, lon2: float) -> Optional[float]: """Calculate the distance in meters between two points. Async friendly. """ if lat1 is None or lon1 is None: return None result = vincenty((lat1, lon1), (lat2, lon2)) if result is None: return None return result * 1000 def elevation(latitude: float, longitude: float) -> int: """Return elevation for given latitude and longitude.""" try: req = requests.get( ELEVATION_URL, params={ 'locations': '{},{}'.format(latitude, longitude), 'sensor': 'false', }, timeout=10) except requests.RequestException: return 0 if req.status_code != 200: return 0 try: return int(float(req.json()['results'][0]['elevation'])) except (ValueError, KeyError, IndexError): return 0 # Author: https://github.com/maurycyp # Source: https://github.com/maurycyp/vincenty # License: https://github.com/maurycyp/vincenty/blob/master/LICENSE # pylint: disable=invalid-name def vincenty(point1: Tuple[float, float], point2: Tuple[float, float], miles: bool = False) -> Optional[float]: """ Vincenty formula (inverse method) to calculate the distance. Result in kilometers or miles between two points on the surface of a spheroid. Async friendly. 
""" # short-circuit coincident points if point1[0] == point2[0] and point1[1] == point2[1]: return 0.0 U1 = math.atan((1 - FLATTENING) * math.tan(math.radians(point1[0]))) U2 = math.atan((1 - FLATTENING) * math.tan(math.radians(point2[0]))) L = math.radians(point2[1] - point1[1]) Lambda = L sinU1 = math.sin(U1) cosU1 = math.cos(U1) sinU2 = math.sin(U2) cosU2 = math.cos(U2) for _ in range(MAX_ITERATIONS): sinLambda = math.sin(Lambda) cosLambda = math.cos(Lambda) sinSigma = math.sqrt((cosU2 * sinLambda) ** 2 + (cosU1 * sinU2 - sinU1 * cosU2 * cosLambda) ** 2) if sinSigma == 0: return 0.0 # coincident points cosSigma = sinU1 * sinU2 + cosU1 * cosU2 * cosLambda sigma = math.atan2(sinSigma, cosSigma) sinAlpha = cosU1 * cosU2 * sinLambda / sinSigma cosSqAlpha = 1 - sinAlpha ** 2 try: cos2SigmaM = cosSigma - 2 * sinU1 * sinU2 / cosSqAlpha except ZeroDivisionError: cos2SigmaM = 0 C = FLATTENING / 16 * cosSqAlpha * (4 + FLATTENING * (4 - 3 * cosSqAlpha)) LambdaPrev = Lambda Lambda = L + (1 - C) * FLATTENING * sinAlpha * (sigma + C * sinSigma * (cos2SigmaM + C * cosSigma * (-1 + 2 * cos2SigmaM ** 2))) if abs(Lambda - LambdaPrev) < CONVERGENCE_THRESHOLD: break # successful convergence else: return None # failure to converge uSq = cosSqAlpha * (AXIS_A ** 2 - AXIS_B ** 2) / (AXIS_B ** 2) A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq))) B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq))) deltaSigma = B * sinSigma * (cos2SigmaM + B / 4 * (cosSigma * (-1 + 2 * cos2SigmaM ** 2) - B / 6 * cos2SigmaM * (-3 + 4 * sinSigma ** 2) * (-3 + 4 * cos2SigmaM ** 2))) s = AXIS_B * A * (sigma - deltaSigma) s /= 1000 # Conversion of meters to kilometers if miles: s *= MILES_PER_KILOMETER # kilometers to miles return round(s, 6) def _get_ipapi() -> Optional[Dict[str, Any]]: """Query ipapi.co for location data.""" try: raw_info = requests.get(IPAPI, timeout=5).json() except (requests.RequestException, ValueError): return None return { 'ip': raw_info.get('ip'), 'country_code': raw_info.get('country'), 'country_name': raw_info.get('country_name'), 'region_code': raw_info.get('region_code'), 'region_name': raw_info.get('region'), 'city': raw_info.get('city'), 'zip_code': raw_info.get('postal'), 'time_zone': raw_info.get('timezone'), 'latitude': raw_info.get('latitude'), 'longitude': raw_info.get('longitude'), } def _get_ip_api() -> Optional[Dict[str, Any]]: """Query ip-api.com for location data.""" try: raw_info = requests.get(IP_API, timeout=5).json() except (requests.RequestException, ValueError): return None return { 'ip': raw_info.get('query'), 'country_code': raw_info.get('countryCode'), 'country_name': raw_info.get('country'), 'region_code': raw_info.get('region'), 'region_name': raw_info.get('regionName'), 'city': raw_info.get('city'), 'zip_code': raw_info.get('zip'), 'time_zone': raw_info.get('timezone'), 'latitude': raw_info.get('lat'), 'longitude': raw_info.get('lon'), }
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
repo_name: PetePriority/home-assistant
test_path: tests/components/automation/test_litejet.py
code_path: homeassistant/util/location.py
""" This platform provides sensors for OpenUV data. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.openuv/ """ import logging from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.components.openuv import ( DATA_OPENUV_CLIENT, DATA_UV, DOMAIN, SENSORS, TOPIC_UPDATE, TYPE_CURRENT_OZONE_LEVEL, TYPE_CURRENT_UV_INDEX, TYPE_CURRENT_UV_LEVEL, TYPE_MAX_UV_INDEX, TYPE_SAFE_EXPOSURE_TIME_1, TYPE_SAFE_EXPOSURE_TIME_2, TYPE_SAFE_EXPOSURE_TIME_3, TYPE_SAFE_EXPOSURE_TIME_4, TYPE_SAFE_EXPOSURE_TIME_5, TYPE_SAFE_EXPOSURE_TIME_6, OpenUvEntity) from homeassistant.util.dt import as_local, parse_datetime DEPENDENCIES = ['openuv'] _LOGGER = logging.getLogger(__name__) ATTR_MAX_UV_TIME = 'time' EXPOSURE_TYPE_MAP = { TYPE_SAFE_EXPOSURE_TIME_1: 'st1', TYPE_SAFE_EXPOSURE_TIME_2: 'st2', TYPE_SAFE_EXPOSURE_TIME_3: 'st3', TYPE_SAFE_EXPOSURE_TIME_4: 'st4', TYPE_SAFE_EXPOSURE_TIME_5: 'st5', TYPE_SAFE_EXPOSURE_TIME_6: 'st6' } UV_LEVEL_EXTREME = "Extreme" UV_LEVEL_VHIGH = "Very High" UV_LEVEL_HIGH = "High" UV_LEVEL_MODERATE = "Moderate" UV_LEVEL_LOW = "Low" async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up an OpenUV sensor based on existing config.""" pass async def async_setup_entry(hass, entry, async_add_entities): """Set up a Nest sensor based on a config entry.""" openuv = hass.data[DOMAIN][DATA_OPENUV_CLIENT][entry.entry_id] sensors = [] for sensor_type in openuv.sensor_conditions: name, icon, unit = SENSORS[sensor_type] sensors.append( OpenUvSensor( openuv, sensor_type, name, icon, unit, entry.entry_id)) async_add_entities(sensors, True) class OpenUvSensor(OpenUvEntity): """Define a binary sensor for OpenUV.""" def __init__(self, openuv, sensor_type, name, icon, unit, entry_id): """Initialize the sensor.""" super().__init__(openuv) self._async_unsub_dispatcher_connect = None self._entry_id = entry_id self._icon = icon self._latitude = openuv.client.latitude self._longitude = openuv.client.longitude self._name = name self._sensor_type = sensor_type self._state = None self._unit = unit @property def icon(self): """Return the icon.""" return self._icon @property def should_poll(self): """Disable polling.""" return False @property def state(self): """Return the status of the sensor.""" return self._state @property def unique_id(self) -> str: """Return a unique, HASS-friendly identifier for this entity.""" return '{0}_{1}_{2}'.format( self._latitude, self._longitude, self._sensor_type) @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit async def async_added_to_hass(self): """Register callbacks.""" @callback def update(): """Update the state.""" self.async_schedule_update_ha_state(True) self._async_unsub_dispatcher_connect = async_dispatcher_connect( self.hass, TOPIC_UPDATE, update) async def async_will_remove_from_hass(self): """Disconnect dispatcher listener when removed.""" if self._async_unsub_dispatcher_connect: self._async_unsub_dispatcher_connect() async def async_update(self): """Update the state.""" data = self.openuv.data[DATA_UV]['result'] if self._sensor_type == TYPE_CURRENT_OZONE_LEVEL: self._state = data['ozone'] elif self._sensor_type == TYPE_CURRENT_UV_INDEX: self._state = data['uv'] elif self._sensor_type == TYPE_CURRENT_UV_LEVEL: if data['uv'] >= 11: self._state = UV_LEVEL_EXTREME elif data['uv'] >= 8: self._state = UV_LEVEL_VHIGH elif data['uv'] >= 6: 
self._state = UV_LEVEL_HIGH elif data['uv'] >= 3: self._state = UV_LEVEL_MODERATE else: self._state = UV_LEVEL_LOW elif self._sensor_type == TYPE_MAX_UV_INDEX: self._state = data['uv_max'] self._attrs.update({ ATTR_MAX_UV_TIME: as_local(parse_datetime(data['uv_max_time'])) }) elif self._sensor_type in (TYPE_SAFE_EXPOSURE_TIME_1, TYPE_SAFE_EXPOSURE_TIME_2, TYPE_SAFE_EXPOSURE_TIME_3, TYPE_SAFE_EXPOSURE_TIME_4, TYPE_SAFE_EXPOSURE_TIME_5, TYPE_SAFE_EXPOSURE_TIME_6): self._state = data['safe_exposure_time'][EXPOSURE_TYPE_MAP[ self._sensor_type]]
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
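The dict triggers built in these tests mirror what a user would put in configuration; a sketch of the full trigger shape they exercise (values taken from the tests' own examples):

# LiteJet trigger as the tests build it: 'number' picks the switch,
# and the optional hold bounds take time-period dicts.
TRIGGER_SKETCH = {
    'platform': 'litejet',
    'number': 2,
    'held_more_than': {'milliseconds': '100'},
    'held_less_than': {'milliseconds': '300'},
}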
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/openuv/sensor.py
"""Home Assistant command line scripts.""" import argparse import asyncio import importlib import logging import os import sys from typing import List from homeassistant.bootstrap import async_mount_local_lib_path from homeassistant.config import get_default_config_dir from homeassistant import requirements from homeassistant.util.package import install_package, is_virtual_env def run(args: List) -> int: """Run a script.""" scripts = [] path = os.path.dirname(__file__) for fil in os.listdir(path): if fil == '__pycache__': continue elif os.path.isdir(os.path.join(path, fil)): scripts.append(fil) elif fil != '__init__.py' and fil.endswith('.py'): scripts.append(fil[:-3]) if not args: print('Please specify a script to run.') print('Available scripts:', ', '.join(scripts)) return 1 if args[0] not in scripts: print('Invalid script specified.') print('Available scripts:', ', '.join(scripts)) return 1 script = importlib.import_module('homeassistant.scripts.' + args[0]) config_dir = extract_config_dir() if not is_virtual_env(): asyncio.get_event_loop().run_until_complete( async_mount_local_lib_path(config_dir)) pip_kwargs = requirements.pip_kwargs(config_dir) logging.basicConfig(stream=sys.stdout, level=logging.INFO) for req in getattr(script, 'REQUIREMENTS', []): returncode = install_package(req, **pip_kwargs) if not returncode: print('Aborting script, could not install dependency', req) return 1 return script.run(args[1:]) # type: ignore def extract_config_dir(args=None) -> str: """Extract the config dir from the arguments or get the default.""" parser = argparse.ArgumentParser(add_help=False) parser.add_argument('-c', '--config', default=None) args = parser.parse_known_args(args)[0] return (os.path.join(os.getcwd(), args.config) if args.config else get_default_config_dir())
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/scripts/__init__.py
""" Support for the Daikin HVAC. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/climate.daikin/ """ import logging import re import voluptuous as vol from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, ATTR_OPERATION_MODE, ATTR_SWING_MODE, PLATFORM_SCHEMA, STATE_AUTO, STATE_COOL, STATE_DRY, STATE_FAN_ONLY, STATE_HEAT, STATE_OFF, SUPPORT_FAN_MODE, SUPPORT_OPERATION_MODE, SUPPORT_SWING_MODE, SUPPORT_TARGET_TEMPERATURE, ClimateDevice) from homeassistant.components.daikin import DOMAIN as DAIKIN_DOMAIN from homeassistant.components.daikin.const import ( ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, ATTR_TARGET_TEMPERATURE) from homeassistant.const import ( ATTR_TEMPERATURE, CONF_HOST, CONF_NAME, TEMP_CELSIUS) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string, }) HA_STATE_TO_DAIKIN = { STATE_FAN_ONLY: 'fan', STATE_DRY: 'dry', STATE_COOL: 'cool', STATE_HEAT: 'hot', STATE_AUTO: 'auto', STATE_OFF: 'off', } DAIKIN_TO_HA_STATE = { 'fan': STATE_FAN_ONLY, 'dry': STATE_DRY, 'cool': STATE_COOL, 'hot': STATE_HEAT, 'auto': STATE_AUTO, 'off': STATE_OFF, } HA_ATTR_TO_DAIKIN = { ATTR_OPERATION_MODE: 'mode', ATTR_FAN_MODE: 'f_rate', ATTR_SWING_MODE: 'f_dir', ATTR_INSIDE_TEMPERATURE: 'htemp', ATTR_OUTSIDE_TEMPERATURE: 'otemp', ATTR_TARGET_TEMPERATURE: 'stemp' } def setup_platform(hass, config, add_entities, discovery_info=None): """Old way of setting up the Daikin HVAC platform. Can only be called when a user accidentally mentions the platform in their config. But even in that case it would have been ignored. """ pass async def async_setup_entry(hass, entry, async_add_entities): """Set up Daikin climate based on config_entry.""" daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id) async_add_entities([DaikinClimate(daikin_api)]) class DaikinClimate(ClimateDevice): """Representation of a Daikin HVAC.""" def __init__(self, api): """Initialize the climate device.""" from pydaikin import appliance self._api = api self._list = { ATTR_OPERATION_MODE: list(HA_STATE_TO_DAIKIN), ATTR_FAN_MODE: list( map( str.title, appliance.daikin_values(HA_ATTR_TO_DAIKIN[ATTR_FAN_MODE]) ) ), ATTR_SWING_MODE: list( map( str.title, appliance.daikin_values(HA_ATTR_TO_DAIKIN[ATTR_SWING_MODE]) ) ), } self._supported_features = SUPPORT_TARGET_TEMPERATURE \ | SUPPORT_OPERATION_MODE if self._api.device.support_fan_mode: self._supported_features |= SUPPORT_FAN_MODE if self._api.device.support_swing_mode: self._supported_features |= SUPPORT_SWING_MODE def get(self, key): """Retrieve device settings from API library cache.""" value = None cast_to_float = False if key in [ATTR_TEMPERATURE, ATTR_INSIDE_TEMPERATURE, ATTR_CURRENT_TEMPERATURE]: key = ATTR_INSIDE_TEMPERATURE daikin_attr = HA_ATTR_TO_DAIKIN.get(key) if key == ATTR_INSIDE_TEMPERATURE: value = self._api.device.values.get(daikin_attr) cast_to_float = True elif key == ATTR_TARGET_TEMPERATURE: value = self._api.device.values.get(daikin_attr) cast_to_float = True elif key == ATTR_OUTSIDE_TEMPERATURE: value = self._api.device.values.get(daikin_attr) cast_to_float = True elif key == ATTR_FAN_MODE: value = self._api.device.represent(daikin_attr)[1].title() elif key == ATTR_SWING_MODE: value = self._api.device.represent(daikin_attr)[1].title() elif key == ATTR_OPERATION_MODE: # Daikin can return also internal states auto-1 or auto-7 # 
and we need to translate them as AUTO daikin_mode = re.sub( '[^a-z]', '', self._api.device.represent(daikin_attr)[1]) ha_mode = DAIKIN_TO_HA_STATE.get(daikin_mode) value = ha_mode if value is None: _LOGGER.error("Invalid value requested for key %s", key) else: if value in ("-", "--"): value = None elif cast_to_float: try: value = float(value) except ValueError: value = None return value def set(self, settings): """Set device settings using API.""" values = {} for attr in [ATTR_TEMPERATURE, ATTR_FAN_MODE, ATTR_SWING_MODE, ATTR_OPERATION_MODE]: value = settings.get(attr) if value is None: continue daikin_attr = HA_ATTR_TO_DAIKIN.get(attr) if daikin_attr is not None: if attr == ATTR_OPERATION_MODE: values[daikin_attr] = HA_STATE_TO_DAIKIN[value] elif value in self._list[attr]: values[daikin_attr] = value.lower() else: _LOGGER.error("Invalid value %s for %s", attr, value) # temperature elif attr == ATTR_TEMPERATURE: try: values['stemp'] = str(int(value)) except ValueError: _LOGGER.error("Invalid temperature %s", value) if values: self._api.device.set(values) @property def supported_features(self): """Return the list of supported features.""" return self._supported_features @property def name(self): """Return the name of the thermostat, if any.""" return self._api.name @property def unique_id(self): """Return a unique ID.""" return self._api.mac @property def temperature_unit(self): """Return the unit of measurement which this thermostat uses.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self.get(ATTR_CURRENT_TEMPERATURE) @property def target_temperature(self): """Return the temperature we try to reach.""" return self.get(ATTR_TARGET_TEMPERATURE) @property def target_temperature_step(self): """Return the supported step of target temperature.""" return 1 def set_temperature(self, **kwargs): """Set new target temperature.""" self.set(kwargs) @property def current_operation(self): """Return current operation ie. heat, cool, idle.""" return self.get(ATTR_OPERATION_MODE) @property def operation_list(self): """Return the list of available operation modes.""" return self._list.get(ATTR_OPERATION_MODE) def set_operation_mode(self, operation_mode): """Set HVAC mode.""" self.set({ATTR_OPERATION_MODE: operation_mode}) @property def current_fan_mode(self): """Return the fan setting.""" return self.get(ATTR_FAN_MODE) def set_fan_mode(self, fan_mode): """Set fan mode.""" self.set({ATTR_FAN_MODE: fan_mode}) @property def fan_list(self): """List of available fan modes.""" return self._list.get(ATTR_FAN_MODE) @property def current_swing_mode(self): """Return the fan setting.""" return self.get(ATTR_SWING_MODE) def set_swing_mode(self, swing_mode): """Set new target temperature.""" self.set({ATTR_SWING_MODE: swing_mode}) @property def swing_list(self): """List of available swing modes.""" return self._list.get(ATTR_SWING_MODE) def update(self): """Retrieve latest state.""" self._api.update() @property def device_info(self): """Return a device description for device registry.""" return self._api.device_info
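For clarity, a standalone sketch of the translation set() performs, using a two-entry subset of the state map; the real method goes through HA_STATE_TO_DAIKIN and HA_ATTR_TO_DAIKIN and the attribute constants, so the literal keys here are illustrative:

# Subset of the HA-state -> Daikin-mode map, restated for the sketch.
HA_STATE_TO_DAIKIN_SKETCH = {'cool': 'cool', 'heat': 'hot'}

settings = {'operation_mode': 'heat', 'temperature': 22.5}
values = {
    'mode': HA_STATE_TO_DAIKIN_SKETCH[settings['operation_mode']],
    'stemp': str(int(settings['temperature'])),  # 22.5 -> '22'
}
print(values)  # {'mode': 'hot', 'stemp': '22'}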
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/daikin/climate.py
""" Binary sensors on Zigbee Home Automation networks. For more details on this platform, please refer to the documentation at https://home-assistant.io/components/binary_sensor.zha/ """ import logging from homeassistant.components.binary_sensor import DOMAIN, BinarySensorDevice from homeassistant.helpers.dispatcher import async_dispatcher_connect from .core.const import ( DATA_ZHA, DATA_ZHA_DISPATCHERS, ZHA_DISCOVERY_NEW, LISTENER_ON_OFF, LISTENER_LEVEL, LISTENER_ZONE, SIGNAL_ATTR_UPDATED, SIGNAL_MOVE_LEVEL, SIGNAL_SET_LEVEL, LISTENER_ATTRIBUTE, UNKNOWN, OPENING, ZONE, OCCUPANCY, ATTR_LEVEL, SENSOR_TYPE) from .entity import ZhaEntity _LOGGER = logging.getLogger(__name__) DEPENDENCIES = ['zha'] # Zigbee Cluster Library Zone Type to Home Assistant device class CLASS_MAPPING = { 0x000d: 'motion', 0x0015: 'opening', 0x0028: 'smoke', 0x002a: 'moisture', 0x002b: 'gas', 0x002d: 'vibration', } async def get_ias_device_class(listener): """Get the HA device class from the listener.""" zone_type = await listener.get_attribute_value('zone_type') return CLASS_MAPPING.get(zone_type) DEVICE_CLASS_REGISTRY = { UNKNOWN: None, OPENING: OPENING, ZONE: get_ias_device_class, OCCUPANCY: OCCUPANCY, } async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Old way of setting up Zigbee Home Automation binary sensors.""" pass async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Zigbee Home Automation binary sensor from config entry.""" async def async_discover(discovery_info): await _async_setup_entities(hass, config_entry, async_add_entities, [discovery_info]) unsub = async_dispatcher_connect( hass, ZHA_DISCOVERY_NEW.format(DOMAIN), async_discover) hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub) binary_sensors = hass.data.get(DATA_ZHA, {}).get(DOMAIN) if binary_sensors is not None: await _async_setup_entities(hass, config_entry, async_add_entities, binary_sensors.values()) del hass.data[DATA_ZHA][DOMAIN] async def _async_setup_entities(hass, config_entry, async_add_entities, discovery_infos): """Set up the ZHA binary sensors.""" entities = [] for discovery_info in discovery_infos: entities.append(BinarySensor(**discovery_info)) async_add_entities(entities, update_before_add=True) class BinarySensor(ZhaEntity, BinarySensorDevice): """ZHA BinarySensor.""" _domain = DOMAIN _device_class = None def __init__(self, **kwargs): """Initialize the ZHA binary sensor.""" super().__init__(**kwargs) self._device_state_attributes = {} self._zone_listener = self.cluster_listeners.get(LISTENER_ZONE) self._on_off_listener = self.cluster_listeners.get(LISTENER_ON_OFF) self._level_listener = self.cluster_listeners.get(LISTENER_LEVEL) self._attr_listener = self.cluster_listeners.get(LISTENER_ATTRIBUTE) self._zha_sensor_type = kwargs[SENSOR_TYPE] self._level = None async def _determine_device_class(self): """Determine the device class for this binary sensor.""" device_class_supplier = DEVICE_CLASS_REGISTRY.get( self._zha_sensor_type) if callable(device_class_supplier): listener = self.cluster_listeners.get(self._zha_sensor_type) if listener is None: return None return await device_class_supplier(listener) return device_class_supplier async def async_added_to_hass(self): """Run when about to be added to hass.""" self._device_class = await self._determine_device_class() await super().async_added_to_hass() if self._level_listener: await self.async_accept_signal( self._level_listener, SIGNAL_SET_LEVEL, self.set_level) await self.async_accept_signal( 
self._level_listener, SIGNAL_MOVE_LEVEL, self.move_level) if self._on_off_listener: await self.async_accept_signal( self._on_off_listener, SIGNAL_ATTR_UPDATED, self.async_set_state) if self._zone_listener: await self.async_accept_signal( self._zone_listener, SIGNAL_ATTR_UPDATED, self.async_set_state) if self._attr_listener: await self.async_accept_signal( self._attr_listener, SIGNAL_ATTR_UPDATED, self.async_set_state) @property def is_on(self) -> bool: """Return if the switch is on based on the statemachine.""" if self._state is None: return False return self._state @property def device_class(self) -> str: """Return device class from component DEVICE_CLASSES.""" return self._device_class def async_set_state(self, state): """Set the state.""" self._state = bool(state) self.async_schedule_update_ha_state() def move_level(self, change): """Increment the level, setting state if appropriate.""" level = self._level or 0 if not self._state and change > 0: level = 0 self._level = min(254, max(0, level + change)) self._state = bool(self._level) self.async_schedule_update_ha_state() def set_level(self, level): """Set the level, setting state if appropriate.""" self._level = level self._state = bool(level) self.async_schedule_update_ha_state() @property def device_state_attributes(self): """Return the device state attributes.""" if self._level_listener is not None: self._device_state_attributes.update({ ATTR_LEVEL: self._state and self._level or 0 }) return self._device_state_attributes
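A small self-contained sketch of the zone-type resolution above, driving get_ias_device_class() with a stand-in listener (hypothetical; real listeners come from the ZHA core and expose the same get_attribute_value coroutine):

import asyncio


class FakeZoneListener:
    """Stand-in that reports the IAS zone type for a motion sensor."""

    async def get_attribute_value(self, name):
        return 0x000d


loop = asyncio.get_event_loop()
print(loop.run_until_complete(
    get_ias_device_class(FakeZoneListener())))  # 'motion'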
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/zha/binary_sensor.py
"""Middleware to fetch real IP.""" from ipaddress import ip_address from aiohttp.web import middleware from aiohttp.hdrs import X_FORWARDED_FOR from homeassistant.core import callback from .const import KEY_REAL_IP @callback def setup_real_ip(app, use_x_forwarded_for, trusted_proxies): """Create IP Ban middleware for the app.""" @middleware async def real_ip_middleware(request, handler): """Real IP middleware.""" connected_ip = ip_address( request.transport.get_extra_info('peername')[0]) request[KEY_REAL_IP] = connected_ip # Only use the XFF header if enabled, present, and from a trusted proxy try: if (use_x_forwarded_for and X_FORWARDED_FOR in request.headers and any(connected_ip in trusted_proxy for trusted_proxy in trusted_proxies)): request[KEY_REAL_IP] = ip_address( request.headers.get(X_FORWARDED_FOR).split(', ')[-1]) except ValueError: pass return await handler(request) app.middlewares.append(real_ip_middleware)
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/http/real_ip.py
""" Support for IHC devices. For more details about this component, please refer to the documentation at https://home-assistant.io/components/ihc/ """ import logging import os.path import voluptuous as vol from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA from homeassistant.components.ihc.const import ( ATTR_IHC_ID, ATTR_VALUE, CONF_AUTOSETUP, CONF_BINARY_SENSOR, CONF_DIMMABLE, CONF_INFO, CONF_INVERTING, CONF_LIGHT, CONF_NODE, CONF_NOTE, CONF_POSITION, CONF_SENSOR, CONF_SWITCH, CONF_XPATH, SERVICE_SET_RUNTIME_VALUE_BOOL, SERVICE_SET_RUNTIME_VALUE_FLOAT, SERVICE_SET_RUNTIME_VALUE_INT) from homeassistant.config import load_yaml_config_file from homeassistant.const import ( CONF_ID, CONF_NAME, CONF_PASSWORD, CONF_TYPE, CONF_UNIT_OF_MEASUREMENT, CONF_URL, CONF_USERNAME, TEMP_CELSIUS) from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import HomeAssistantType REQUIREMENTS = ['ihcsdk==2.2.0', 'defusedxml==0.5.0'] _LOGGER = logging.getLogger(__name__) AUTO_SETUP_YAML = 'ihc_auto_setup.yaml' DOMAIN = 'ihc' IHC_CONTROLLER = 'controller' IHC_DATA = 'ihc{}' IHC_INFO = 'info' IHC_PLATFORMS = ('binary_sensor', 'light', 'sensor', 'switch') def validate_name(config): """Validate the device name.""" if CONF_NAME in config: return config ihcid = config[CONF_ID] name = 'ihc_{}'.format(ihcid) config[CONF_NAME] = name return config DEVICE_SCHEMA = vol.Schema({ vol.Required(CONF_ID): cv.positive_int, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_NOTE): cv.string, vol.Optional(CONF_POSITION): cv.string, }) SWITCH_SCHEMA = DEVICE_SCHEMA.extend({}) BINARY_SENSOR_SCHEMA = DEVICE_SCHEMA.extend({ vol.Optional(CONF_INVERTING, default=False): cv.boolean, vol.Optional(CONF_TYPE): DEVICE_CLASSES_SCHEMA, }) LIGHT_SCHEMA = DEVICE_SCHEMA.extend({ vol.Optional(CONF_DIMMABLE, default=False): cv.boolean, }) SENSOR_SCHEMA = DEVICE_SCHEMA.extend({ vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=TEMP_CELSIUS): cv.string, }) IHC_SCHEMA = vol.Schema({ vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_URL): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_AUTOSETUP, default=True): cv.boolean, vol.Optional(CONF_BINARY_SENSOR, default=[]): vol.All( cv.ensure_list, [vol.All(BINARY_SENSOR_SCHEMA, validate_name)]), vol.Optional(CONF_INFO, default=True): cv.boolean, vol.Optional(CONF_LIGHT, default=[]): vol.All(cv.ensure_list, [vol.All(LIGHT_SCHEMA, validate_name)]), vol.Optional(CONF_SENSOR, default=[]): vol.All(cv.ensure_list, [vol.All(SENSOR_SCHEMA, validate_name)]), vol.Optional(CONF_SWITCH, default=[]): vol.All(cv.ensure_list, [vol.All(SWITCH_SCHEMA, validate_name)]), }) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema(vol.All(cv.ensure_list, [IHC_SCHEMA])), }, extra=vol.ALLOW_EXTRA) AUTO_SETUP_SCHEMA = vol.Schema({ vol.Optional(CONF_BINARY_SENSOR, default=[]): vol.All(cv.ensure_list, [ vol.All({ vol.Required(CONF_NODE): cv.string, vol.Required(CONF_XPATH): cv.string, vol.Optional(CONF_INVERTING, default=False): cv.boolean, vol.Optional(CONF_TYPE): cv.string, }) ]), vol.Optional(CONF_LIGHT, default=[]): vol.All(cv.ensure_list, [ vol.All({ vol.Required(CONF_NODE): cv.string, vol.Required(CONF_XPATH): cv.string, vol.Optional(CONF_DIMMABLE, default=False): cv.boolean, }) ]), vol.Optional(CONF_SENSOR, default=[]): vol.All(cv.ensure_list, [ vol.All({ vol.Required(CONF_NODE): cv.string, vol.Required(CONF_XPATH): cv.string, vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=TEMP_CELSIUS): cv.string, }) ]), 
vol.Optional(CONF_SWITCH, default=[]): vol.All(cv.ensure_list, [ vol.All({ vol.Required(CONF_NODE): cv.string, vol.Required(CONF_XPATH): cv.string, }) ]), }) SET_RUNTIME_VALUE_BOOL_SCHEMA = vol.Schema({ vol.Required(ATTR_IHC_ID): cv.positive_int, vol.Required(ATTR_VALUE): cv.boolean, }) SET_RUNTIME_VALUE_INT_SCHEMA = vol.Schema({ vol.Required(ATTR_IHC_ID): cv.positive_int, vol.Required(ATTR_VALUE): int, }) SET_RUNTIME_VALUE_FLOAT_SCHEMA = vol.Schema({ vol.Required(ATTR_IHC_ID): cv.positive_int, vol.Required(ATTR_VALUE): vol.Coerce(float), }) def setup(hass, config): """Set up the IHC platform.""" conf = config.get(DOMAIN) for index, controller_conf in enumerate(conf): if not ihc_setup(hass, config, controller_conf, index): return False return True def ihc_setup(hass, config, conf, controller_id): """Set up the IHC component.""" from ihcsdk.ihccontroller import IHCController url = conf[CONF_URL] username = conf[CONF_USERNAME] password = conf[CONF_PASSWORD] ihc_controller = IHCController(url, username, password) if not ihc_controller.authenticate(): _LOGGER.error("Unable to authenticate on IHC controller") return False if (conf[CONF_AUTOSETUP] and not autosetup_ihc_products( hass, config, ihc_controller, controller_id)): return False # Manual configuration get_manual_configuration( hass, config, conf, ihc_controller, controller_id) # Store controller configuration ihc_key = IHC_DATA.format(controller_id) hass.data[ihc_key] = { IHC_CONTROLLER: ihc_controller, IHC_INFO: conf[CONF_INFO]} setup_service_functions(hass, ihc_controller) return True def get_manual_configuration( hass, config, conf, ihc_controller, controller_id): """Get manual configuration for IHC devices.""" for component in IHC_PLATFORMS: discovery_info = {} if component in conf: component_setup = conf.get(component) for sensor_cfg in component_setup: name = sensor_cfg[CONF_NAME] device = { 'ihc_id': sensor_cfg[CONF_ID], 'ctrl_id': controller_id, 'product': { 'name': name, 'note': sensor_cfg.get(CONF_NOTE) or '', 'position': sensor_cfg.get(CONF_POSITION) or ''}, 'product_cfg': { 'type': sensor_cfg.get(CONF_TYPE), 'inverting': sensor_cfg.get(CONF_INVERTING), 'dimmable': sensor_cfg.get(CONF_DIMMABLE), 'unit_of_measurement': sensor_cfg.get( CONF_UNIT_OF_MEASUREMENT) } } discovery_info[name] = device if discovery_info: discovery.load_platform( hass, component, DOMAIN, discovery_info, config) def autosetup_ihc_products(hass: HomeAssistantType, config, ihc_controller, controller_id): """Auto setup of IHC products from the IHC project file.""" from defusedxml import ElementTree project_xml = ihc_controller.get_project() if not project_xml: _LOGGER.error("Unable to read project from IHC controller") return False project = ElementTree.fromstring(project_xml) # if an auto setup file exist in the configuration it will override yaml_path = hass.config.path(AUTO_SETUP_YAML) if not os.path.isfile(yaml_path): yaml_path = os.path.join(os.path.dirname(__file__), AUTO_SETUP_YAML) yaml = load_yaml_config_file(yaml_path) try: auto_setup_conf = AUTO_SETUP_SCHEMA(yaml) except vol.Invalid as exception: _LOGGER.error("Invalid IHC auto setup data: %s", exception) return False groups = project.findall('.//group') for component in IHC_PLATFORMS: component_setup = auto_setup_conf[component] discovery_info = get_discovery_info( component_setup, groups, controller_id) if discovery_info: discovery.load_platform( hass, component, DOMAIN, discovery_info, config) return True def get_discovery_info(component_setup, groups, controller_id): """Get discovery info for 
specified IHC component.""" discovery_data = {} for group in groups: groupname = group.attrib['name'] for product_cfg in component_setup: products = group.findall(product_cfg[CONF_XPATH]) for product in products: nodes = product.findall(product_cfg[CONF_NODE]) for node in nodes: if ('setting' in node.attrib and node.attrib['setting'] == 'yes'): continue ihc_id = int(node.attrib['id'].strip('_'), 0) name = '{}_{}'.format(groupname, ihc_id) device = { 'ihc_id': ihc_id, 'ctrl_id': controller_id, 'product': { 'name': product.get('name') or '', 'note': product.get('note') or '', 'position': product.get('position') or ''}, 'product_cfg': product_cfg} discovery_data[name] = device return discovery_data def setup_service_functions(hass: HomeAssistantType, ihc_controller): """Set up the IHC service functions.""" def set_runtime_value_bool(call): """Set a IHC runtime bool value service function.""" ihc_id = call.data[ATTR_IHC_ID] value = call.data[ATTR_VALUE] ihc_controller.set_runtime_value_bool(ihc_id, value) def set_runtime_value_int(call): """Set a IHC runtime integer value service function.""" ihc_id = call.data[ATTR_IHC_ID] value = call.data[ATTR_VALUE] ihc_controller.set_runtime_value_int(ihc_id, value) def set_runtime_value_float(call): """Set a IHC runtime float value service function.""" ihc_id = call.data[ATTR_IHC_ID] value = call.data[ATTR_VALUE] ihc_controller.set_runtime_value_float(ihc_id, value) hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_BOOL, set_runtime_value_bool, schema=SET_RUNTIME_VALUE_BOOL_SCHEMA) hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_INT, set_runtime_value_int, schema=SET_RUNTIME_VALUE_INT_SCHEMA) hass.services.register(DOMAIN, SERVICE_SET_RUNTIME_VALUE_FLOAT, set_runtime_value_float, schema=SET_RUNTIME_VALUE_FLOAT_SCHEMA)
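A hedged example of invoking one of the services registered above, from code that already holds a hass reference (12345 is an illustrative IHC resource id, not a real one):

# Matches SET_RUNTIME_VALUE_BOOL_SCHEMA: a positive int id and a bool.
hass.services.call('ihc', 'set_runtime_value_bool',
                   {'ihc_id': 12345, 'value': True})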
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
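The helpers above all rely on the same trick: patch the condition helper's clock while firing a time-changed event, so that duration-based triggers evaluate against the simulated "now". A condensed sketch of that pattern on its own:

from datetime import timedelta
from unittest import mock

from tests.common import async_fire_time_changed


async def jump_ahead(hass, start_time, delta):
    """Advance Home Assistant's notion of time by delta."""
    with mock.patch('homeassistant.helpers.condition.dt_util.utcnow',
                    return_value=start_time + delta):
        # Duration checks now see start_time + delta as the current time.
        async_fire_time_changed(hass, start_time + delta)
        await hass.async_block_till_done()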
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/ihc/__init__.py
""" Support for Geofency. For more details about this component, please refer to the documentation at https://home-assistant.io/components/geofency/ """ import logging import voluptuous as vol from aiohttp import web import homeassistant.helpers.config_validation as cv from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER from homeassistant.const import HTTP_UNPROCESSABLE_ENTITY, STATE_NOT_HOME, \ ATTR_LATITUDE, ATTR_LONGITUDE, CONF_WEBHOOK_ID, HTTP_OK, ATTR_NAME from homeassistant.helpers import config_entry_flow from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.util import slugify _LOGGER = logging.getLogger(__name__) DOMAIN = 'geofency' DEPENDENCIES = ['webhook'] CONF_MOBILE_BEACONS = 'mobile_beacons' CONFIG_SCHEMA = vol.Schema({ vol.Optional(DOMAIN): vol.Schema({ vol.Optional(CONF_MOBILE_BEACONS, default=[]): vol.All( cv.ensure_list, [cv.string] ), }), }, extra=vol.ALLOW_EXTRA) ATTR_ADDRESS = 'address' ATTR_BEACON_ID = 'beaconUUID' ATTR_CURRENT_LATITUDE = 'currentLatitude' ATTR_CURRENT_LONGITUDE = 'currentLongitude' ATTR_DEVICE = 'device' ATTR_ENTRY = 'entry' BEACON_DEV_PREFIX = 'beacon' LOCATION_ENTRY = '1' LOCATION_EXIT = '0' TRACKER_UPDATE = '{}_tracker_update'.format(DOMAIN) def _address(value: str) -> str: r"""Coerce address by replacing '\n' with ' '.""" return value.replace('\n', ' ') WEBHOOK_SCHEMA = vol.Schema({ vol.Required(ATTR_ADDRESS): vol.All(cv.string, _address), vol.Required(ATTR_DEVICE): vol.All(cv.string, slugify), vol.Required(ATTR_ENTRY): vol.Any(LOCATION_ENTRY, LOCATION_EXIT), vol.Required(ATTR_LATITUDE): cv.latitude, vol.Required(ATTR_LONGITUDE): cv.longitude, vol.Required(ATTR_NAME): vol.All(cv.string, slugify), vol.Optional(ATTR_CURRENT_LATITUDE): cv.latitude, vol.Optional(ATTR_CURRENT_LONGITUDE): cv.longitude, vol.Optional(ATTR_BEACON_ID): cv.string }, extra=vol.ALLOW_EXTRA) async def async_setup(hass, hass_config): """Set up the Geofency component.""" config = hass_config.get(DOMAIN, {}) mobile_beacons = config.get(CONF_MOBILE_BEACONS, []) hass.data[DOMAIN] = [slugify(beacon) for beacon in mobile_beacons] return True async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook from Geofency.""" try: data = WEBHOOK_SCHEMA(dict(await request.post())) except vol.MultipleInvalid as error: return web.Response( body=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY ) if _is_mobile_beacon(data, hass.data[DOMAIN]): return _set_location(hass, data, None) if data['entry'] == LOCATION_ENTRY: location_name = data['name'] else: location_name = STATE_NOT_HOME if ATTR_CURRENT_LATITUDE in data: data[ATTR_LATITUDE] = data[ATTR_CURRENT_LATITUDE] data[ATTR_LONGITUDE] = data[ATTR_CURRENT_LONGITUDE] return _set_location(hass, data, location_name) def _is_mobile_beacon(data, mobile_beacons): """Check if we have a mobile beacon.""" return ATTR_BEACON_ID in data and data['name'] in mobile_beacons def _device_name(data): """Return name of device tracker.""" if ATTR_BEACON_ID in data: return "{}_{}".format(BEACON_DEV_PREFIX, data['name']) return data['device'] def _set_location(hass, data, location_name): """Fire HA event to set location.""" device = _device_name(data) async_dispatcher_send( hass, TRACKER_UPDATE, device, (data[ATTR_LATITUDE], data[ATTR_LONGITUDE]), location_name, data ) return web.Response( text="Setting location for {}".format(device), status=HTTP_OK ) async def async_setup_entry(hass, entry): """Configure based on config entry.""" hass.components.webhook.async_register( DOMAIN, 
'Geofency', entry.data[CONF_WEBHOOK_ID], handle_webhook) hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER) ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID]) await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER) return True config_entry_flow.register_webhook_flow( DOMAIN, 'Geofency Webhook', { 'docs_url': 'https://www.home-assistant.io/components/geofency/' } )
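For reference, here is a hypothetical entry payload of the shape the Geofency app POSTs, run through the schema above; the coordinates and UUID are made up:

from homeassistant.components.geofency import WEBHOOK_SCHEMA

sample = {
    'address': 'Testing Trail 1\nTest City',
    'device': '4A7FE356-2E9D-4264-A43F-BF80ECAEE416',
    'entry': '1',  # LOCATION_ENTRY
    'latitude': '47.599997',
    'longitude': '-122.299998',
    'name': 'Home',
}

# device and name are slugified, the coordinates are coerced to float,
# and the newline in the address is replaced with a space.
data = WEBHOOK_SCHEMA(sample)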
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/geofency/__init__.py
""" Platform for the Aladdin Connect cover component. For more details about this platform, please refer to the documentation https://home-assistant.io/components/cover.aladdin_connect/ """ import logging import voluptuous as vol from homeassistant.components.cover import (CoverDevice, PLATFORM_SCHEMA, SUPPORT_OPEN, SUPPORT_CLOSE) from homeassistant.const import (CONF_USERNAME, CONF_PASSWORD, STATE_CLOSED, STATE_OPENING, STATE_CLOSING, STATE_OPEN) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['aladdin_connect==0.3'] _LOGGER = logging.getLogger(__name__) NOTIFICATION_ID = 'aladdin_notification' NOTIFICATION_TITLE = 'Aladdin Connect Cover Setup' STATES_MAP = { 'open': STATE_OPEN, 'opening': STATE_OPENING, 'closed': STATE_CLOSED, 'closing': STATE_CLOSING } SUPPORTED_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Aladdin Connect platform.""" from aladdin_connect import AladdinConnectClient username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) acc = AladdinConnectClient(username, password) try: if not acc.login(): raise ValueError("Username or Password is incorrect") add_entities(AladdinDevice(acc, door) for door in acc.get_doors()) except (TypeError, KeyError, NameError, ValueError) as ex: _LOGGER.error("%s", ex) hass.components.persistent_notification.create( 'Error: {}<br />' 'You will need to restart hass after fixing.' ''.format(ex), title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID) class AladdinDevice(CoverDevice): """Representation of Aladdin Connect cover.""" def __init__(self, acc, device): """Initialize the cover.""" self._acc = acc self._device_id = device['device_id'] self._number = device['door_number'] self._name = device['name'] self._status = STATES_MAP.get(device['status']) @property def device_class(self): """Define this cover as a garage door.""" return 'garage' @property def supported_features(self): """Flag supported features.""" return SUPPORTED_FEATURES @property def unique_id(self): """Return a unique ID.""" return '{}-{}'.format(self._device_id, self._number) @property def name(self): """Return the name of the garage door.""" return self._name @property def is_opening(self): """Return if the cover is opening or not.""" return self._status == STATE_OPENING @property def is_closing(self): """Return if the cover is closing or not.""" return self._status == STATE_CLOSING @property def is_closed(self): """Return None if status is unknown, True if closed, else False.""" if self._status is None: return None return self._status == STATE_CLOSED def close_cover(self, **kwargs): """Issue close command to cover.""" self._acc.close_door(self._device_id, self._number) def open_cover(self, **kwargs): """Issue open command to cover.""" self._acc.open_door(self._device_id, self._number) def update(self): """Update status of cover.""" acc_status = self._acc.get_door_status(self._device_id, self._number) self._status = STATES_MAP.get(acc_status)
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/cover/aladdin_connect.py
""" This component provides HA lock support for Abode Security System. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/lock.abode/ """ import logging from homeassistant.components.abode import AbodeDevice, DOMAIN as ABODE_DOMAIN from homeassistant.components.lock import LockDevice DEPENDENCIES = ['abode'] _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Abode lock devices.""" import abodepy.helpers.constants as CONST data = hass.data[ABODE_DOMAIN] devices = [] for device in data.abode.get_devices(generic_type=CONST.TYPE_LOCK): if data.is_excluded(device): continue devices.append(AbodeLock(data, device)) data.devices.extend(devices) add_entities(devices) class AbodeLock(AbodeDevice, LockDevice): """Representation of an Abode lock.""" def lock(self, **kwargs): """Lock the device.""" self._device.lock() def unlock(self, **kwargs): """Unlock the device.""" self._device.unlock() @property def is_locked(self): """Return true if device is on.""" return self._device.is_locked
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/abode/lock.py
"""Component for interfacing to Lutron Homeworks Series 4 and 8 systems. For more details about this component, please refer to the documentation at https://home-assistant.io/components/homeworks/ """ import logging import voluptuous as vol from homeassistant.core import callback from homeassistant.const import ( CONF_HOST, CONF_ID, CONF_NAME, CONF_PORT, EVENT_HOMEASSISTANT_STOP) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import load_platform from homeassistant.helpers.dispatcher import ( dispatcher_send, async_dispatcher_connect) from homeassistant.util import slugify REQUIREMENTS = ['pyhomeworks==0.0.6'] _LOGGER = logging.getLogger(__name__) DOMAIN = 'homeworks' HOMEWORKS_CONTROLLER = 'homeworks' ENTITY_SIGNAL = 'homeworks_entity_{}' EVENT_BUTTON_PRESS = 'homeworks_button_press' EVENT_BUTTON_RELEASE = 'homeworks_button_release' CONF_DIMMERS = 'dimmers' CONF_KEYPADS = 'keypads' CONF_ADDR = 'addr' CONF_RATE = 'rate' FADE_RATE = 1. CV_FADE_RATE = vol.All(vol.Coerce(float), vol.Range(min=0, max=20)) DIMMER_SCHEMA = vol.Schema({ vol.Required(CONF_ADDR): cv.string, vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_RATE, default=FADE_RATE): CV_FADE_RATE }) KEYPAD_SCHEMA = vol.Schema({ vol.Required(CONF_ADDR): cv.string, vol.Required(CONF_NAME): cv.string, }) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.port, vol.Required(CONF_DIMMERS): vol.All(cv.ensure_list, [DIMMER_SCHEMA]), vol.Optional(CONF_KEYPADS, default=[]): vol.All(cv.ensure_list, [KEYPAD_SCHEMA]), }), }, extra=vol.ALLOW_EXTRA) def setup(hass, base_config): """Start Homeworks controller.""" from pyhomeworks.pyhomeworks import Homeworks def hw_callback(msg_type, values): """Dispatch state changes.""" _LOGGER.debug('callback: %s, %s', msg_type, values) addr = values[0] signal = ENTITY_SIGNAL.format(addr) dispatcher_send(hass, signal, msg_type, values) config = base_config.get(DOMAIN) controller = Homeworks(config[CONF_HOST], config[CONF_PORT], hw_callback) hass.data[HOMEWORKS_CONTROLLER] = controller def cleanup(event): controller.close() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup) dimmers = config[CONF_DIMMERS] load_platform(hass, 'light', DOMAIN, {CONF_DIMMERS: dimmers}, base_config) for key_config in config[CONF_KEYPADS]: addr = key_config[CONF_ADDR] name = key_config[CONF_NAME] HomeworksKeypadEvent(hass, addr, name) return True class HomeworksDevice(): """Base class of a Homeworks device.""" def __init__(self, controller, addr, name): """Controller, address, and name of the device.""" self._addr = addr self._name = name self._controller = controller @property def unique_id(self): """Return a unique identifier.""" return 'homeworks.{}'.format(self._addr) @property def name(self): """Device name.""" return self._name @property def should_poll(self): """No need to poll.""" return False class HomeworksKeypadEvent: """When you want signals instead of entities. Stateless sensors such as keypads are expected to generate an event instead of a sensor entity in hass. 
""" def __init__(self, hass, addr, name): """Register callback that will be used for signals.""" self._hass = hass self._addr = addr self._name = name self._id = slugify(self._name) signal = ENTITY_SIGNAL.format(self._addr) async_dispatcher_connect( self._hass, signal, self._update_callback) @callback def _update_callback(self, msg_type, values): """Fire events if button is pressed or released.""" from pyhomeworks.pyhomeworks import ( HW_BUTTON_PRESSED, HW_BUTTON_RELEASED) if msg_type == HW_BUTTON_PRESSED: event = EVENT_BUTTON_PRESS elif msg_type == HW_BUTTON_RELEASED: event = EVENT_BUTTON_RELEASE else: return data = {CONF_ID: self._id, CONF_NAME: self._name, 'button': values[1]} self._hass.bus.async_fire(event, data)
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/homeworks/__init__.py
""" Support for MAX! Window Shutter via MAX! Cube. For more details about this platform, please refer to the documentation https://home-assistant.io/components/maxcube/ """ import logging from homeassistant.components.binary_sensor import BinarySensorDevice from homeassistant.components.maxcube import DATA_KEY _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Iterate through all MAX! Devices and add window shutters.""" devices = [] for handler in hass.data[DATA_KEY].values(): cube = handler.cube for device in cube.devices: name = "{} {}".format( cube.room_by_id(device.room_id).name, device.name) # Only add Window Shutters if cube.is_windowshutter(device): devices.append( MaxCubeShutter(handler, name, device.rf_address)) if devices: add_entities(devices) class MaxCubeShutter(BinarySensorDevice): """Representation of a MAX! Cube Binary Sensor device.""" def __init__(self, handler, name, rf_address): """Initialize MAX! Cube BinarySensorDevice.""" self._name = name self._sensor_type = 'window' self._rf_address = rf_address self._cubehandle = handler self._state = None @property def should_poll(self): """Return the polling state.""" return True @property def name(self): """Return the name of the BinarySensorDevice.""" return self._name @property def device_class(self): """Return the class of this sensor.""" return self._sensor_type @property def is_on(self): """Return true if the binary sensor is on/open.""" return self._state def update(self): """Get latest data from MAX! Cube.""" self._cubehandle.update() device = self._cubehandle.cube.device_by_rf(self._rf_address) self._state = device.is_open
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/maxcube/binary_sensor.py
""" A sensor platform that give you information about next departures from Ruter. For more details about this platform, please refer to the documentation at https://www.home-assistant.io/components/sensor.ruter/ """ import logging import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME from homeassistant.helpers.entity import Entity from homeassistant.helpers.aiohttp_client import async_get_clientsession REQUIREMENTS = ['pyruter==1.1.0'] _LOGGER = logging.getLogger(__name__) CONF_STOP_ID = 'stop_id' CONF_DESTINATION = 'destination' CONF_OFFSET = 'offset' DEFAULT_NAME = 'Ruter' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_STOP_ID): cv.positive_int, vol.Optional(CONF_DESTINATION): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_OFFSET, default=0): cv.positive_int, }) async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Create the sensor.""" from pyruter.api import Departures _LOGGER.warning("The API used in this sensor is shutting down soon, " "you should consider starting to use the " "'entur_public_transport' sensor instead") stop_id = config[CONF_STOP_ID] destination = config.get(CONF_DESTINATION) name = config[CONF_NAME] offset = config[CONF_OFFSET] session = async_get_clientsession(hass) ruter = Departures(hass.loop, stop_id, destination, session) sensor = [RuterSensor(ruter, name, offset)] async_add_entities(sensor, True) class RuterSensor(Entity): """Representation of a Ruter sensor.""" def __init__(self, ruter, name, offset): """Initialize the sensor.""" self.ruter = ruter self._attributes = {} self._name = name self._offset = offset self._state = None async def async_update(self): """Get the latest data from the Ruter API.""" await self.ruter.get_departures() if self.ruter.departures is None: _LOGGER.error("No data recieved from Ruter.") return try: data = self.ruter.departures[self._offset] self._state = data['time'] self._attributes['line'] = data['line'] self._attributes['destination'] = data['destination'] except (KeyError, IndexError) as error: _LOGGER.debug("Error getting data from Ruter, %s", error) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Return the icon of the sensor.""" return 'mdi:bus' @property def device_state_attributes(self): """Return attributes for the sensor.""" return self._attributes
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/sensor/ruter.py
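A minimal standalone sketch of the pyruter calls the sensor above relies on, assuming the same API surface it uses (Departures, get_departures, .departures); the stop id 3010536 is a hypothetical example value:

import asyncio

import aiohttp
from pyruter.api import Departures


async def main(loop):
    async with aiohttp.ClientSession() as session:
        # Same positional arguments as in async_setup_platform above.
        ruter = Departures(loop, 3010536, None, session)
        await ruter.get_departures()
        if ruter.departures:
            data = ruter.departures[0]
            # Keys used by RuterSensor.async_update.
            print(data['time'], data['line'], data['destination'])


loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop))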
""" Support for MySensors lights. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/light.mysensors/ """ from homeassistant.components import mysensors from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, Light) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.util.color import rgb_hex_to_rgb_list import homeassistant.util.color as color_util SUPPORT_MYSENSORS_RGBW = SUPPORT_COLOR | SUPPORT_WHITE_VALUE async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up the mysensors platform for lights.""" device_class_map = { 'S_DIMMER': MySensorsLightDimmer, 'S_RGB_LIGHT': MySensorsLightRGB, 'S_RGBW_LIGHT': MySensorsLightRGBW, } mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, device_class_map, async_add_entities=async_add_entities) class MySensorsLight(mysensors.device.MySensorsEntity, Light): """Representation of a MySensors Light child node.""" def __init__(self, *args): """Initialize a MySensors Light.""" super().__init__(*args) self._state = None self._brightness = None self._hs = None self._white = None @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def hs_color(self): """Return the hs color value [int, int].""" return self._hs @property def white_value(self): """Return the white value of this light between 0..255.""" return self._white @property def assumed_state(self): """Return true if unable to access real state of entity.""" return self.gateway.optimistic @property def is_on(self): """Return true if device is on.""" return self._state def _turn_on_light(self): """Turn on light child device.""" set_req = self.gateway.const.SetReq if self._state: return self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_LIGHT, 1) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = True self._values[set_req.V_LIGHT] = STATE_ON def _turn_on_dimmer(self, **kwargs): """Turn on dimmer child device.""" set_req = self.gateway.const.SetReq brightness = self._brightness if ATTR_BRIGHTNESS not in kwargs or \ kwargs[ATTR_BRIGHTNESS] == self._brightness or \ set_req.V_DIMMER not in self._values: return brightness = kwargs[ATTR_BRIGHTNESS] percent = round(100 * brightness / 255) self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DIMMER, percent) if self.gateway.optimistic: # optimistically assume that light has changed state self._brightness = brightness self._values[set_req.V_DIMMER] = percent def _turn_on_rgb_and_w(self, hex_template, **kwargs): """Turn on RGB or RGBW child device.""" rgb = list(color_util.color_hs_to_RGB(*self._hs)) white = self._white hex_color = self._values.get(self.value_type) hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None: new_rgb = color_util.color_hs_to_RGB(*hs_color) else: new_rgb = None new_white = kwargs.get(ATTR_WHITE_VALUE) if new_rgb is None and new_white is None: return if new_rgb is not None: rgb = list(new_rgb) if hex_template == '%02x%02x%02x%02x': if new_white is not None: rgb.append(new_white) else: rgb.append(white) hex_color = hex_template % tuple(rgb) if len(rgb) > 3: white = rgb.pop() self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, hex_color) if self.gateway.optimistic: # optimistically assume that light has changed state self._hs = 
color_util.color_RGB_to_hs(*rgb) self._white = white self._values[self.value_type] = hex_color async def async_turn_off(self, **kwargs): """Turn the device off.""" value_type = self.gateway.const.SetReq.V_LIGHT self.gateway.set_child_value( self.node_id, self.child_id, value_type, 0) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = False self._values[value_type] = STATE_OFF self.async_schedule_update_ha_state() def _async_update_light(self): """Update the controller with values from light child.""" value_type = self.gateway.const.SetReq.V_LIGHT self._state = self._values[value_type] == STATE_ON def _async_update_dimmer(self): """Update the controller with values from dimmer child.""" value_type = self.gateway.const.SetReq.V_DIMMER if value_type in self._values: self._brightness = round(255 * int(self._values[value_type]) / 100) if self._brightness == 0: self._state = False def _async_update_rgb_or_w(self): """Update the controller with values from RGB or RGBW child.""" value = self._values[self.value_type] color_list = rgb_hex_to_rgb_list(value) if len(color_list) > 3: self._white = color_list.pop() self._hs = color_util.color_RGB_to_hs(*color_list) class MySensorsLightDimmer(MySensorsLight): """Dimmer child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) if self.gateway.optimistic: self.async_schedule_update_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() class MySensorsLightRGB(MySensorsLight): """RGB child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_COLOR return SUPPORT_COLOR async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w('%02x%02x%02x', **kwargs) if self.gateway.optimistic: self.async_schedule_update_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() self._async_update_rgb_or_w() class MySensorsLightRGBW(MySensorsLightRGB): """RGBW child class to MySensorsLightRGB.""" # pylint: disable=too-many-ancestors @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_MYSENSORS_RGBW return SUPPORT_MYSENSORS_RGBW async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w('%02x%02x%02x%02x', **kwargs) if self.gateway.optimistic: self.async_schedule_update_ha_state()
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/mysensors/light.py
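A quick illustration of the hex encoding _turn_on_rgb_and_w produces with its two templates: RGB children get a six-digit hex string, RGBW children append the white channel as a fourth byte. The channel values here are arbitrary examples:

rgb = [255, 128, 0]
print('%02x%02x%02x' % tuple(rgb))             # 'ff8000'   (S_RGB_LIGHT)
print('%02x%02x%02x%02x' % tuple(rgb + [64]))  # 'ff800040' (S_RGBW_LIGHT)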
"""Icon helper methods.""" from typing import Optional def icon_for_battery_level(battery_level: Optional[int] = None, charging: bool = False) -> str: """Return a battery icon valid identifier.""" icon = 'mdi:battery' if battery_level is None: return icon + '-unknown' if charging and battery_level > 10: icon += '-charging-{}'.format( int(round(battery_level / 20 - .01)) * 20) elif charging: icon += '-outline' elif battery_level <= 5: icon += '-alert' elif 5 < battery_level < 95: icon += '-{}'.format(int(round(battery_level / 10 - .01)) * 10) return icon
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/helpers/icon.py
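Expected outputs of icon_for_battery_level for a few sample inputs, derived by tracing the rounding logic above (the `- .01` nudges exact multiples down to the step below):

assert icon_for_battery_level() == 'mdi:battery-unknown'
assert icon_for_battery_level(5) == 'mdi:battery-alert'
assert icon_for_battery_level(45) == 'mdi:battery-40'   # 4.49 rounds down to 40
assert icon_for_battery_level(100) == 'mdi:battery'     # full battery, bare icon
assert icon_for_battery_level(8, charging=True) == 'mdi:battery-outline'
assert icon_for_battery_level(90, charging=True) == 'mdi:battery-charging-80'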
""" Component for monitoring activity on a folder. For more details about this platform, refer to the documentation at https://home-assistant.io/components/folder_watcher/ """ import os import logging import voluptuous as vol from homeassistant.const import ( EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['watchdog==0.8.3'] _LOGGER = logging.getLogger(__name__) CONF_FOLDER = 'folder' CONF_PATTERNS = 'patterns' DEFAULT_PATTERN = '*' DOMAIN = "folder_watcher" CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.All(cv.ensure_list, [vol.Schema({ vol.Required(CONF_FOLDER): cv.isdir, vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): vol.All(cv.ensure_list, [cv.string]), })]) }, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Set up the folder watcher.""" conf = config[DOMAIN] for watcher in conf: path = watcher[CONF_FOLDER] patterns = watcher[CONF_PATTERNS] if not hass.config.is_allowed_path(path): _LOGGER.error("folder %s is not valid or allowed", path) return False Watcher(path, patterns, hass) return True def create_event_handler(patterns, hass): """Return the Watchdog EventHandler object.""" from watchdog.events import PatternMatchingEventHandler class EventHandler(PatternMatchingEventHandler): """Class for handling Watcher events.""" def __init__(self, patterns, hass): """Initialise the EventHandler.""" super().__init__(patterns) self.hass = hass def process(self, event): """On Watcher event, fire HA event.""" _LOGGER.debug("process(%s)", event) if not event.is_directory: folder, file_name = os.path.split(event.src_path) self.hass.bus.fire( DOMAIN, { "event_type": event.event_type, 'path': event.src_path, 'file': file_name, 'folder': folder, }) def on_modified(self, event): """File modified.""" self.process(event) def on_moved(self, event): """File moved.""" self.process(event) def on_created(self, event): """File created.""" self.process(event) def on_deleted(self, event): """File deleted.""" self.process(event) return EventHandler(patterns, hass) class Watcher(): """Class for starting Watchdog.""" def __init__(self, path, patterns, hass): """Initialise the watchdog observer.""" from watchdog.observers import Observer self._observer = Observer() self._observer.schedule( create_event_handler(patterns, hass), path, recursive=True) hass.bus.listen_once(EVENT_HOMEASSISTANT_START, self.startup) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self.shutdown) def startup(self, event): """Start the watcher.""" self._observer.start() def shutdown(self, event): """Shutdown the watcher.""" self._observer.stop() self._observer.join()
"""The tests for the litejet component.""" import logging from unittest import mock from datetime import timedelta import pytest from homeassistant import setup import homeassistant.util.dt as dt_util from homeassistant.components import litejet import homeassistant.components.automation as automation from tests.common import (async_fire_time_changed, async_mock_service) _LOGGER = logging.getLogger(__name__) ENTITY_SWITCH = 'switch.mock_switch_1' ENTITY_SWITCH_NUMBER = 1 ENTITY_OTHER_SWITCH = 'switch.mock_switch_2' ENTITY_OTHER_SWITCH_NUMBER = 2 @pytest.fixture def calls(hass): """Track calls to a mock serivce.""" return async_mock_service(hass, 'test', 'automation') def get_switch_name(number): """Get a mock switch name.""" return "Mock Switch #"+str(number) @pytest.fixture def mock_lj(hass): """Initialize components.""" with mock.patch('pylitejet.LiteJet') as mock_pylitejet: mock_lj = mock_pylitejet.return_value mock_lj.switch_pressed_callbacks = {} mock_lj.switch_released_callbacks = {} def on_switch_pressed(number, callback): mock_lj.switch_pressed_callbacks[number] = callback def on_switch_released(number, callback): mock_lj.switch_released_callbacks[number] = callback mock_lj.loads.return_value = range(0) mock_lj.button_switches.return_value = range(1, 3) mock_lj.all_switches.return_value = range(1, 6) mock_lj.scenes.return_value = range(0) mock_lj.get_switch_name.side_effect = get_switch_name mock_lj.on_switch_pressed.side_effect = on_switch_pressed mock_lj.on_switch_released.side_effect = on_switch_released config = { 'litejet': { 'port': '/tmp/this_will_be_mocked' } } assert hass.loop.run_until_complete(setup.async_setup_component( hass, litejet.DOMAIN, config)) mock_lj.start_time = dt_util.utcnow() mock_lj.last_delta = timedelta(0) return mock_lj async def simulate_press(hass, mock_lj, number): """Test to simulate a press.""" _LOGGER.info('*** simulate press of %d', number) callback = mock_lj.switch_pressed_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_release(hass, mock_lj, number): """Test to simulate releasing.""" _LOGGER.info('*** simulate release of %d', number) callback = mock_lj.switch_released_callbacks.get(number) with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + mock_lj.last_delta): if callback is not None: await hass.async_add_job(callback) await hass.async_block_till_done() async def simulate_time(hass, mock_lj, delta): """Test to simulate time.""" _LOGGER.info( '*** simulate time change by %s: %s', delta, mock_lj.start_time + delta) mock_lj.last_delta = delta with mock.patch('homeassistant.helpers.condition.dt_util.utcnow', return_value=mock_lj.start_time + delta): _LOGGER.info('now=%s', dt_util.utcnow()) async_fire_time_changed(hass, mock_lj.start_time + delta) await hass.async_block_till_done() _LOGGER.info('done with now=%s', dt_util.utcnow()) async def setup_automation(hass, trigger): """Test setting up the automation.""" assert await setup.async_setup_component(hass, automation.DOMAIN, { automation.DOMAIN: [ { 'alias': 'My Test', 'trigger': trigger, 'action': { 'service': 'test.automation' } } ] }) await hass.async_block_till_done() async def test_simple(hass, calls, mock_lj): """Test the simplest form of a LiteJet trigger.""" await setup_automation(hass, { 'platform': 'litejet', 'number': 
ENTITY_OTHER_SWITCH_NUMBER }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_more_than_short(hass, calls, mock_lj): """Test a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_more_than_long(hass, calls, mock_lj): """Test a hold that is long enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 1 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_short(hass, calls, mock_lj): """Test a hold that is short enough.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.1)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_less_than_long(hass, calls, mock_lj): """Test a hold that is too long.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_less_than': { 'milliseconds': '200' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.3)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_short(hass, calls, mock_lj): """Test an in-range trigger with a too short hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_lj, timedelta(seconds=0.05)) await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 async def test_held_in_range_just_right(hass, calls, mock_lj): """Test an in-range trigger with a just right hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await simulate_time(hass, mock_lj, timedelta(seconds=0.2)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 1 async def test_held_in_range_long(hass, calls, mock_lj): """Test an in-range trigger with a too long hold.""" await setup_automation(hass, { 'platform': 'litejet', 'number': ENTITY_OTHER_SWITCH_NUMBER, 'held_more_than': { 'milliseconds': '100' }, 'held_less_than': { 'milliseconds': '300' } }) await simulate_press(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0 await 
simulate_time(hass, mock_lj, timedelta(seconds=0.4)) assert len(calls) == 0 await simulate_release(hass, mock_lj, ENTITY_OTHER_SWITCH_NUMBER) assert len(calls) == 0
PetePriority/home-assistant
tests/components/automation/test_litejet.py
homeassistant/components/folder_watcher/__init__.py
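A minimal sketch of how other code might consume the events fired by the component above; it assumes a `hass` instance is in scope, and `handle_file_event` is a hypothetical handler name. The payload keys match what EventHandler.process fires:

def handle_file_event(event):
    """Log every file change reported by folder_watcher."""
    data = event.data
    # Keys fired by EventHandler.process: event_type, path, file, folder.
    _LOGGER.info("%s: %s in %s", data['event_type'], data['file'],
                 data['folder'])


hass.bus.listen(DOMAIN, handle_file_event)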
"""Monitor Memory on a CFME/Miq appliance and builds report&graphs displaying usage per process.""" import json import os import time import traceback from collections import OrderedDict from datetime import datetime from threading import Thread import yaml from yaycl import AttrDict from cfme.utils.conf import cfme_performance from cfme.utils.log import logger from cfme.utils.path import results_path from cfme.utils.version import current_version from cfme.utils.version import get_version miq_workers = [ 'MiqGenericWorker', 'MiqPriorityWorker', 'MiqScheduleWorker', 'MiqUiWorker', 'MiqWebServiceWorker', 'MiqWebsocketWorker', 'MiqReportingWorker', 'MiqReplicationWorker', 'MiqSmartProxyWorker', 'MiqVimBrokerWorker', 'MiqEmsRefreshCoreWorker', # Refresh Workers: 'ManageIQ::Providers::Microsoft::InfraManager::RefreshWorker', 'ManageIQ::Providers::Openstack::InfraManager::RefreshWorker', 'ManageIQ::Providers::Redhat::InfraManager::RefreshWorker', 'ManageIQ::Providers::Vmware::InfraManager::RefreshWorker', 'MiqEmsRefreshWorkerMicrosoft', # 5.4 'MiqEmsRefreshWorkerRedhat', # 5.4 'MiqEmsRefreshWorkerVmware', # 5.4 'ManageIQ::Providers::Amazon::CloudManager::RefreshWorker', 'ManageIQ::Providers::Azure::CloudManager::RefreshWorker', 'ManageIQ::Providers::Google::CloudManager::RefreshWorker', 'ManageIQ::Providers::Openstack::CloudManager::RefreshWorker', 'MiqEmsRefreshWorkerAmazon', # 5.4 'MiqEmsRefreshWorkerOpenstack', # 5.4 'ManageIQ::Providers::AnsibleTower::ConfigurationManager::RefreshWorker', 'ManageIQ::Providers::Foreman::ConfigurationManager::RefreshWorker', 'ManageIQ::Providers::Foreman::ProvisioningManager::RefreshWorker', 'MiqEmsRefreshWorkerForemanConfiguration', # 5.4 'MiqEmsRefreshWorkerForemanProvisioning', # 5.4 'ManageIQ::Providers::Atomic::ContainerManager::RefreshWorker', 'ManageIQ::Providers::AtomicEnterprise::ContainerManager::RefreshWorker', 'ManageIQ::Providers::Kubernetes::ContainerManager::RefreshWorker', 'ManageIQ::Providers::Openshift::ContainerManager::RefreshWorker', 'ManageIQ::Providers::OpenshiftEnterprise::ContainerManager::RefreshWorker', 'ManageIQ::Providers::StorageManager::CinderManager::RefreshWorker', 'ManageIQ::Providers::StorageManager::SwiftManager::RefreshWorker', 'ManageIQ::Providers::Amazon::NetworkManager::RefreshWorker', 'ManageIQ::Providers::Azure::NetworkManager::RefreshWorker', 'ManageIQ::Providers::Google::NetworkManager::RefreshWorker', 'ManageIQ::Providers::Openstack::NetworkManager::RefreshWorker', 'MiqNetappRefreshWorker', 'MiqSmisRefreshWorker', # Event Workers: 'MiqEventHandler', 'ManageIQ::Providers::Openstack::InfraManager::EventCatcher', 'ManageIQ::Providers::StorageManager::CinderManager::EventCatcher', 'ManageIQ::Providers::Redhat::InfraManager::EventCatcher', 'ManageIQ::Providers::Vmware::InfraManager::EventCatcher', 'MiqEventCatcherRedhat', # 5.4 'MiqEventCatcherVmware', # 5.4 'ManageIQ::Providers::Amazon::CloudManager::EventCatcher', 'ManageIQ::Providers::Azure::CloudManager::EventCatcher', 'ManageIQ::Providers::Google::CloudManager::EventCatcher', 'ManageIQ::Providers::Openstack::CloudManager::EventCatcher', 'MiqEventCatcherAmazon', # 5.4 'MiqEventCatcherOpenstack', # 5.4 'ManageIQ::Providers::Atomic::ContainerManager::EventCatcher', 'ManageIQ::Providers::AtomicEnterprise::ContainerManager::EventCatcher', 'ManageIQ::Providers::Kubernetes::ContainerManager::EventCatcher', 'ManageIQ::Providers::Openshift::ContainerManager::EventCatcher', 'ManageIQ::Providers::OpenshiftEnterprise::ContainerManager::EventCatcher', 
'ManageIQ::Providers::Openstack::NetworkManager::EventCatcher', # Metrics Processor/Collector Workers 'MiqEmsMetricsProcessorWorker', 'ManageIQ::Providers::Openstack::InfraManager::MetricsCollectorWorker', 'ManageIQ::Providers::Redhat::InfraManager::MetricsCollectorWorker', 'ManageIQ::Providers::Vmware::InfraManager::MetricsCollectorWorker', 'MiqEmsMetricsCollectorWorkerRedhat', # 5.4 'MiqEmsMetricsCollectorWorkerVmware', # 5.4 'ManageIQ::Providers::Amazon::CloudManager::MetricsCollectorWorker', 'ManageIQ::Providers::Azure::CloudManager::MetricsCollectorWorker', 'ManageIQ::Providers::Openstack::CloudManager::MetricsCollectorWorker', 'MiqEmsMetricsCollectorWorkerAmazon', # 5.4 'MiqEmsMetricsCollectorWorkerOpenstack', # 5.4 'ManageIQ::Providers::Atomic::ContainerManager::MetricsCollectorWorker', 'ManageIQ::Providers::AtomicEnterprise::ContainerManager::MetricsCollectorWorker', 'ManageIQ::Providers::Kubernetes::ContainerManager::MetricsCollectorWorker', 'ManageIQ::Providers::Openshift::ContainerManager::MetricsCollectorWorker', 'ManageIQ::Providers::OpenshiftEnterprise::ContainerManager::MetricsCollectorWorker', 'ManageIQ::Providers::Openstack::NetworkManager::MetricsCollectorWorker', 'MiqStorageMetricsCollectorWorker', 'MiqVmdbStorageBridgeWorker'] ruby_processes = list(miq_workers) ruby_processes.extend(['evm:dbsync:replicate', 'MIQ Server (evm_server.rb)', 'evm_watchdog.rb', 'appliance_console.rb']) process_order = list(ruby_processes) process_order.extend(['memcached', 'postgres', 'httpd', 'collectd']) # Timestamp created at first import, thus grouping all reports of like workload test_ts = time.strftime('%Y%m%d%H%M%S') # 10s sample interval (occasionally sampling can take almost 4s on an appliance doing a lot of work) SAMPLE_INTERVAL = 10 class SmemMemoryMonitor(Thread): def __init__(self, ssh_client, scenario_data): super(SmemMemoryMonitor, self).__init__() self.ssh_client = ssh_client self.scenario_data = scenario_data self.grafana_urls = {} self.miq_server_id = '' self.use_slab = False self.signal = True def create_process_result(self, process_results, starttime, process_pid, process_name, memory_by_pid): if process_pid in list(memory_by_pid.keys()): if process_name not in process_results: process_results[process_name] = OrderedDict() process_results[process_name][process_pid] = OrderedDict() if process_pid not in process_results[process_name]: process_results[process_name][process_pid] = OrderedDict() process_results[process_name][process_pid][starttime] = {} rss_mem = memory_by_pid[process_pid]['rss'] pss_mem = memory_by_pid[process_pid]['pss'] uss_mem = memory_by_pid[process_pid]['uss'] vss_mem = memory_by_pid[process_pid]['vss'] swap_mem = memory_by_pid[process_pid]['swap'] process_results[process_name][process_pid][starttime]['rss'] = rss_mem process_results[process_name][process_pid][starttime]['pss'] = pss_mem process_results[process_name][process_pid][starttime]['uss'] = uss_mem process_results[process_name][process_pid][starttime]['vss'] = vss_mem process_results[process_name][process_pid][starttime]['swap'] = swap_mem del memory_by_pid[process_pid] else: logger.warning('Process {} PID, not found: {}'.format(process_name, process_pid)) def get_appliance_memory(self, appliance_results, plottime): # 5.5/5.6 - RHEL 7 / Centos 7 # Application Memory Used : MemTotal - (MemFree + Slab + Cached) # 5.4 - RHEL 6 / Centos 6 # Application Memory Used : MemTotal - (MemFree + Buffers + Cached) # Available memory could potentially be better metric appliance_results[plottime] = {} result = 
self.ssh_client.run_command('cat /proc/meminfo') if result.failed: logger.error('Exit_status nonzero in get_appliance_memory: {}, {}' .format(result.rc, result.output)) del appliance_results[plottime] else: meminfo_raw = result.output.replace('kB', '').strip() meminfo = OrderedDict((k.strip(), v.strip()) for k, v in (value.strip().split(':') for value in meminfo_raw.split('\n'))) appliance_results[plottime]['total'] = float(meminfo['MemTotal']) / 1024 appliance_results[plottime]['free'] = float(meminfo['MemFree']) / 1024 if 'MemAvailable' in meminfo: # 5.5, RHEL 7/Centos 7 self.use_slab = True mem_used = (float(meminfo['MemTotal']) - (float(meminfo['MemFree']) + float( meminfo['Slab']) + float(meminfo['Cached']))) / 1024 else: # 5.4, RHEL 6/Centos 6 mem_used = (float(meminfo['MemTotal']) - (float(meminfo['MemFree']) + float( meminfo['Buffers']) + float(meminfo['Cached']))) / 1024 appliance_results[plottime]['used'] = mem_used appliance_results[plottime]['buffers'] = float(meminfo['Buffers']) / 1024 appliance_results[plottime]['cached'] = float(meminfo['Cached']) / 1024 appliance_results[plottime]['slab'] = float(meminfo['Slab']) / 1024 appliance_results[plottime]['swap_total'] = float(meminfo['SwapTotal']) / 1024 appliance_results[plottime]['swap_free'] = float(meminfo['SwapFree']) / 1024 def get_evm_workers(self): result = self.ssh_client.run_command( 'psql -t -q -d vmdb_production -c ' '\"select pid,type from miq_workers where miq_server_id = \'{}\'\"'.format( self.miq_server_id)) if result.output.strip(): workers = {} for worker in result.output.strip().split('\n'): pid_worker = worker.strip().split('|') if len(pid_worker) == 2: workers[pid_worker[0].strip()] = pid_worker[1].strip() else: logger.error('Unexpected output from psql: {}'.format(worker)) return workers else: return {} # Old method of obtaining per process memory (Appliances without smem) # def get_pids_memory(self): # result = self.ssh_client.run_command( # 'ps -A -o pid,rss,vsz,comm,cmd | sed 1d') # pids_memory = result.output.strip().split('\n') # memory_by_pid = {} # for line in pids_memory: # values = [s for s in line.strip().split(' ') if s] # pid = values[0] # memory_by_pid[pid] = {} # memory_by_pid[pid]['rss'] = float(values[1]) / 1024 # memory_by_pid[pid]['vss'] = float(values[2]) / 1024 # memory_by_pid[pid]['name'] = values[3] # memory_by_pid[pid]['cmd'] = ' '.join(values[4:]) # return memory_by_pid def get_miq_server_id(self): # Obtain the Miq Server GUID: result = self.ssh_client.run_command('cat /var/www/miq/vmdb/GUID') logger.info('Obtained appliance GUID: {}'.format(result.output.strip())) # Get server id: result = self.ssh_client.run_command( 'psql -t -q -d vmdb_production -c "select id from miq_servers where guid = \'{}\'"' ''.format(result.output.strip())) logger.info('Obtained miq_server_id: {}'.format(result.output.strip())) self.miq_server_id = result.output.strip() def get_pids_memory(self): result = self.ssh_client.run_command( 'smem -c \'pid rss pss uss vss swap name command\' | sed 1d') pids_memory = result.output.strip().split('\n') memory_by_pid = {} for line in pids_memory: if line.strip(): try: values = [s for s in line.strip().split(' ') if s] pid = values[0] int(pid) memory_by_pid[pid] = {} memory_by_pid[pid]['rss'] = float(values[1]) / 1024 memory_by_pid[pid]['pss'] = float(values[2]) / 1024 memory_by_pid[pid]['uss'] = float(values[3]) / 1024 memory_by_pid[pid]['vss'] = float(values[4]) / 1024 memory_by_pid[pid]['swap'] = float(values[5]) / 1024 memory_by_pid[pid]['name'] = values[6] 
memory_by_pid[pid]['cmd'] = ' '.join(values[7:]) except Exception as e: logger.error('Processing smem output error: {}'.format(e.__class__.__name__, e)) logger.error('Issue with pid: {} line: {}'.format(pid, line)) logger.error('Complete smem output: {}'.format(result.output)) return memory_by_pid def _real_run(self): """ Result dictionaries: appliance_results[timestamp][measurement] = value appliance_results[timestamp]['total'] = value appliance_results[timestamp]['free'] = value appliance_results[timestamp]['used'] = value appliance_results[timestamp]['buffers'] = value appliance_results[timestamp]['cached'] = value appliance_results[timestamp]['slab'] = value appliance_results[timestamp]['swap_total'] = value appliance_results[timestamp]['swap_free'] = value appliance measurements: total/free/used/buffers/cached/slab/swap_total/swap_free process_results[name][pid][timestamp][measurement] = value process_results[name][pid][timestamp]['rss'] = value process_results[name][pid][timestamp]['pss'] = value process_results[name][pid][timestamp]['uss'] = value process_results[name][pid][timestamp]['vss'] = value process_results[name][pid][timestamp]['swap'] = value """ appliance_results = OrderedDict() process_results = OrderedDict() install_smem(self.ssh_client) self.get_miq_server_id() logger.info('Starting Monitoring Thread.') while self.signal: starttime = time.time() plottime = datetime.now() self.get_appliance_memory(appliance_results, plottime) workers = self.get_evm_workers() memory_by_pid = self.get_pids_memory() for worker_pid in workers: self.create_process_result(process_results, plottime, worker_pid, workers[worker_pid], memory_by_pid) for pid in sorted(memory_by_pid.keys()): if memory_by_pid[pid]['name'] == 'httpd': self.create_process_result(process_results, plottime, pid, 'httpd', memory_by_pid) elif memory_by_pid[pid]['name'] == 'postgres': self.create_process_result(process_results, plottime, pid, 'postgres', memory_by_pid) elif memory_by_pid[pid]['name'] == 'postmaster': self.create_process_result(process_results, plottime, pid, 'postgres', memory_by_pid) elif memory_by_pid[pid]['name'] == 'memcached': self.create_process_result(process_results, plottime, pid, 'memcached', memory_by_pid) elif memory_by_pid[pid]['name'] == 'collectd': self.create_process_result(process_results, plottime, pid, 'collectd', memory_by_pid) elif memory_by_pid[pid]['name'] == 'ruby': if 'evm_server.rb' in memory_by_pid[pid]['cmd']: self.create_process_result(process_results, plottime, pid, 'MIQ Server (evm_server.rb)', memory_by_pid) elif 'MIQ Server' in memory_by_pid[pid]['cmd']: self.create_process_result(process_results, plottime, pid, 'MIQ Server (evm_server.rb)', memory_by_pid) elif 'evm_watchdog.rb' in memory_by_pid[pid]['cmd']: self.create_process_result(process_results, plottime, pid, 'evm_watchdog.rb', memory_by_pid) elif 'appliance_console.rb' in memory_by_pid[pid]['cmd']: self.create_process_result(process_results, plottime, pid, 'appliance_console.rb', memory_by_pid) elif 'evm:dbsync:replicate' in memory_by_pid[pid]['cmd']: self.create_process_result(process_results, plottime, pid, 'evm:dbsync:replicate', memory_by_pid) else: logger.debug('Unaccounted for ruby pid: {}'.format(pid)) timediff = time.time() - starttime logger.debug('Monitoring sampled in {}s'.format(round(timediff, 4))) # Sleep Monitoring interval # Roughly 10s samples, accounts for collection of memory measurements time_to_sleep = abs(SAMPLE_INTERVAL - timediff) time.sleep(time_to_sleep) logger.info('Monitoring CFME Memory 
Terminating') create_report(self.scenario_data, appliance_results, process_results, self.use_slab, self.grafana_urls) def run(self): try: self._real_run() except Exception as e: logger.error('Error in Monitoring Thread: {}'.format(e)) logger.error('{}'.format(traceback.format_exc())) def install_smem(ssh_client): # smem is included by default in 5.6 appliances logger.info('Installing smem.') ver = get_version() if ver == '55': ssh_client.run_command('rpm -i {}'.format(cfme_performance['tools']['rpms']['epel7_rpm'])) ssh_client.run_command('yum install -y smem') # Patch smem to display longer command line names logger.info('Patching smem') ssh_client.run_command(r'sed -i s/\.27s/\.200s/g /usr/bin/smem') def create_report(scenario_data, appliance_results, process_results, use_slab, grafana_urls): logger.info('Creating Memory Monitoring Report.') ver = current_version() provider_names = 'No Providers' if 'providers' in scenario_data['scenario']: provider_names = ', '.join(scenario_data['scenario']['providers']) workload_path = results_path.join('{}-{}-{}'.format(test_ts, scenario_data['test_dir'], ver)) if not os.path.exists(str(workload_path)): os.makedirs(str(workload_path)) scenario_path = workload_path.join(scenario_data['scenario']['name']) if os.path.exists(str(scenario_path)): logger.warning('Duplicate Workload-Scenario Name: {}'.format(scenario_path)) scenario_path = workload_path.join('{}-{}'.format(time.strftime('%Y%m%d%H%M%S'), scenario_data['scenario']['name'])) logger.warning('Using: {}'.format(scenario_path)) os.mkdir(str(scenario_path)) mem_graphs_path = scenario_path.join('graphs') if not os.path.exists(str(mem_graphs_path)): os.mkdir(str(mem_graphs_path)) mem_rawdata_path = scenario_path.join('rawdata') if not os.path.exists(str(mem_rawdata_path)): os.mkdir(str(mem_rawdata_path)) graph_appliance_measurements(mem_graphs_path, ver, appliance_results, use_slab, provider_names) graph_individual_process_measurements(mem_graphs_path, process_results, provider_names) graph_same_miq_workers(mem_graphs_path, process_results, provider_names) graph_all_miq_workers(mem_graphs_path, process_results, provider_names) # Dump scenario Yaml: with open(str(scenario_path.join('scenario.yml')), 'w') as scenario_file: yaml.safe_dump(dict(scenario_data['scenario']), scenario_file, default_flow_style=False) generate_summary_csv(scenario_path.join('{}-summary.csv'.format(ver)), appliance_results, process_results, provider_names, ver) generate_raw_data_csv(mem_rawdata_path, appliance_results, process_results) generate_summary_html(scenario_path, ver, appliance_results, process_results, scenario_data, provider_names, grafana_urls) generate_workload_html(scenario_path, ver, scenario_data, provider_names, grafana_urls) logger.info('Finished Creating Report') def compile_per_process_results(procs_to_compile, process_results, ts_end): alive_pids = 0 recycled_pids = 0 total_running_rss = 0 total_running_pss = 0 total_running_uss = 0 total_running_vss = 0 total_running_swap = 0 for process in procs_to_compile: if process in process_results: for pid in process_results[process]: if ts_end in process_results[process][pid]: alive_pids += 1 total_running_rss += process_results[process][pid][ts_end]['rss'] total_running_pss += process_results[process][pid][ts_end]['pss'] total_running_uss += process_results[process][pid][ts_end]['uss'] total_running_vss += process_results[process][pid][ts_end]['vss'] total_running_swap += process_results[process][pid][ts_end]['swap'] else: recycled_pids += 1 return alive_pids, 
recycled_pids, total_running_rss, total_running_pss, total_running_uss, \ total_running_vss, total_running_swap def generate_raw_data_csv(directory, appliance_results, process_results): starttime = time.time() file_name = str(directory.join('appliance.csv')) with open(file_name, 'w') as csv_file: csv_file.write('TimeStamp,Total,Free,Used,Buffers,Cached,Slab,Swap_Total,Swap_Free\n') for ts in appliance_results: csv_file.write('{},{},{},{},{},{},{},{},{}\n'.format(ts, appliance_results[ts]['total'], appliance_results[ts]['free'], appliance_results[ts]['used'], appliance_results[ts]['buffers'], appliance_results[ts]['cached'], appliance_results[ts]['slab'], appliance_results[ts]['swap_total'], appliance_results[ts]['swap_free'])) for process_name in process_results: for process_pid in process_results[process_name]: file_name = str(directory.join('{}-{}.csv'.format(process_pid, process_name))) with open(file_name, 'w') as csv_file: csv_file.write('TimeStamp,RSS,PSS,USS,VSS,SWAP\n') for ts in process_results[process_name][process_pid]: csv_file.write('{},{},{},{},{},{}\n'.format(ts, process_results[process_name][process_pid][ts]['rss'], process_results[process_name][process_pid][ts]['pss'], process_results[process_name][process_pid][ts]['uss'], process_results[process_name][process_pid][ts]['vss'], process_results[process_name][process_pid][ts]['swap'])) timediff = time.time() - starttime logger.info('Generated Raw Data CSVs in: {}'.format(timediff)) def generate_summary_csv(file_name, appliance_results, process_results, provider_names, version_string): starttime = time.time() with open(str(file_name), 'w') as csv_file: csv_file.write('Version: {}, Provider(s): {}\n'.format(version_string, provider_names)) csv_file.write('Measurement,Start of test,End of test\n') start = list(appliance_results.keys())[0] end = list(appliance_results.keys())[-1] csv_file.write('Appliance Total Memory,{},{}\n'.format( round(appliance_results[start]['total'], 2), round(appliance_results[end]['total'], 2))) csv_file.write('Appliance Free Memory,{},{}\n'.format( round(appliance_results[start]['free'], 2), round(appliance_results[end]['free'], 2))) csv_file.write('Appliance Used Memory,{},{}\n'.format( round(appliance_results[start]['used'], 2), round(appliance_results[end]['used'], 2))) csv_file.write('Appliance Buffers,{},{}\n'.format( round(appliance_results[start]['buffers'], 2), round(appliance_results[end]['buffers'], 2))) csv_file.write('Appliance Cached,{},{}\n'.format( round(appliance_results[start]['cached'], 2), round(appliance_results[end]['cached'], 2))) csv_file.write('Appliance Slab,{},{}\n'.format( round(appliance_results[start]['slab'], 2), round(appliance_results[end]['slab'], 2))) csv_file.write('Appliance Total Swap,{},{}\n'.format( round(appliance_results[start]['swap_total'], 2), round(appliance_results[end]['swap_total'], 2))) csv_file.write('Appliance Free Swap,{},{}\n'.format( round(appliance_results[start]['swap_free'], 2), round(appliance_results[end]['swap_free'], 2))) summary_csv_measurement_dump(csv_file, process_results, 'rss') summary_csv_measurement_dump(csv_file, process_results, 'pss') summary_csv_measurement_dump(csv_file, process_results, 'uss') summary_csv_measurement_dump(csv_file, process_results, 'vss') summary_csv_measurement_dump(csv_file, process_results, 'swap') timediff = time.time() - starttime logger.info('Generated Summary CSV in: {}'.format(timediff)) def generate_summary_html(directory, version_string, appliance_results, process_results, scenario_data, 
provider_names, grafana_urls): starttime = time.time() file_name = str(directory.join('index.html')) with open(file_name, 'w') as html_file: html_file.write('<html>\n') html_file.write('<head><title>{} - {} Memory Usage Performance</title></head>'.format( version_string, provider_names)) html_file.write('<body>\n') html_file.write('<b>CFME {} {} Test Results</b><br>\n'.format(version_string, scenario_data['test_name'].title())) html_file.write('<b>Appliance Roles:</b> {}<br>\n'.format( scenario_data['appliance_roles'].replace(',', ', '))) html_file.write('<b>Provider(s):</b> {}<br>\n'.format(provider_names)) html_file.write('<b><a href=\'https://{}/\' target="_blank">{}</a></b>\n'.format( scenario_data['appliance_ip'], scenario_data['appliance_name'])) if grafana_urls: for g_name in sorted(grafana_urls.keys()): html_file.write( ' : <b><a href=\'{}\' target="_blank">{}</a></b>'.format(grafana_urls[g_name], g_name)) html_file.write('<br>\n') html_file.write('<b><a href=\'{}-summary.csv\'>Summary CSV</a></b>'.format(version_string)) html_file.write(' : <b><a href=\'workload.html\'>Workload Info</a></b>') html_file.write(' : <b><a href=\'graphs/\'>Graphs directory</a></b>\n') html_file.write(' : <b><a href=\'rawdata/\'>CSVs directory</a></b><br>\n') start = list(appliance_results.keys())[0] end = list(appliance_results.keys())[-1] timediff = end - start total_proc_count = 0 for proc_name in process_results: total_proc_count += len(list(process_results[proc_name].keys())) growth = appliance_results[end]['used'] - appliance_results[start]['used'] max_used_memory = 0 for ts in appliance_results: if appliance_results[ts]['used'] > max_used_memory: max_used_memory = appliance_results[ts]['used'] html_file.write('<table border="1">\n') html_file.write('<tr><td>\n') # Appliance Wide Results html_file.write('<table style="width:100%" border="1">\n') html_file.write('<tr>\n') html_file.write('<td><b>Version</b></td>\n') html_file.write('<td><b>Start Time</b></td>\n') html_file.write('<td><b>End Time</b></td>\n') html_file.write('<td><b>Total Test Time</b></td>\n') html_file.write('<td><b>Total Memory</b></td>\n') html_file.write('<td><b>Start Used Memory</b></td>\n') html_file.write('<td><b>End Used Memory</b></td>\n') html_file.write('<td><b>Used Memory Growth</b></td>\n') html_file.write('<td><b>Max Used Memory</b></td>\n') html_file.write('<td><b>Total Tracked Processes</b></td>\n') html_file.write('</tr>\n') html_file.write('<td><a href=\'rawdata/appliance.csv\'>{}</a></td>\n'.format( version_string)) html_file.write('<td>{}</td>\n'.format(start.replace(microsecond=0))) html_file.write('<td>{}</td>\n'.format(end.replace(microsecond=0))) html_file.write('<td>{}</td>\n'.format(str(timediff).partition('.')[0])) html_file.write('<td>{}</td>\n'.format(round(appliance_results[end]['total'], 2))) html_file.write('<td>{}</td>\n'.format(round(appliance_results[start]['used'], 2))) html_file.write('<td>{}</td>\n'.format(round(appliance_results[end]['used'], 2))) html_file.write('<td>{}</td>\n'.format(round(growth, 2))) html_file.write('<td>{}</td>\n'.format(round(max_used_memory, 2))) html_file.write('<td>{}</td>\n'.format(total_proc_count)) html_file.write('</table>\n') # CFME/Miq Worker Results html_file.write('<table style="width:100%" border="1">\n') html_file.write('<tr>\n') html_file.write('<td><b>Total CFME/Miq Workers</b></td>\n') html_file.write('<td><b>End Running Workers</b></td>\n') html_file.write('<td><b>Recycled Workers</b></td>\n') html_file.write('<td><b>End Total Worker RSS</b></td>\n') 
html_file.write('<td><b>End Total Worker PSS</b></td>\n') html_file.write('<td><b>End Total Worker USS</b></td>\n') html_file.write('<td><b>End Total Worker VSS</b></td>\n') html_file.write('<td><b>End Total Worker SWAP</b></td>\n') html_file.write('</tr>\n') a_pids, r_pids, t_rss, t_pss, t_uss, t_vss, t_swap = compile_per_process_results( miq_workers, process_results, end) html_file.write('<tr>\n') html_file.write('<td>{}</td>\n'.format(a_pids + r_pids)) html_file.write('<td>{}</td>\n'.format(a_pids)) html_file.write('<td>{}</td>\n'.format(r_pids)) html_file.write('<td>{}</td>\n'.format(round(t_rss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_pss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_uss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_vss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_swap, 2))) html_file.write('</tr>\n') html_file.write('</table>\n') # Per Process Summaries: html_file.write('<table style="width:100%" border="1">\n') html_file.write('<tr>\n') html_file.write('<td><b>Application/Process Group</b></td>\n') html_file.write('<td><b>Total Processes</b></td>\n') html_file.write('<td><b>End Running Processes</b></td>\n') html_file.write('<td><b>Recycled Processes</b></td>\n') html_file.write('<td><b>End Total Process RSS</b></td>\n') html_file.write('<td><b>End Total Process PSS</b></td>\n') html_file.write('<td><b>End Total Process USS</b></td>\n') html_file.write('<td><b>End Total Process VSS</b></td>\n') html_file.write('<td><b>End Total Process SWAP</b></td>\n') html_file.write('</tr>\n') a_pids, r_pids, t_rss, t_pss, t_uss, t_vss, t_swap = compile_per_process_results( ruby_processes, process_results, end) t_a_pids = a_pids t_r_pids = r_pids tt_rss = t_rss tt_pss = t_pss tt_uss = t_uss tt_vss = t_vss tt_swap = t_swap html_file.write('<tr>\n') html_file.write('<td>ruby</td>\n') html_file.write('<td>{}</td>\n'.format(a_pids + r_pids)) html_file.write('<td>{}</td>\n'.format(a_pids)) html_file.write('<td>{}</td>\n'.format(r_pids)) html_file.write('<td>{}</td>\n'.format(round(t_rss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_pss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_uss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_vss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_swap, 2))) html_file.write('</tr>\n') # memcached Summary a_pids, r_pids, t_rss, t_pss, t_uss, t_vss, t_swap = compile_per_process_results( ['memcached'], process_results, end) t_a_pids += a_pids t_r_pids += r_pids tt_rss += t_rss tt_pss += t_pss tt_uss += t_uss tt_vss += t_vss tt_swap += t_swap html_file.write('<tr>\n') html_file.write('<td>memcached</td>\n') html_file.write('<td>{}</td>\n'.format(a_pids + r_pids)) html_file.write('<td>{}</td>\n'.format(a_pids)) html_file.write('<td>{}</td>\n'.format(r_pids)) html_file.write('<td>{}</td>\n'.format(round(t_rss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_pss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_uss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_vss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_swap, 2))) html_file.write('</tr>\n') # Postgres Summary a_pids, r_pids, t_rss, t_pss, t_uss, t_vss, t_swap = compile_per_process_results( ['postgres'], process_results, end) t_a_pids += a_pids t_r_pids += r_pids tt_rss += t_rss tt_pss += t_pss tt_uss += t_uss tt_vss += t_vss tt_swap += t_swap html_file.write('<tr>\n') html_file.write('<td>postgres</td>\n') html_file.write('<td>{}</td>\n'.format(a_pids + r_pids)) html_file.write('<td>{}</td>\n'.format(a_pids)) 
html_file.write('<td>{}</td>\n'.format(r_pids)) html_file.write('<td>{}</td>\n'.format(round(t_rss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_pss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_uss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_vss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_swap, 2))) html_file.write('</tr>\n') # httpd Summary a_pids, r_pids, t_rss, t_pss, t_uss, t_vss, t_swap = compile_per_process_results(['httpd'], process_results, end) t_a_pids += a_pids t_r_pids += r_pids tt_rss += t_rss tt_pss += t_pss tt_uss += t_uss tt_vss += t_vss tt_swap += t_swap html_file.write('<tr>\n') html_file.write('<td>httpd</td>\n') html_file.write('<td>{}</td>\n'.format(a_pids + r_pids)) html_file.write('<td>{}</td>\n'.format(a_pids)) html_file.write('<td>{}</td>\n'.format(r_pids)) html_file.write('<td>{}</td>\n'.format(round(t_rss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_pss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_uss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_vss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_swap, 2))) html_file.write('</tr>\n') # collectd Summary a_pids, r_pids, t_rss, t_pss, t_uss, t_vss, t_swap = compile_per_process_results( ['collectd'], process_results, end) t_a_pids += a_pids t_r_pids += r_pids tt_rss += t_rss tt_pss += t_pss tt_uss += t_uss tt_vss += t_vss tt_swap += t_swap html_file.write('<tr>\n') html_file.write('<td>collectd</td>\n') html_file.write('<td>{}</td>\n'.format(a_pids + r_pids)) html_file.write('<td>{}</td>\n'.format(a_pids)) html_file.write('<td>{}</td>\n'.format(r_pids)) html_file.write('<td>{}</td>\n'.format(round(t_rss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_pss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_uss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_vss, 2))) html_file.write('<td>{}</td>\n'.format(round(t_swap, 2))) html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>total</td>\n') html_file.write('<td>{}</td>\n'.format(t_a_pids + t_r_pids)) html_file.write('<td>{}</td>\n'.format(t_a_pids)) html_file.write('<td>{}</td>\n'.format(t_r_pids)) html_file.write('<td>{}</td>\n'.format(round(tt_rss, 2))) html_file.write('<td>{}</td>\n'.format(round(tt_pss, 2))) html_file.write('<td>{}</td>\n'.format(round(tt_uss, 2))) html_file.write('<td>{}</td>\n'.format(round(tt_vss, 2))) html_file.write('<td>{}</td>\n'.format(round(tt_swap, 2))) html_file.write('</tr>\n') html_file.write('</table>\n') # Appliance Graph html_file.write('</td></tr><tr><td>\n') file_name = '{}-appliance_memory.png'.format(version_string) html_file.write('<img src=\'graphs/{}\'>\n'.format(file_name)) file_name = '{}-appliance_swap.png'.format(version_string) # Check for swap usage through out time frame: max_swap_used = 0 for ts in appliance_results: swap_used = appliance_results[ts]['swap_total'] - appliance_results[ts]['swap_free'] if swap_used > max_swap_used: max_swap_used = swap_used if max_swap_used < 10: # Less than 10MiB Max, then hide graph html_file.write('<br><a href=\'graphs/{}\'>Swap Graph '.format(file_name)) html_file.write('(Hidden, max_swap_used < 10 MiB)</a>\n') else: html_file.write('<img src=\'graphs/{}\'>\n'.format(file_name)) html_file.write('</td></tr><tr><td>\n') # Per Process Results html_file.write('<table style="width:100%" border="1"><tr>\n') html_file.write('<td><b>Process Name</b></td>\n') html_file.write('<td><b>Process Pid</b></td>\n') html_file.write('<td><b>Start Time</b></td>\n') html_file.write('<td><b>End 
Time</b></td>\n') html_file.write('<td><b>Time Alive</b></td>\n') html_file.write('<td><b>RSS Mem Start</b></td>\n') html_file.write('<td><b>RSS Mem End</b></td>\n') html_file.write('<td><b>RSS Mem Change</b></td>\n') html_file.write('<td><b>PSS Mem Start</b></td>\n') html_file.write('<td><b>PSS Mem End</b></td>\n') html_file.write('<td><b>PSS Mem Change</b></td>\n') html_file.write('<td><b>CSV</b></td>\n') html_file.write('</tr>\n') # By Worker Type Memory Used for ordered_name in process_order: if ordered_name in process_results: for pid in process_results[ordered_name]: start = list(process_results[ordered_name][pid].keys())[0] end = list(process_results[ordered_name][pid].keys())[-1] timediff = end - start html_file.write('<tr>\n') if len(process_results[ordered_name]) > 1: html_file.write('<td><a href=\'#{}\'>{}</a></td>\n'.format(ordered_name, ordered_name)) html_file.write('<td><a href=\'graphs/{}-{}.png\'>{}</a></td>\n'.format( ordered_name, pid, pid)) else: html_file.write('<td>{}</td>\n'.format(ordered_name)) html_file.write('<td><a href=\'#{}-{}.png\'>{}</a></td>\n'.format( ordered_name, pid, pid)) html_file.write('<td>{}</td>\n'.format(start.replace(microsecond=0))) html_file.write('<td>{}</td>\n'.format(end.replace(microsecond=0))) html_file.write('<td>{}</td>\n'.format(str(timediff).partition('.')[0])) rss_change = process_results[ordered_name][pid][end]['rss'] - \ process_results[ordered_name][pid][start]['rss'] html_file.write('<td>{}</td>\n'.format( round(process_results[ordered_name][pid][start]['rss'], 2))) html_file.write('<td>{}</td>\n'.format( round(process_results[ordered_name][pid][end]['rss'], 2))) html_file.write('<td>{}</td>\n'.format(round(rss_change, 2))) pss_change = process_results[ordered_name][pid][end]['pss'] - \ process_results[ordered_name][pid][start]['pss'] html_file.write('<td>{}</td>\n'.format( round(process_results[ordered_name][pid][start]['pss'], 2))) html_file.write('<td>{}</td>\n'.format( round(process_results[ordered_name][pid][end]['pss'], 2))) html_file.write('<td>{}</td>\n'.format(round(pss_change, 2))) html_file.write('<td><a href=\'rawdata/{}-{}.csv\'>csv</a></td>\n'.format( pid, ordered_name)) html_file.write('</tr>\n') else: logger.debug('Process/Worker not part of test: {}'.format(ordered_name)) html_file.write('</table>\n') # Worker Graphs for ordered_name in process_order: if ordered_name in process_results: html_file.write('<tr><td>\n') html_file.write('<div id=\'{}\'>Process name: {}</div><br>\n'.format( ordered_name, ordered_name)) if len(process_results[ordered_name]) > 1: file_name = '{}-all.png'.format(ordered_name) html_file.write('<img id=\'{}\' src=\'graphs/{}\'><br>\n'.format(file_name, file_name)) else: for pid in sorted(process_results[ordered_name]): file_name = '{}-{}.png'.format(ordered_name, pid) html_file.write('<img id=\'{}\' src=\'graphs/{}\'><br>\n'.format( file_name, file_name)) html_file.write('</td></tr>\n') html_file.write('</table>\n') html_file.write('</body>\n') html_file.write('</html>\n') timediff = time.time() - starttime logger.info('Generated Summary html in: {}'.format(timediff)) def generate_workload_html(directory, ver, scenario_data, provider_names, grafana_urls): starttime = time.time() file_name = str(directory.join('workload.html')) with open(file_name, 'w') as html_file: html_file.write('<html>\n') html_file.write('<head><title>{} - {}</title></head>'.format( scenario_data['test_name'], provider_names)) html_file.write('<body>\n') html_file.write('<b>CFME {} {} Test 
Results</b><br>\n'.format(ver, scenario_data['test_name'].title())) html_file.write('<b>Appliance Roles:</b> {}<br>\n'.format( scenario_data['appliance_roles'].replace(',', ', '))) html_file.write('<b>Provider(s):</b> {}<br>\n'.format(provider_names)) html_file.write('<b><a href=\'https://{}/\' target="_blank">{}</a></b>\n'.format( scenario_data['appliance_ip'], scenario_data['appliance_name'])) if grafana_urls: for g_name in sorted(grafana_urls.keys()): html_file.write( ' : <b><a href=\'{}\' target="_blank">{}</a></b>'.format(grafana_urls[g_name], g_name)) html_file.write('<br>\n') html_file.write('<b><a href=\'{}-summary.csv\'>Summary CSV</a></b>'.format(ver)) html_file.write(' : <b><a href=\'index.html\'>Memory Info</a></b>') html_file.write(' : <b><a href=\'graphs/\'>Graphs directory</a></b>\n') html_file.write(' : <b><a href=\'rawdata/\'>CSVs directory</a></b><br>\n') html_file.write('<br><b>Scenario Data: </b><br>\n') yaml_html = get_scenario_html(scenario_data['scenario']) html_file.write(yaml_html + '\n') html_file.write('<br>\n<br>\n<br>\n<b>Quantifier Data: </b>\n<br>\n<br>\n<br>\n<br>\n') html_file.write('<table border="1">\n') html_file.write('<tr>\n') html_file.write('<td><b><font size="4"> System Information</font></b></td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>\n') system_path = ('../version_info/system.csv') html_file.write('<a href="{}" download="System_Versions-{}-{}"> System Versions</a>' .format(system_path, test_ts, scenario_data['scenario']['name'])) html_file.write('</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>&nbsp</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>&nbsp</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td><b><font size="4"> Process Information</font></b></td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>\n') process_path = ('../version_info/processes.csv') html_file.write('<a href="{}" download="Process_Versions-{}-{}"> Process Versions</a>' .format(process_path, test_ts, scenario_data['scenario']['name'])) html_file.write('</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>&nbsp</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>&nbsp</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td><b><font size="4"> Ruby Gem Information</font></b></td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>\n') gems_path = ('../version_info/gems.csv') html_file.write('<a href="{}" download="Gem_Versions-{}-{}"> Ruby Gem Versions</a>' .format(gems_path, test_ts, scenario_data['scenario']['name'])) html_file.write('</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>&nbsp</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>&nbsp</td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td><b><font size="4"> RPM Information</font></b></td>\n') html_file.write('</tr>\n') html_file.write('<tr>\n') html_file.write('<td>\n') rpms_path = ('../version_info/rpms.csv') html_file.write('<a href="{}" download="RPM_Versions-{}-{}"> RPM Versions</a>' .format(rpms_path, test_ts, scenario_data['scenario']['name'])) html_file.write('</td>\n') html_file.write('</tr>\n') html_file.write('</table>\n') html_file.write('</body>\n') html_file.write('</html>\n') timediff = time.time() - starttime 
logger.info('Generated Workload html in: {}'.format(timediff)) def add_workload_quantifiers(quantifiers, scenario_data): starttime = time.time() ver = current_version() workload_path = results_path.join('{}-{}-{}'.format(test_ts, scenario_data['test_dir'], ver)) directory = workload_path.join(scenario_data['scenario']['name']) file_name = str(directory.join('workload.html')) marker = '<b>Quantifier Data: </b>' yaml_dict = quantifiers yaml_string = str(json.dumps(yaml_dict, indent=4)) yaml_html = yaml_string.replace('\n', '<br>\n') with open(file_name, 'r+') as html_file: line = '' while marker not in line: line = html_file.readline() marker_pos = html_file.tell() remainder = html_file.read() html_file.seek(marker_pos) html_file.write('{} \n'.format(yaml_html)) html_file.write(remainder) timediff = time.time() - starttime logger.info('Added quantifiers in: {}'.format(timediff)) def get_scenario_html(scenario_data): scenario_dict = create_dict(scenario_data) scenario_yaml = yaml.safe_dump(scenario_dict) scenario_html = scenario_yaml.replace('\n', '<br>\n') scenario_html = scenario_html.replace(', ', '<br>\n &nbsp;&nbsp;&nbsp;&nbsp;-&nbsp;') scenario_html = scenario_html.replace(' ', '&nbsp;') scenario_html = scenario_html.replace('[', '<br>\n &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;-&nbsp;') scenario_html = scenario_html.replace(']', '\n') return scenario_html def create_dict(attr_dict): main_dict = dict(attr_dict) for key, value in main_dict.items(): if type(value) == AttrDict: main_dict[key] = create_dict(value) return main_dict def graph_appliance_measurements(graphs_path, ver, appliance_results, use_slab, provider_names): import matplotlib as mpl mpl.use('Agg') import matplotlib.dates as mdates import matplotlib.pyplot as plt from cycler import cycler starttime = time.time() dates = list(appliance_results.keys()) total_memory_list = list(appliance_results[ts]['total'] for ts in appliance_results.keys()) free_memory_list = list(appliance_results[ts]['free'] for ts in appliance_results.keys()) used_memory_list = list(appliance_results[ts]['used'] for ts in appliance_results.keys()) buffers_memory_list = list(appliance_results[ts]['buffers'] for ts in appliance_results.keys()) cache_memory_list = list(appliance_results[ts]['cached'] for ts in appliance_results.keys()) slab_memory_list = list(appliance_results[ts]['slab'] for ts in appliance_results.keys()) swap_total_list = list(appliance_results[ts]['swap_total'] for ts in appliance_results.keys()) swap_free_list = list(appliance_results[ts]['swap_free'] for ts in appliance_results.keys()) # Stack Plot Memory Usage file_name = graphs_path.join('{}-appliance_memory.png'.format(ver)) mpl.rcParams['axes.prop_cycle'] = cycler('color', ['firebrick', 'coral', 'steelblue', 'forestgreen']) fig, ax = plt.subplots() plt.title('Provider(s): {}\nAppliance Memory'.format(provider_names)) plt.xlabel('Date / Time') plt.ylabel('Memory (MiB)') if use_slab: y = [used_memory_list, slab_memory_list, cache_memory_list, free_memory_list] else: y = [used_memory_list, buffers_memory_list, cache_memory_list, free_memory_list] plt.stackplot(dates, *y, baseline='zero') ax.annotate(str(round(total_memory_list[0], 2)), xy=(dates[0], total_memory_list[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(total_memory_list[-1], 2)), xy=(dates[-1], total_memory_list[-1]), xytext=(4, -4), textcoords='offset points') if use_slab: ax.annotate(str(round(slab_memory_list[0], 2)), xy=(dates[0], used_memory_list[0] + slab_memory_list[0]), xytext=(4, 4), textcoords='offset 
points') ax.annotate(str(round(slab_memory_list[-1], 2)), xy=(dates[-1], used_memory_list[-1] + slab_memory_list[-1]), xytext=(4, -4), textcoords='offset points') ax.annotate(str(round(cache_memory_list[0], 2)), xy=(dates[0], used_memory_list[0] + slab_memory_list[0] + cache_memory_list[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(cache_memory_list[-1], 2)), xy=( dates[-1], used_memory_list[-1] + slab_memory_list[-1] + cache_memory_list[-1]), xytext=(4, -4), textcoords='offset points') else: ax.annotate(str(round(buffers_memory_list[0], 2)), xy=( dates[0], used_memory_list[0] + buffers_memory_list[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(buffers_memory_list[-1], 2)), xy=(dates[-1], used_memory_list[-1] + buffers_memory_list[-1]), xytext=(4, -4), textcoords='offset points') ax.annotate(str(round(cache_memory_list[0], 2)), xy=(dates[0], used_memory_list[0] + buffers_memory_list[0] + cache_memory_list[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(cache_memory_list[-1], 2)), xy=( dates[-1], used_memory_list[-1] + buffers_memory_list[-1] + cache_memory_list[-1]), xytext=(4, -4), textcoords='offset points') ax.annotate(str(round(used_memory_list[0], 2)), xy=(dates[0], used_memory_list[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(used_memory_list[-1], 2)), xy=(dates[-1], used_memory_list[-1]), xytext=(4, -4), textcoords='offset points') datefmt = mdates.DateFormatter('%m-%d %H-%M') ax.xaxis.set_major_formatter(datefmt) ax.grid(True) p1 = plt.Rectangle((0, 0), 1, 1, fc='firebrick') p2 = plt.Rectangle((0, 0), 1, 1, fc='coral') p3 = plt.Rectangle((0, 0), 1, 1, fc='steelblue') p4 = plt.Rectangle((0, 0), 1, 1, fc='forestgreen') if use_slab: ax.legend([p1, p2, p3, p4], ['Used', 'Slab', 'Cached', 'Free'], bbox_to_anchor=(1.45, 0.22), fancybox=True) else: ax.legend([p1, p2, p3, p4], ['Used', 'Buffers', 'Cached', 'Free'], bbox_to_anchor=(1.45, 0.22), fancybox=True) fig.autofmt_xdate() plt.savefig(str(file_name), bbox_inches='tight') plt.close() # Stack Plot Swap usage mpl.rcParams['axes.prop_cycle'] = cycler('color', ['firebrick', 'forestgreen']) file_name = graphs_path.join('{}-appliance_swap.png'.format(ver)) fig, ax = plt.subplots() plt.title('Provider(s): {}\nAppliance Swap'.format(provider_names)) plt.xlabel('Date / Time') plt.ylabel('Swap (MiB)') swap_used_list = [t - f for f, t in zip(swap_free_list, swap_total_list)] y = [swap_used_list, swap_free_list] plt.stackplot(dates, *y, baseline='zero') ax.annotate(str(round(swap_total_list[0], 2)), xy=(dates[0], swap_total_list[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(swap_total_list[-1], 2)), xy=(dates[-1], swap_total_list[-1]), xytext=(4, -4), textcoords='offset points') ax.annotate(str(round(swap_used_list[0], 2)), xy=(dates[0], swap_used_list[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(swap_used_list[-1], 2)), xy=(dates[-1], swap_used_list[-1]), xytext=(4, -4), textcoords='offset points') datefmt = mdates.DateFormatter('%m-%d %H-%M') ax.xaxis.set_major_formatter(datefmt) ax.grid(True) p1 = plt.Rectangle((0, 0), 1, 1, fc='firebrick') p2 = plt.Rectangle((0, 0), 1, 1, fc='forestgreen') ax.legend([p1, p2], ['Used Swap', 'Free Swap'], bbox_to_anchor=(1.45, 0.22), fancybox=True) fig.autofmt_xdate() plt.savefig(str(file_name), bbox_inches='tight') plt.close() # Reset Colors mpl.rcdefaults() timediff = time.time() - starttime logger.info('Plotted Appliance Memory in: {}'.format(timediff)) def 
graph_all_miq_workers(graph_file_path, process_results, provider_names): import matplotlib as mpl mpl.use('Agg') import matplotlib.dates as mdates import matplotlib.pyplot as plt starttime = time.time() file_name = graph_file_path.join('all-processes.png') fig, ax = plt.subplots() plt.title('Provider(s): {}\nAll Workers/Monitored Processes'.format(provider_names)) plt.xlabel('Date / Time') plt.ylabel('Memory (MiB)') for process_name in process_results: if 'Worker' in process_name or 'Handler' in process_name or 'Catcher' in process_name: for process_pid in process_results[process_name]: dates = list(process_results[process_name][process_pid].keys()) rss_samples = list(process_results[process_name][process_pid][ts]['rss'] for ts in process_results[process_name][process_pid].keys()) vss_samples = list(process_results[process_name][process_pid][ts]['vss'] for ts in process_results[process_name][process_pid].keys()) plt.plot(dates, rss_samples, linewidth=1, label='{} {} RSS'.format(process_pid, process_name)) plt.plot(dates, vss_samples, linewidth=1, label='{} {} VSS'.format( process_pid, process_name)) datefmt = mdates.DateFormatter('%m-%d %H-%M') ax.xaxis.set_major_formatter(datefmt) ax.grid(True) plt.legend(loc='upper center', bbox_to_anchor=(1.2, 0.1), fancybox=True) fig.autofmt_xdate() plt.savefig(str(file_name), bbox_inches='tight') plt.close() timediff = time.time() - starttime logger.info('Plotted All Type/Process Memory in: {}'.format(timediff)) def graph_individual_process_measurements(graph_file_path, process_results, provider_names): import matplotlib as mpl mpl.use('Agg') import matplotlib.dates as mdates import matplotlib.pyplot as plt starttime = time.time() for process_name in process_results: for process_pid in process_results[process_name]: file_name = graph_file_path.join('{}-{}.png'.format(process_name, process_pid)) dates = list(process_results[process_name][process_pid].keys()) rss_samples = list(process_results[process_name][process_pid][ts]['rss'] for ts in process_results[process_name][process_pid].keys()) pss_samples = list(process_results[process_name][process_pid][ts]['pss'] for ts in process_results[process_name][process_pid].keys()) uss_samples = list(process_results[process_name][process_pid][ts]['uss'] for ts in process_results[process_name][process_pid].keys()) vss_samples = list(process_results[process_name][process_pid][ts]['vss'] for ts in process_results[process_name][process_pid].keys()) swap_samples = list(process_results[process_name][process_pid][ts]['swap'] for ts in process_results[process_name][process_pid].keys()) fig, ax = plt.subplots() plt.title('Provider(s)/Size: {}\nProcess/Worker: {}\nPID: {}'.format(provider_names, process_name, process_pid)) plt.xlabel('Date / Time') plt.ylabel('Memory (MiB)') plt.plot(dates, rss_samples, linewidth=1, label='RSS') plt.plot(dates, pss_samples, linewidth=1, label='PSS') plt.plot(dates, uss_samples, linewidth=1, label='USS') plt.plot(dates, vss_samples, linewidth=1, label='VSS') plt.plot(dates, swap_samples, linewidth=1, label='Swap') if rss_samples: ax.annotate(str(round(rss_samples[0], 2)), xy=(dates[0], rss_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(rss_samples[-1], 2)), xy=(dates[-1], rss_samples[-1]), xytext=(4, -4), textcoords='offset points') if pss_samples: ax.annotate(str(round(pss_samples[0], 2)), xy=(dates[0], pss_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(pss_samples[-1], 2)), xy=(dates[-1], pss_samples[-1]), xytext=(4, -4), 
textcoords='offset points') if uss_samples: ax.annotate(str(round(uss_samples[0], 2)), xy=(dates[0], uss_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(uss_samples[-1], 2)), xy=(dates[-1], uss_samples[-1]), xytext=(4, -4), textcoords='offset points') if vss_samples: ax.annotate(str(round(vss_samples[0], 2)), xy=(dates[0], vss_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(vss_samples[-1], 2)), xy=(dates[-1], vss_samples[-1]), xytext=(4, -4), textcoords='offset points') if swap_samples: ax.annotate(str(round(swap_samples[0], 2)), xy=(dates[0], swap_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(swap_samples[-1], 2)), xy=(dates[-1], swap_samples[-1]), xytext=(4, -4), textcoords='offset points') datefmt = mdates.DateFormatter('%m-%d %H-%M') ax.xaxis.set_major_formatter(datefmt) ax.grid(True) plt.legend(loc='upper center', bbox_to_anchor=(1.2, 0.1), fancybox=True) fig.autofmt_xdate() plt.savefig(str(file_name), bbox_inches='tight') plt.close() timediff = time.time() - starttime logger.info('Plotted Individual Process Memory in: {}'.format(timediff)) def graph_same_miq_workers(graph_file_path, process_results, provider_names): import matplotlib as mpl mpl.use('Agg') import matplotlib.dates as mdates import matplotlib.pyplot as plt starttime = time.time() for process_name in process_results: if len(process_results[process_name]) > 1: logger.debug('Plotting {} {} processes on single graph.'.format( len(process_results[process_name]), process_name)) file_name = graph_file_path.join('{}-all.png'.format(process_name)) fig, ax = plt.subplots() pids = 'PIDs: ' for i, pid in enumerate(process_results[process_name], 1): pids = '{}{}'.format(pids, '{},{}'.format(pid, [' ', '\n'][i % 6 == 0])) pids = pids[0:-2] plt.title('Provider: {}\nProcess/Worker: {}\n{}'.format(provider_names, process_name, pids)) plt.xlabel('Date / Time') plt.ylabel('Memory (MiB)') for process_pid in process_results[process_name]: dates = list(process_results[process_name][process_pid].keys()) rss_samples = list(process_results[process_name][process_pid][ts]['rss'] for ts in process_results[process_name][process_pid].keys()) pss_samples = list(process_results[process_name][process_pid][ts]['pss'] for ts in process_results[process_name][process_pid].keys()) uss_samples = list(process_results[process_name][process_pid][ts]['uss'] for ts in process_results[process_name][process_pid].keys()) vss_samples = list(process_results[process_name][process_pid][ts]['vss'] for ts in process_results[process_name][process_pid].keys()) swap_samples = list(process_results[process_name][process_pid][ts]['swap'] for ts in process_results[process_name][process_pid].keys()) plt.plot(dates, rss_samples, linewidth=1, label='{} RSS'.format(process_pid)) plt.plot(dates, pss_samples, linewidth=1, label='{} PSS'.format(process_pid)) plt.plot(dates, uss_samples, linewidth=1, label='{} USS'.format(process_pid)) plt.plot(dates, vss_samples, linewidth=1, label='{} VSS'.format(process_pid)) plt.plot(dates, swap_samples, linewidth=1, label='{} SWAP'.format(process_pid)) if rss_samples: ax.annotate(str(round(rss_samples[0], 2)), xy=(dates[0], rss_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(rss_samples[-1], 2)), xy=(dates[-1], rss_samples[-1]), xytext=(4, -4), textcoords='offset points') if pss_samples: ax.annotate(str(round(pss_samples[0], 2)), xy=(dates[0], pss_samples[0]), xytext=(4, 4), textcoords='offset points') 
ax.annotate(str(round(pss_samples[-1], 2)), xy=(dates[-1], pss_samples[-1]), xytext=(4, -4), textcoords='offset points') if uss_samples: ax.annotate(str(round(uss_samples[0], 2)), xy=(dates[0], uss_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(uss_samples[-1], 2)), xy=(dates[-1], uss_samples[-1]), xytext=(4, -4), textcoords='offset points') if vss_samples: ax.annotate(str(round(vss_samples[0], 2)), xy=(dates[0], vss_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(vss_samples[-1], 2)), xy=(dates[-1], vss_samples[-1]), xytext=(4, -4), textcoords='offset points') if swap_samples: ax.annotate(str(round(swap_samples[0], 2)), xy=(dates[0], swap_samples[0]), xytext=(4, 4), textcoords='offset points') ax.annotate(str(round(swap_samples[-1], 2)), xy=(dates[-1], swap_samples[-1]), xytext=(4, -4), textcoords='offset points') datefmt = mdates.DateFormatter('%m-%d %H-%M') ax.xaxis.set_major_formatter(datefmt) ax.grid(True) plt.legend(loc='upper center', bbox_to_anchor=(1.2, 0.1), fancybox=True) fig.autofmt_xdate() plt.savefig(str(file_name), bbox_inches='tight') plt.close() timediff = time.time() - starttime logger.info('Plotted Same Type/Process Memory in: {}'.format(timediff)) def summary_csv_measurement_dump(csv_file, process_results, measurement): csv_file.write('---------------------------------------------\n') csv_file.write('Per Process {} Memory Usage\n'.format(measurement.upper())) csv_file.write('---------------------------------------------\n') csv_file.write('Process/Worker Type,PID,Start of test,End of test\n') for ordered_name in process_order: if ordered_name in process_results: for process_pid in sorted(process_results[ordered_name]): start = list(process_results[ordered_name][process_pid].keys())[0] end = list(process_results[ordered_name][process_pid].keys())[-1] csv_file.write('{},{},{},{}\n'.format(ordered_name, process_pid, round(process_results[ordered_name][process_pid][start][measurement], 2), round(process_results[ordered_name][process_pid][end][measurement], 2)))
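

# Illustrative demo (a minimal sketch, not used by the monitor itself): shows the
# process_results layout documented in _real_run() and how
# compile_per_process_results() tallies alive vs. recycled pids. All data below
# is synthetic; only the module's own compile_per_process_results() is exercised.
def _demo_compile_per_process_results():
    from collections import OrderedDict
    from datetime import datetime

    ts0 = datetime(2019, 1, 1, 0, 0, 0)
    ts1 = datetime(2019, 1, 1, 0, 0, 10)
    sample = {'rss': 100.0, 'pss': 80.0, 'uss': 60.0, 'vss': 300.0, 'swap': 0.0}
    process_results = OrderedDict()
    process_results['httpd'] = {
        1234: OrderedDict([(ts0, dict(sample)), (ts1, dict(sample))]),  # alive at ts1
        1235: OrderedDict([(ts0, dict(sample))]),  # no ts_end sample -> recycled
    }
    alive, recycled, rss, pss, uss, vss, swap = compile_per_process_results(
        ['httpd'], process_results, ts1)
    assert (alive, recycled) == (1, 1)
    assert (rss, pss, uss, vss, swap) == (100.0, 80.0, 60.0, 300.0, 0.0)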
# -*- coding: utf-8 -*- # These tests don't work at the moment, due to the security_groups multi select not working # in selenium (the group is selected then immediately reset) import pytest from cfme.cloud.provider import CloudProvider from cfme.cloud.provider.azure import AzureProvider from cfme.cloud.provider.openstack import OpenStackProvider from cfme.infrastructure.provider import InfraProvider from cfme.infrastructure.provider.rhevm import RHEVMProvider from cfme.infrastructure.provider.scvmm import SCVMMProvider from cfme.infrastructure.pxe import get_template_from_config from cfme.markers.env_markers.provider import providers from cfme.utils import ssh from cfme.utils.generators import random_vm_name from cfme.utils.log import logger from cfme.utils.providers import ProviderFilter from cfme.utils.wait import wait_for pf1 = ProviderFilter(classes=[CloudProvider, InfraProvider], required_fields=[['provisioning', 'ci-template']]) pf2 = ProviderFilter(classes=[SCVMMProvider], inverted=True) # SCVMM doesn't support cloud-init pytestmark = [ pytest.mark.meta(server_roles="+automate"), pytest.mark.provider(gen_func=providers, filters=[pf1, pf2], scope="module") ] def find_global_ipv6(vm): """ Find global IPv6 on a VM if present. Args: vm: InfraVm object Returns: IPv6 as a string if found, False otherwise """ all_ips = vm.mgmt.all_ips for ip in all_ips: if ':' in ip and not ip.startswith('fe80'): return ip return False @pytest.fixture(scope="module") def setup_ci_template(provider, appliance): cloud_init_template_name = provider.data['provisioning']['ci-template'] get_template_from_config( cloud_init_template_name, create=True, appliance=appliance) @pytest.fixture() def vm_name(): return random_vm_name('ci') @pytest.mark.rhv2 @pytest.mark.tier(3) def test_provision_cloud_init(appliance, request, setup_provider, provider, provisioning, setup_ci_template, vm_name): """ Tests provisioning from a template with cloud_init Metadata: test_flag: cloud_init, provision Bugzilla: 1619744 Polarion: assignee: jhenner initialEstimate: 1/4h casecomponent: Provisioning """ image = provisioning.get('ci-image') or provisioning['image']['name'] note = ('Testing provisioning from image {} to vm {} on provider {}'.format( image, vm_name, provider.key)) logger.info(note) mgmt_system = provider.mgmt inst_args = { 'request': {'notes': note}, 'customize': {'custom_template': {'name': provisioning['ci-template']}} } # for image selection in before_fill inst_args['template_name'] = image if provider.one_of(AzureProvider): inst_args['environment'] = {'public_ip_address': "New"} if provider.one_of(OpenStackProvider): ip_pool = provider.data['public_network'] floating_ip = mgmt_system.get_first_floating_ip(pool=ip_pool) provider.refresh_provider_relationships() inst_args['environment'] = {'public_ip_address': floating_ip} if provider.one_of(InfraProvider) and appliance.version > '5.9': inst_args['customize']['customize_type'] = 'Specification' logger.info('Instance args: {}'.format(inst_args)) collection = appliance.provider_based_collection(provider) instance = collection.create(vm_name, provider, form_values=inst_args) request.addfinalizer(instance.cleanup_on_provider) provision_request = provider.appliance.collections.requests.instantiate(vm_name, partial_check=True) provision_request.wait_for_request() wait_for(lambda: instance.ip_address is not None, num_sec=600) connect_ip = instance.ip_address assert connect_ip, "VM has no IP" # Check that we can at least get the uptime via ssh this should only be possible # if 
the username and password have been set via the cloud-init script so # is a valid check with ssh.SSHClient(hostname=connect_ip, username=provisioning['ci-username'], password=provisioning['ci-pass']) as ssh_client: wait_for(ssh_client.uptime, num_sec=200, handle_exception=True) @pytest.mark.rhv3 @pytest.mark.provider([RHEVMProvider], override=True) def test_provision_cloud_init_payload(appliance, request, setup_provider, provider, provisioning, vm_name): """ Tests that options specified in VM provisioning dialog in UI are properly passed as a cloud-init payload to the newly provisioned VM. Metadata: test_flag: cloud_init, provision Polarion: assignee: jhenner initialEstimate: 1/4h casecomponent: Provisioning """ image = provisioning.get('ci-image', None) if not image: pytest.skip('No ci-image found in provider specification.') note = ('Testing provisioning from image {image} to vm {vm} on provider {provider}'.format( image=image, vm=vm_name, provider=provider.key)) logger.info(note) ci_payload = { 'root_password': 'mysecret', 'address_mode': 'Static', 'hostname': 'cimachine', 'ip_address': '169.254.0.1', 'subnet_mask': '29', 'gateway': '169.254.0.2', 'dns_servers': '169.254.0.3', 'dns_suffixes': 'virt.lab.example.com', 'custom_template': {'name': 'oVirt cloud-init'} } inst_args = { 'request': {'notes': note}, 'customize': {'customize_type': 'Specification'}, 'template_name': image } inst_args['customize'].update(ci_payload) logger.info('Instance args: {}'.format(inst_args)) # Provision VM collection = appliance.provider_based_collection(provider) instance = collection.create(vm_name, provider, form_values=inst_args) request.addfinalizer(instance.cleanup_on_provider) provision_request = provider.appliance.collections.requests.instantiate(vm_name, partial_check=True) provision_request.wait_for_request() connect_ip = wait_for(find_global_ipv6, func_args=[instance], num_sec=600, delay=20).out logger.info('Connect IP: {}'.format(connect_ip)) # Connect to the newly provisioned VM with ssh.SSHClient(hostname=connect_ip, username='root', password=ci_payload['root_password']) as ssh_client: # Check that correct hostname has been set hostname_cmd = ssh_client.run_command('hostname') assert hostname_cmd.success assert hostname_cmd.output.strip() == ci_payload['hostname'] # Obtain network configuration script for eth0 and store it in a list network_cfg_cmd = ssh_client.run_command('cat /etc/sysconfig/network-scripts/ifcfg-eth0') assert network_cfg_cmd.success config_list = network_cfg_cmd.output.split('\n') # Compare contents of network script with cloud-init payload assert 'BOOTPROTO=none' in config_list, 'Address mode was not set to static' assert 'IPADDR={}'.format(ci_payload['ip_address']) in config_list assert 'PREFIX={}'.format(ci_payload['subnet_mask']) in config_list assert 'GATEWAY={}'.format(ci_payload['gateway']) in config_list assert 'DNS1={}'.format(ci_payload['dns_servers']) in config_list assert 'DOMAIN={}'.format(ci_payload['dns_suffixes']) in config_list
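

# Illustrative helper (a minimal sketch, not used by the tests above): parsing the
# ifcfg script into a dict would make the network assertions independent of line
# order and quoting, compared to the raw 'KEY=value' substring checks used above.
def parse_ifcfg(output):
    """Parse KEY=value lines of an ifcfg network script into a dict."""
    config = {}
    for raw_line in output.splitlines():
        stripped = raw_line.strip()
        if stripped and not stripped.startswith('#') and '=' in stripped:
            key, _, value = stripped.partition('=')
            config[key] = value.strip('"')
    return config
# e.g.: parse_ifcfg(network_cfg_cmd.output)['IPADDR'] == ci_payload['ip_address']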
# repo_name: Yadnyawalkya/integration_tests
# test_path: cfme/tests/cloud_infra_common/test_cloud_init_provisioning.py
# code_path: cfme/utils/smem_memory_monitor.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Define the Enhanced Character-Separated-Values (ECSV) which allows for reading and writing all the meta data associated with an astropy Table object. """ import re from collections import OrderedDict import warnings import json import numpy as np from . import core, basic from astropy.table import meta, serialize from astropy.utils.data_info import serialize_context_as from astropy.utils.exceptions import AstropyUserWarning, AstropyWarning from astropy.io.ascii.core import convert_numpy __doctest_requires__ = {'Ecsv': ['yaml']} ECSV_VERSION = '1.0' DELIMITERS = (' ', ',') ECSV_DATATYPES = ( 'bool', 'int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float16', 'float32', 'float64', 'float128', 'string') class EcsvHeader(basic.BasicHeader): """Header class for which the column definition line starts with the comment character. See the :class:`CommentedHeader` class for an example. """ def process_lines(self, lines): """Return only non-blank lines that start with the comment regexp. For these lines strip out the matching characters and leading/trailing whitespace.""" re_comment = re.compile(self.comment) for line in lines: line = line.strip() if not line: continue match = re_comment.match(line) if match: out = line[match.end():] if out: yield out else: # Stop iterating on first failed match for a non-blank line return def write(self, lines): """ Write header information in the ECSV ASCII format. This function is called at the point when preprocessing has been done to convert the input table columns to `self.cols` which is a list of `astropy.io.ascii.core.Column` objects. In particular `col.str_vals` is available for each column with the string representation of each column item for output. This format starts with a delimiter separated list of the column names in order to make this format readable by humans and simple csv-type readers. It then encodes the full table meta and column attributes and meta as YAML and pretty-prints this in the header. Finally the delimited column names are repeated again, for humans and readers that look for the *last* comment line as defining the column names. """ if self.splitter.delimiter not in DELIMITERS: raise ValueError('only space and comma are allowed for delimiter in ECSV format') # Now assemble the header dict that will be serialized by the YAML dumper header = {'cols': self.cols, 'schema': 'astropy-2.0'} if self.table_meta: header['meta'] = self.table_meta # Set the delimiter only for the non-default option(s) if self.splitter.delimiter != ' ': header['delimiter'] = self.splitter.delimiter header_yaml_lines = ([f'%ECSV {ECSV_VERSION}', '---'] + meta.get_yaml_from_header(header)) lines.extend([self.write_comment + line for line in header_yaml_lines]) lines.append(self.splitter.join([x.info.name for x in self.cols])) def write_comments(self, lines, meta): """ WRITE: Override the default write_comments to do nothing since this is handled in the custom write method. """ pass def update_meta(self, lines, meta): """ READ: Override the default update_meta to do nothing. This process is done in get_cols() for this reader. """ pass def get_cols(self, lines): """ READ: Initialize the header Column objects from the table ``lines``. 
Parameters ---------- lines : list List of table lines """ # Cache a copy of the original input lines before processing below raw_lines = lines # Extract non-blank comment (header) lines with comment character stripped lines = list(self.process_lines(lines)) # Validate that this is a ECSV file ecsv_header_re = r"""%ECSV [ ] (?P<major> \d+) \. (?P<minor> \d+) \.? (?P<bugfix> \d+)? $""" no_header_msg = ('ECSV header line like "# %ECSV <version>" not found as first line.' ' This is required for a ECSV file.') if not lines: raise core.InconsistentTableError(no_header_msg) match = re.match(ecsv_header_re, lines[0].strip(), re.VERBOSE) if not match: raise core.InconsistentTableError(no_header_msg) # ecsv_version could be constructed here, but it is not currently used. try: header = meta.get_header_from_yaml(lines) except ImportError as exc: if 'PyYAML package is required' in str(exc): warnings.warn("file looks like ECSV format but PyYAML is not installed " "so it cannot be parsed as ECSV", AstropyWarning) raise core.InconsistentTableError('unable to parse yaml in meta header' ' (PyYAML package is required)') except meta.YamlParseError: raise core.InconsistentTableError('unable to parse yaml in meta header') if 'meta' in header: self.table_meta = header['meta'] if 'delimiter' in header: delimiter = header['delimiter'] if delimiter not in DELIMITERS: raise ValueError('only space and comma are allowed for delimiter in ECSV format') self.splitter.delimiter = delimiter self.data.splitter.delimiter = delimiter # Create the list of io.ascii column objects from `header` header_cols = OrderedDict((x['name'], x) for x in header['datatype']) self.names = [x['name'] for x in header['datatype']] # Read the first non-commented line of table and split to get the CSV # header column names. This is essentially what the Basic reader does. header_line = next(super().process_lines(raw_lines)) header_names = next(self.splitter([header_line])) # Check for consistency of the ECSV vs. CSV header column names if header_names != self.names: raise core.InconsistentTableError('column names from ECSV header {} do not ' 'match names from header line of CSV data {}' .format(self.names, header_names)) # BaseHeader method to create self.cols, which is a list of # io.ascii.core.Column objects (*not* Table Column objects). self._set_cols_from_names() # Transfer attributes from the column descriptor stored in the input # header YAML metadata to the new columns to create this table. for col in self.cols: for attr in ('description', 'format', 'unit', 'meta', 'subtype'): if attr in header_cols[col.name]: setattr(col, attr, header_cols[col.name][attr]) col.dtype = header_cols[col.name]['datatype'] if col.dtype not in ECSV_DATATYPES: raise ValueError(f'datatype {col.dtype!r} of column {col.name!r} ' f'is not in allowed values {ECSV_DATATYPES}') # Subtype is written like "int64[2,null]" and we want to split this # out to "int64" and [2, None]. 
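            # For example 'float64[3,null]' yields col.subtype = 'float64' and
            # col.shape = [3, None]: json.loads() turns the trailing '[3,null]'
            # into a Python list, mapping JSON null to None.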
subtype = col.subtype if subtype and '[' in subtype: idx = subtype.index('[') col.subtype = subtype[:idx] col.shape = json.loads(subtype[idx:]) # Convert ECSV "string" to numpy "str" for attr in ('dtype', 'subtype'): if getattr(col, attr) == 'string': setattr(col, attr, 'str') # ECSV subtype of 'json' maps to numpy 'object' dtype if col.subtype == 'json': col.subtype = 'object' def _check_dtype_is_str(col): if col.dtype != 'str': raise ValueError(f'datatype of column {col.name!r} must be "string"') class EcsvOutputter(core.TableOutputter): """ After reading the input lines and processing, convert the Reader columns and metadata to an astropy.table.Table object. This overrides the default converters to be an empty list because there is no "guessing" of the conversion function. """ default_converters = [] def __call__(self, cols, meta): # Convert to a Table with all plain Column subclass columns out = super().__call__(cols, meta) # If mixin columns exist (based on the special '__mixin_columns__' # key in the table ``meta``), then use that information to construct # appropriate mixin columns and remove the original data columns. # If no __mixin_columns__ exists then this function just passes back # the input table. out = serialize._construct_mixins_from_columns(out) return out def _convert_vals(self, cols): """READ: Convert str_vals in `cols` to final arrays with correct dtypes. This is adapted from ``BaseOutputter._convert_vals``. In the case of ECSV there is no guessing and all types are known in advance. A big change is handling the possibility of JSON-encoded values, both unstructured object data and structured values that may contain masked data. """ for col in cols: try: # 1-d or N-d object columns are serialized as JSON. if col.subtype == 'object': _check_dtype_is_str(col) col_vals = [json.loads(val) for val in col.str_vals] col.data = np.empty([len(col_vals)] + col.shape, dtype=object) col.data[...] = col_vals # Variable length arrays with shape (n, m, ..., *) for fixed # n, m, .. and variable in last axis. Masked values here are # not currently supported. elif col.shape and col.shape[-1] is None: _check_dtype_is_str(col) # Empty (blank) values in original ECSV are changed to "0" # in str_vals with corresponding col.mask being created and # set accordingly. Instead use an empty list here. if hasattr(col, 'mask'): for idx in np.nonzero(col.mask)[0]: col.str_vals[idx] = '[]' # Remake as a 1-d object column of numpy ndarrays or # MaskedArray using the datatype specified in the ECSV file. col_vals = [] for str_val in col.str_vals: obj_val = json.loads(str_val) # list or nested lists try: arr_val = np.array(obj_val, dtype=col.subtype) except TypeError: # obj_val has entries that are inconsistent with # dtype. For a valid ECSV file the only possibility # is None values (indicating missing values). data = np.array(obj_val, dtype=object) # Replace all the None with an appropriate fill value mask = (data == None) # noqa: E711 kind = np.dtype(col.subtype).kind data[mask] = {'U': '', 'S': b''}.get(kind, 0) arr_val = np.ma.array(data.astype(col.subtype), mask=mask) col_vals.append(arr_val) col.shape = () col.dtype = np.dtype(object) # np.array(col_vals_arr, dtype=object) fails ?? so this workaround: col.data = np.empty(len(col_vals), dtype=object) col.data[:] = col_vals # Multidim columns with consistent shape (n, m, ...). These # might be masked. 
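                # For example a column with datatype 'string' and subtype
                # 'int64[2,2]' carries rows like '[[1,2],[3,4]]'; a fully masked
                # row is rewritten below as '[[null, null], [null, null]]'
                # before JSON decoding.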
elif col.shape: _check_dtype_is_str(col) # Change empty (blank) values in original ECSV to something # like "[[null, null],[null,null]]" so subsequent JSON # decoding works. Delete `col.mask` so that later code in # core TableOutputter.__call__() that deals with col.mask # does not run (since handling is done here already). if hasattr(col, 'mask'): all_none_arr = np.full(shape=col.shape, fill_value=None, dtype=object) all_none_json = json.dumps(all_none_arr.tolist()) for idx in np.nonzero(col.mask)[0]: col.str_vals[idx] = all_none_json del col.mask col_vals = [json.loads(val) for val in col.str_vals] # Make a numpy object array of col_vals to look for None # (masked values) data = np.array(col_vals, dtype=object) mask = (data == None) # noqa: E711 if not np.any(mask): # No None's, just convert to required dtype col.data = data.astype(col.subtype) else: # Replace all the None with an appropriate fill value kind = np.dtype(col.subtype).kind data[mask] = {'U': '', 'S': b''}.get(kind, 0) # Finally make a MaskedArray with the filled data + mask col.data = np.ma.array(data.astype(col.subtype), mask=mask) # Regular scalar value column else: if col.subtype: warnings.warn(f'unexpected subtype {col.subtype!r} set for column ' f'{col.name!r}, using dtype={col.dtype!r} instead.', category=AstropyUserWarning) converter_func, _ = convert_numpy(col.dtype) col.data = converter_func(col.str_vals) if col.data.shape[1:] != tuple(col.shape): raise ValueError('shape mismatch between value and column specifier') except json.JSONDecodeError: raise ValueError(f'column {col.name!r} failed to convert: ' 'column value is not valid JSON') except Exception as exc: raise ValueError(f'column {col.name!r} failed to convert: {exc}') class EcsvData(basic.BasicData): def _set_fill_values(self, cols): """READ: Set the fill values of the individual cols based on fill_values of BaseData For ECSV handle the corner case of data that has been serialized using the serialize_method='data_mask' option, which writes the full data and mask directly, AND where that table includes a string column with zero-length string entries ("") which are valid data. Normally the super() method will set col.fill_value=('', '0') to replace blanks with a '0'. But for that corner case subset, instead do not do any filling. """ super()._set_fill_values(cols) # Get the serialized columns spec. It might not exist and there might # not even be any table meta, so punt in those cases. try: scs = self.header.table_meta['__serialized_columns__'] except (AttributeError, KeyError): return # Got some serialized columns, so check for string type and serialized # as a MaskedColumn. Without 'data_mask', MaskedColumn objects are # stored to ECSV as normal columns. for col in cols: if (col.dtype == 'str' and col.name in scs and scs[col.name]['__class__'] == 'astropy.table.column.MaskedColumn'): col.fill_values = {} # No data value replacement def str_vals(self): """WRITE: convert all values in table to a list of lists of strings This version considerably simplifies the base method: - No need to set fill values and column formats - No per-item formatting, just use repr() - Use JSON for object-type or multidim values - Only Column or MaskedColumn can end up as cols here. 
- Only replace masked values with "", not the generalized filling
        """
        for col in self.cols:
            if len(col.shape) > 1 or col.info.dtype.kind == 'O':
                def format_col_item(idx):
                    obj = col[idx]
                    try:
                        obj = obj.tolist()
                    except AttributeError:
                        pass
                    return json.dumps(obj, separators=(',', ':'))
            else:
                def format_col_item(idx):
                    return str(col[idx])

            try:
                col.str_vals = [format_col_item(idx) for idx in range(len(col))]
            except TypeError as exc:
                raise TypeError(f'could not convert column {col.info.name!r}'
                                f' to string: {exc}') from exc

            # Replace every masked value in a 1-d column with an empty string.
            # For multi-dim columns this gets done by JSON via "null".
            if hasattr(col, 'mask') and col.ndim == 1:
                for idx in col.mask.nonzero()[0]:
                    col.str_vals[idx] = ""

        out = [col.str_vals for col in self.cols]
        return out


class Ecsv(basic.Basic):
    """ECSV (Enhanced Character Separated Values) format table.

    The ECSV format allows for specification of key table and column
    meta-data, in particular the data type and unit.

    See: https://github.com/astropy/astropy-APEs/blob/main/APE6.rst

    Examples
    --------
    >>> from astropy.table import Table
    >>> ecsv_content = '''# %ECSV 0.9
    ... # ---
    ... # datatype:
    ... # - {name: a, unit: m / s, datatype: int64, format: '%03d'}
    ... # - {name: b, unit: km, datatype: int64, description: This is column b}
    ... a b
    ... 001 2
    ... 004 3
    ... '''

    >>> Table.read(ecsv_content, format='ascii.ecsv')
    <Table length=2>
      a     b
    m / s   km
    int64 int64
    ----- -----
      001     2
      004     3

    """
    _format_name = 'ecsv'
    _description = 'Enhanced CSV'
    _io_registry_suffix = '.ecsv'

    header_class = EcsvHeader
    data_class = EcsvData
    outputter_class = EcsvOutputter

    max_ndim = None  # No limit on column dimensionality

    def update_table_data(self, table):
        """
        Update table columns in place if mixin columns are present.

        This is a hook to allow updating the table columns after name
        filtering but before setting up to write the data.  This is currently
        only used by ECSV and is otherwise just a pass-through.

        Parameters
        ----------
        table : `astropy.table.Table`
            Input table for writing

        Returns
        -------
        table : `astropy.table.Table`
            Output table for writing
        """
        with serialize_context_as('ecsv'):
            out = serialize.represent_mixins_as_columns(table)
        return out
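# A minimal round-trip sketch (not part of this module) illustrating the
# JSON-based serialization implemented above: a masked multidimensional
# integer column is written with ECSV datatype 'string' plus a 'subtype',
# masked entries become JSON nulls, and the mask is reconstructed on read.
# Uses only public astropy APIs; guarded so it does not run on import.
if __name__ == '__main__':
    import io
    import numpy as np
    from astropy.table import Table, MaskedColumn

    t = Table()
    t['a'] = MaskedColumn([[1, 2], [3, 4]],
                          mask=[[False, True], [False, False]])
    buf = io.StringIO()
    t.write(buf, format='ascii.ecsv')

    # Read it back; masked entries should come back masked.
    t2 = Table.read(buf.getvalue(), format='ascii.ecsv')
    assert np.all(np.ma.getmaskarray(t2['a']) == np.ma.getmaskarray(t['a']))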
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst

"""Test initialization and other aspects of Angle and subclasses"""

import pytest
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import threading

from astropy.coordinates.angles import Longitude, Latitude, Angle
from astropy import units as u
from astropy.coordinates.errors import (IllegalSecondError, IllegalMinuteError,
                                        IllegalHourError, IllegalSecondWarning,
                                        IllegalMinuteWarning)


def test_create_angles():
    """
    Tests creating and accessing Angle objects
    """
    ''' The "angle" is a fundamental object. The internal
    representation is stored in radians, but this is transparent to the user.
    Units *must* be specified rather than a default value be assumed. This is
    as much for self-documenting code as anything else.

    Angle objects simply represent a single angular coordinate. More specific
    angular coordinates (e.g. Longitude, Latitude) are subclasses of Angle.'''

    a1 = Angle(54.12412, unit=u.degree)
    a2 = Angle("54.12412", unit=u.degree)
    a3 = Angle("54:07:26.832", unit=u.degree)
    a4 = Angle("54.12412 deg")
    a5 = Angle("54.12412 degrees")
    a6 = Angle("54.12412°")  # because we like Unicode
    a7 = Angle((54, 7, 26.832), unit=u.degree)
    a8 = Angle("54°07'26.832\"")
    # (deg,min,sec) *tuples* are acceptable, but lists/arrays are *not*
    # because of the need to eventually support arrays of coordinates
    a9 = Angle([54, 7, 26.832], unit=u.degree)
    assert_allclose(a9.value, [54, 7, 26.832])
    assert a9.unit is u.degree

    a10 = Angle(3.60827466667, unit=u.hour)
    a11 = Angle("3:36:29.7888000120", unit=u.hour)
    a12 = Angle((3, 36, 29.7888000120), unit=u.hour)  # *must* be a tuple
    # Regression test for #5001
    a13 = Angle((3, 36, 29.7888000120), unit='hour')

    Angle(0.944644098745, unit=u.radian)

    with pytest.raises(u.UnitsError):
        Angle(54.12412)
        # raises an exception because this is ambiguous

    with pytest.raises(u.UnitsError):
        Angle(54.12412, unit=u.m)

    with pytest.raises(ValueError):
        Angle(12.34, unit="not a unit")

    a14 = Angle("03h36m29.7888000120")  # no trailing 's', but unambiguous

    a15 = Angle("5h4m3s")  # single digits, no decimal
    assert a15.unit == u.hourangle

    a16 = Angle("1 d")
    a17 = Angle("1 degree")

    assert a16.degree == 1
    assert a17.degree == 1

    a18 = Angle("54 07.4472", unit=u.degree)
    a19 = Angle("54:07.4472", unit=u.degree)
    a20 = Angle("54d07.4472m", unit=u.degree)
    a21 = Angle("3h36m", unit=u.hour)
    a22 = Angle("3.6h", unit=u.hour)
    a23 = Angle("- 3h", unit=u.hour)
    a24 = Angle("+ 3h", unit=u.hour)

    # ensure the above angles that should match do
    assert a1 == a2 == a3 == a4 == a5 == a6 == a7 == a8 == a18 == a19 == a20
    assert_allclose(a1.radian, a2.radian)
    assert_allclose(a2.degree, a3.degree)
    assert_allclose(a3.radian, a4.radian)
    assert_allclose(a4.radian, a5.radian)
    assert_allclose(a5.radian, a6.radian)
    assert_allclose(a6.radian, a7.radian)

    assert_allclose(a10.degree, a11.degree)
    assert a11 == a12 == a13 == a14
    assert a21 == a22
    assert a23 == -a24

    # check for illegal ranges / values
    with pytest.raises(IllegalSecondError):
        a = Angle("12 32 99", unit=u.degree)

    with pytest.raises(IllegalMinuteError):
        a = Angle("12 99 23", unit=u.degree)

    with pytest.raises(IllegalSecondError):
        a = Angle("12 32 99", unit=u.hour)

    with pytest.raises(IllegalMinuteError):
        a = Angle("12 99 23", unit=u.hour)

    with pytest.raises(IllegalHourError):
        a = Angle("99 25 51.0", unit=u.hour)

    with pytest.raises(ValueError):
        a = Angle("12 25 51.0xxx", unit=u.hour)

    with pytest.raises(ValueError):
        a = Angle("12h34321m32.2s")

    assert a1 is not None


def
test_angle_from_view(): q = np.arange(3.) * u.deg a = q.view(Angle) assert type(a) is Angle assert a.unit is q.unit assert np.all(a == q) q2 = np.arange(4) * u.m with pytest.raises(u.UnitTypeError): q2.view(Angle) def test_angle_ops(): """ Tests operations on Angle objects """ # Angles can be added and subtracted. Multiplication and division by a # scalar is also permitted. A negative operator is also valid. All of # these operate in a single dimension. Attempting to multiply or divide two # Angle objects will return a quantity. An exception will be raised if it # is attempted to store output with a non-angular unit in an Angle [#2718]. a1 = Angle(3.60827466667, unit=u.hour) a2 = Angle("54:07:26.832", unit=u.degree) a1 + a2 # creates new Angle object a1 - a2 -a1 assert_allclose((a1 * 2).hour, 2 * 3.6082746666700003) assert abs((a1 / 3.123456).hour - 3.60827466667 / 3.123456) < 1e-10 # commutativity assert (2 * a1).hour == (a1 * 2).hour a3 = Angle(a1) # makes a *copy* of the object, but identical content as a1 assert_allclose(a1.radian, a3.radian) assert a1 is not a3 a4 = abs(-a1) assert a4.radian == a1.radian a5 = Angle(5.0, unit=u.hour) assert a5 > a1 assert a5 >= a1 assert a1 < a5 assert a1 <= a5 # check operations with non-angular result give Quantity. a6 = Angle(45., u.degree) a7 = a6 * a5 assert type(a7) is u.Quantity # but those with angular result yield Angle. # (a9 is regression test for #5327) a8 = a1 + 1.*u.deg assert type(a8) is Angle a9 = 1.*u.deg + a1 assert type(a9) is Angle with pytest.raises(TypeError): a6 *= a5 with pytest.raises(TypeError): a6 *= u.m with pytest.raises(TypeError): np.sin(a6, out=a6) def test_angle_methods(): # Most methods tested as part of the Quantity tests. # A few tests here which caused problems before: #8368 a = Angle([0., 2.], 'deg') a_mean = a.mean() assert type(a_mean) is Angle assert a_mean == 1. * u.degree a_std = a.std() assert type(a_std) is Angle assert a_std == 1. * u.degree a_var = a.var() assert type(a_var) is u.Quantity assert a_var == 1. * u.degree ** 2 a_ptp = a.ptp() assert type(a_ptp) is Angle assert a_ptp == 2. * u.degree a_max = a.max() assert type(a_max) is Angle assert a_max == 2. * u.degree a_min = a.min() assert type(a_min) is Angle assert a_min == 0. 
* u.degree def test_angle_convert(): """ Test unit conversion of Angle objects """ angle = Angle("54.12412", unit=u.degree) assert_allclose(angle.hour, 3.60827466667) assert_allclose(angle.radian, 0.944644098745) assert_allclose(angle.degree, 54.12412) assert len(angle.hms) == 3 assert isinstance(angle.hms, tuple) assert angle.hms[0] == 3 assert angle.hms[1] == 36 assert_allclose(angle.hms[2], 29.78879999999947) # also check that the namedtuple attribute-style access works: assert angle.hms.h == 3 assert angle.hms.m == 36 assert_allclose(angle.hms.s, 29.78879999999947) assert len(angle.dms) == 3 assert isinstance(angle.dms, tuple) assert angle.dms[0] == 54 assert angle.dms[1] == 7 assert_allclose(angle.dms[2], 26.831999999992036) # also check that the namedtuple attribute-style access works: assert angle.dms.d == 54 assert angle.dms.m == 7 assert_allclose(angle.dms.s, 26.831999999992036) assert isinstance(angle.dms[0], float) assert isinstance(angle.hms[0], float) # now make sure dms and signed_dms work right for negative angles negangle = Angle("-54.12412", unit=u.degree) assert negangle.dms.d == -54 assert negangle.dms.m == -7 assert_allclose(negangle.dms.s, -26.831999999992036) assert negangle.signed_dms.sign == -1 assert negangle.signed_dms.d == 54 assert negangle.signed_dms.m == 7 assert_allclose(negangle.signed_dms.s, 26.831999999992036) def test_angle_formatting(): """ Tests string formatting for Angle objects """ ''' The string method of Angle has this signature: def string(self, unit=DEGREE, decimal=False, sep=" ", precision=5, pad=False): The "decimal" parameter defaults to False since if you need to print the Angle as a decimal, there's no need to use the "format" method (see above). ''' angle = Angle("54.12412", unit=u.degree) # __str__ is the default `format` assert str(angle) == angle.to_string() res = 'Angle as HMS: 3h36m29.7888s' assert f"Angle as HMS: {angle.to_string(unit=u.hour)}" == res res = 'Angle as HMS: 3:36:29.7888' assert f"Angle as HMS: {angle.to_string(unit=u.hour, sep=':')}" == res res = 'Angle as HMS: 3:36:29.79' assert f"Angle as HMS: {angle.to_string(unit=u.hour, sep=':', precision=2)}" == res # Note that you can provide one, two, or three separators passed as a # tuple or list res = 'Angle as HMS: 3h36m29.7888s' assert "Angle as HMS: {}".format(angle.to_string(unit=u.hour, sep=("h", "m", "s"), precision=4)) == res res = 'Angle as HMS: 3-36|29.7888' assert "Angle as HMS: {}".format(angle.to_string(unit=u.hour, sep=["-", "|"], precision=4)) == res res = 'Angle as HMS: 3-36-29.7888' assert f"Angle as HMS: {angle.to_string(unit=u.hour, sep='-', precision=4)}" == res res = 'Angle as HMS: 03h36m29.7888s' assert f"Angle as HMS: {angle.to_string(unit=u.hour, precision=4, pad=True)}" == res # Same as above, in degrees angle = Angle("3 36 29.78880", unit=u.degree) res = 'Angle as DMS: 3d36m29.7888s' assert f"Angle as DMS: {angle.to_string(unit=u.degree)}" == res res = 'Angle as DMS: 3:36:29.7888' assert f"Angle as DMS: {angle.to_string(unit=u.degree, sep=':')}" == res res = 'Angle as DMS: 3:36:29.79' assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree, sep=":", precision=2)) == res # Note that you can provide one, two, or three separators passed as a # tuple or list res = 'Angle as DMS: 3d36m29.7888s' assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree, sep=("d", "m", "s"), precision=4)) == res res = 'Angle as DMS: 3-36|29.7888' assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree, sep=["-", "|"], precision=4)) == res res = 'Angle as 
DMS: 3-36-29.7888'
    assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree, sep="-",
                                     precision=4)) == res

    res = 'Angle as DMS: 03d36m29.7888s'
    assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree,
                                     precision=4, pad=True)) == res

    res = 'Angle as rad: 0.0629763rad'
    assert f"Angle as rad: {angle.to_string(unit=u.radian)}" == res

    res = 'Angle as rad decimal: 0.0629763'
    assert f"Angle as rad decimal: {angle.to_string(unit=u.radian, decimal=True)}" == res

    # check negative angles

    angle = Angle(-1.23456789, unit=u.degree)
    angle2 = Angle(-1.23456789, unit=u.hour)

    assert angle.to_string() == '-1d14m04.444404s'
    assert angle.to_string(pad=True) == '-01d14m04.444404s'
    assert angle.to_string(unit=u.hour) == '-0h04m56.2962936s'
    assert angle2.to_string(unit=u.hour, pad=True) == '-01h14m04.444404s'
    assert angle.to_string(unit=u.radian, decimal=True) == '-0.0215473'


def test_to_string_vector():
    # Regression test for the fact that vectorize doesn't work with Numpy 1.6
    assert Angle([1./7., 1./7.], unit='deg').to_string()[0] == "0d08m34.28571429s"
    assert Angle([1./7.], unit='deg').to_string()[0] == "0d08m34.28571429s"
    assert Angle(1./7., unit='deg').to_string() == "0d08m34.28571429s"


def test_angle_format_roundtripping():
    """
    Ensures that the string representation of an angle can be used to create a
    new valid Angle.
    """
    a1 = Angle(0, unit=u.radian)
    a2 = Angle(10, unit=u.degree)
    a3 = Angle(0.543, unit=u.degree)
    a4 = Angle('1d2m3.4s')

    assert Angle(str(a1)).degree == a1.degree
    assert Angle(str(a2)).degree == a2.degree
    assert Angle(str(a3)).degree == a3.degree
    assert Angle(str(a4)).degree == a4.degree

    # also check Longitude/Latitude
    ra = Longitude('1h2m3.4s')
    dec = Latitude('1d2m3.4s')

    assert_allclose(Angle(str(ra)).degree, ra.degree)
    assert_allclose(Angle(str(dec)).degree, dec.degree)


def test_radec():
    """
    Tests creation/operations of Longitude and Latitude objects
    """
    ''' Longitude and Latitude are objects that are subclassed from Angle. As
    with Angle, Longitude and Latitude can parse any unambiguous format
    (tuples, formatted strings, etc.).

    The intention is not to create an Angle subclass for every possible
    coordinate object (e.g. galactic l, galactic b). However, equatorial
    Longitude/Latitude are so prevalent in astronomy that it's worth creating
    ones for these units. They will be noted as "special" in the docs, and
    the plain Angle class is to be used for other coordinate systems.
    '''

    with pytest.raises(u.UnitsError):
        ra = Longitude("4:08:15.162342")  # error - hours or degrees?
with pytest.raises(u.UnitsError): ra = Longitude("-4:08:15.162342") # the "smart" initializer allows >24 to automatically do degrees, but the # Angle-based one does not # TODO: adjust in 0.3 for whatever behavior is decided on # ra = Longitude("26:34:15.345634") # unambiguous b/c hours don't go past 24 # assert_allclose(ra.degree, 26.570929342) with pytest.raises(u.UnitsError): ra = Longitude("26:34:15.345634") # ra = Longitude(68) with pytest.raises(u.UnitsError): ra = Longitude(68) with pytest.raises(u.UnitsError): ra = Longitude(12) with pytest.raises(ValueError): ra = Longitude("garbage containing a d and no units") ra = Longitude("12h43m23s") assert_allclose(ra.hour, 12.7230555556) ra = Longitude((56, 14, 52.52), unit=u.degree) # can accept tuples # TODO: again, fix based on >24 behavior # ra = Longitude((56,14,52.52)) with pytest.raises(u.UnitsError): ra = Longitude((56, 14, 52.52)) with pytest.raises(u.UnitsError): ra = Longitude((12, 14, 52)) # ambiguous w/o units ra = Longitude((12, 14, 52), unit=u.hour) ra = Longitude([56, 64, 52.2], unit=u.degree) # ...but not arrays (yet) # Units can be specified ra = Longitude("4:08:15.162342", unit=u.hour) # TODO: this was the "smart" initializer behavior - adjust in 0.3 appropriately # Where Longitude values are commonly found in hours or degrees, declination is # nearly always specified in degrees, so this is the default. # dec = Latitude("-41:08:15.162342") with pytest.raises(u.UnitsError): dec = Latitude("-41:08:15.162342") dec = Latitude("-41:08:15.162342", unit=u.degree) # same as above def test_negative_zero_dms(): # Test for DMS parser a = Angle('-00:00:10', u.deg) assert_allclose(a.degree, -10. / 3600.) # Unicode minus a = Angle('−00:00:10', u.deg) assert_allclose(a.degree, -10. / 3600.) def test_negative_zero_dm(): # Test for DM parser a = Angle('-00:10', u.deg) assert_allclose(a.degree, -10. / 60.) def test_negative_zero_hms(): # Test for HMS parser a = Angle('-00:00:10', u.hour) assert_allclose(a.hour, -10. / 3600.) def test_negative_zero_hm(): # Test for HM parser a = Angle('-00:10', u.hour) assert_allclose(a.hour, -10. / 60.) def test_negative_sixty_hm(): # Test for HM parser with pytest.warns(IllegalMinuteWarning): a = Angle('-00:60', u.hour) assert_allclose(a.hour, -1.) def test_plus_sixty_hm(): # Test for HM parser with pytest.warns(IllegalMinuteWarning): a = Angle('00:60', u.hour) assert_allclose(a.hour, 1.) def test_negative_fifty_nine_sixty_dms(): # Test for DMS parser with pytest.warns(IllegalSecondWarning): a = Angle('-00:59:60', u.deg) assert_allclose(a.degree, -1.) def test_plus_fifty_nine_sixty_dms(): # Test for DMS parser with pytest.warns(IllegalSecondWarning): a = Angle('+00:59:60', u.deg) assert_allclose(a.degree, 1.) def test_negative_sixty_dms(): # Test for DMS parser with pytest.warns(IllegalSecondWarning): a = Angle('-00:00:60', u.deg) assert_allclose(a.degree, -1. / 60.) def test_plus_sixty_dms(): # Test for DMS parser with pytest.warns(IllegalSecondWarning): a = Angle('+00:00:60', u.deg) assert_allclose(a.degree, 1. / 60.) 
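# An illustrative check, not from the original suite: the IllegalSecondWarning
# cases above correspond to a carry into the next minute, so '-00:59:60' and
# '-01:00:00' parse to the same angle.  The test name here is ours, added
# only as a sketch of that equivalence.
def test_sixty_seconds_carry_sketch():
    with pytest.warns(IllegalSecondWarning):
        a = Angle('-00:59:60', u.deg)
    b = Angle('-01:00:00', u.deg)
    assert_allclose(a.degree, b.degree)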
def test_angle_to_is_angle(): with pytest.warns(IllegalSecondWarning): a = Angle('00:00:60', u.deg) assert isinstance(a, Angle) assert isinstance(a.to(u.rad), Angle) def test_angle_to_quantity(): with pytest.warns(IllegalSecondWarning): a = Angle('00:00:60', u.deg) q = u.Quantity(a) assert isinstance(q, u.Quantity) assert q.unit is u.deg def test_quantity_to_angle(): a = Angle(1.0*u.deg) assert isinstance(a, Angle) with pytest.raises(u.UnitsError): Angle(1.0*u.meter) a = Angle(1.0*u.hour) assert isinstance(a, Angle) assert a.unit is u.hourangle with pytest.raises(u.UnitsError): Angle(1.0*u.min) def test_angle_string(): with pytest.warns(IllegalSecondWarning): a = Angle('00:00:60', u.deg) assert str(a) == '0d01m00s' a = Angle('00:00:59S', u.deg) assert str(a) == '-0d00m59s' a = Angle('00:00:59N', u.deg) assert str(a) == '0d00m59s' a = Angle('00:00:59E', u.deg) assert str(a) == '0d00m59s' a = Angle('00:00:59W', u.deg) assert str(a) == '-0d00m59s' a = Angle('-00:00:10', u.hour) assert str(a) == '-0h00m10s' a = Angle('00:00:59E', u.hour) assert str(a) == '0h00m59s' a = Angle('00:00:59W', u.hour) assert str(a) == '-0h00m59s' a = Angle(3.2, u.radian) assert str(a) == '3.2rad' a = Angle(4.2, u.microarcsecond) assert str(a) == '4.2uarcsec' a = Angle('1.0uarcsec') assert a.value == 1.0 assert a.unit == u.microarcsecond a = Angle('1.0uarcsecN') assert a.value == 1.0 assert a.unit == u.microarcsecond a = Angle('1.0uarcsecS') assert a.value == -1.0 assert a.unit == u.microarcsecond a = Angle('1.0uarcsecE') assert a.value == 1.0 assert a.unit == u.microarcsecond a = Angle('1.0uarcsecW') assert a.value == -1.0 assert a.unit == u.microarcsecond a = Angle("3d") assert_allclose(a.value, 3.0) assert a.unit == u.degree a = Angle("3dN") assert str(a) == "3d00m00s" assert a.unit == u.degree a = Angle("3dS") assert str(a) == "-3d00m00s" assert a.unit == u.degree a = Angle("3dE") assert str(a) == "3d00m00s" assert a.unit == u.degree a = Angle("3dW") assert str(a) == "-3d00m00s" assert a.unit == u.degree a = Angle('10"') assert_allclose(a.value, 10.0) assert a.unit == u.arcsecond a = Angle("10'N") assert_allclose(a.value, 10.0) assert a.unit == u.arcminute a = Angle("10'S") assert_allclose(a.value, -10.0) assert a.unit == u.arcminute a = Angle("10'E") assert_allclose(a.value, 10.0) assert a.unit == u.arcminute a = Angle("10'W") assert_allclose(a.value, -10.0) assert a.unit == u.arcminute a = Angle('45°55′12″N') assert str(a) == '45d55m12s' assert_allclose(a.value, 45.92) assert a.unit == u.deg a = Angle('45°55′12″S') assert str(a) == '-45d55m12s' assert_allclose(a.value, -45.92) assert a.unit == u.deg a = Angle('45°55′12″E') assert str(a) == '45d55m12s' assert_allclose(a.value, 45.92) assert a.unit == u.deg a = Angle('45°55′12″W') assert str(a) == '-45d55m12s' assert_allclose(a.value, -45.92) assert a.unit == u.deg with pytest.raises(ValueError): Angle('00h00m10sN') with pytest.raises(ValueError): Angle('45°55′12″NS') def test_angle_repr(): assert 'Angle' in repr(Angle(0, u.deg)) assert 'Longitude' in repr(Longitude(0, u.deg)) assert 'Latitude' in repr(Latitude(0, u.deg)) a = Angle(0, u.deg) repr(a) def test_large_angle_representation(): """Test that angles above 360 degrees can be output as strings, in repr, str, and to_string. 
(regression test for #1413)""" a = Angle(350, u.deg) + Angle(350, u.deg) a.to_string() a.to_string(u.hourangle) repr(a) repr(a.to(u.hourangle)) str(a) str(a.to(u.hourangle)) def test_wrap_at_inplace(): a = Angle([-20, 150, 350, 360] * u.deg) out = a.wrap_at('180d', inplace=True) assert out is None assert np.all(a.degree == np.array([-20., 150., -10., 0.])) def test_latitude(): with pytest.raises(ValueError): lat = Latitude(['91d', '89d']) with pytest.raises(ValueError): lat = Latitude('-91d') lat = Latitude(['90d', '89d']) # check that one can get items assert lat[0] == 90 * u.deg assert lat[1] == 89 * u.deg # and that comparison with angles works assert np.all(lat == Angle(['90d', '89d'])) # check setitem works lat[1] = 45. * u.deg assert np.all(lat == Angle(['90d', '45d'])) # but not with values out of range with pytest.raises(ValueError): lat[0] = 90.001 * u.deg with pytest.raises(ValueError): lat[0] = -90.001 * u.deg # these should also not destroy input (#1851) assert np.all(lat == Angle(['90d', '45d'])) # conserve type on unit change (closes #1423) angle = lat.to('radian') assert type(angle) is Latitude # but not on calculations angle = lat - 190 * u.deg assert type(angle) is Angle assert angle[0] == -100 * u.deg lat = Latitude('80d') angle = lat / 2. assert type(angle) is Angle assert angle == 40 * u.deg angle = lat * 2. assert type(angle) is Angle assert angle == 160 * u.deg angle = -lat assert type(angle) is Angle assert angle == -80 * u.deg # Test errors when trying to interoperate with longitudes. with pytest.raises(TypeError) as excinfo: lon = Longitude(10, 'deg') lat = Latitude(lon) assert "A Latitude angle cannot be created from a Longitude angle" in str(excinfo.value) with pytest.raises(TypeError) as excinfo: lon = Longitude(10, 'deg') lat = Latitude([20], 'deg') lat[0] = lon assert "A Longitude angle cannot be assigned to a Latitude angle" in str(excinfo.value) # Check we can work around the Lat vs Long checks by casting explicitly to Angle. lon = Longitude(10, 'deg') lat = Latitude(Angle(lon)) assert lat.value == 10.0 # Check setitem. lon = Longitude(10, 'deg') lat = Latitude([20], 'deg') lat[0] = Angle(lon) assert lat.value[0] == 10.0 def test_longitude(): # Default wrapping at 360d with an array input lon = Longitude(['370d', '88d']) assert np.all(lon == Longitude(['10d', '88d'])) assert np.all(lon == Angle(['10d', '88d'])) # conserve type on unit change and keep wrap_angle (closes #1423) angle = lon.to('hourangle') assert type(angle) is Longitude assert angle.wrap_angle == lon.wrap_angle angle = lon[0] assert type(angle) is Longitude assert angle.wrap_angle == lon.wrap_angle angle = lon[1:] assert type(angle) is Longitude assert angle.wrap_angle == lon.wrap_angle # but not on calculations angle = lon / 2. assert np.all(angle == Angle(['5d', '44d'])) assert type(angle) is Angle assert not hasattr(angle, 'wrap_angle') angle = lon * 2. 
+ 400 * u.deg assert np.all(angle == Angle(['420d', '576d'])) assert type(angle) is Angle # Test setting a mutable value and having it wrap lon[1] = -10 * u.deg assert np.all(lon == Angle(['10d', '350d'])) # Test wrapping and try hitting some edge cases lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian) assert np.all(lon.degree == np.array([0., 90, 180, 270, 0])) lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian, wrap_angle='180d') assert np.all(lon.degree == np.array([0., 90, -180, -90, 0])) # Wrap on setting wrap_angle property (also test auto-conversion of wrap_angle to an Angle) lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian) lon.wrap_angle = '180d' assert np.all(lon.degree == np.array([0., 90, -180, -90, 0])) lon = Longitude('460d') assert lon == Angle('100d') lon.wrap_angle = '90d' assert lon == Angle('-260d') # check that if we initialize a longitude with another longitude, # wrap_angle is kept by default lon2 = Longitude(lon) assert lon2.wrap_angle == lon.wrap_angle # but not if we explicitly set it lon3 = Longitude(lon, wrap_angle='180d') assert lon3.wrap_angle == 180 * u.deg # check for problem reported in #2037 about Longitude initializing to -0 lon = Longitude(0, u.deg) lonstr = lon.to_string() assert not lonstr.startswith('-') # also make sure dtype is correctly conserved assert Longitude(0, u.deg, dtype=float).dtype == np.dtype(float) assert Longitude(0, u.deg, dtype=int).dtype == np.dtype(int) # Test errors when trying to interoperate with latitudes. with pytest.raises(TypeError) as excinfo: lat = Latitude(10, 'deg') lon = Longitude(lat) assert "A Longitude angle cannot be created from a Latitude angle" in str(excinfo.value) with pytest.raises(TypeError) as excinfo: lat = Latitude(10, 'deg') lon = Longitude([20], 'deg') lon[0] = lat assert "A Latitude angle cannot be assigned to a Longitude angle" in str(excinfo.value) # Check we can work around the Lat vs Long checks by casting explicitly to Angle. lat = Latitude(10, 'deg') lon = Longitude(Angle(lat)) assert lon.value == 10.0 # Check setitem. 
lat = Latitude(10, 'deg') lon = Longitude([20], 'deg') lon[0] = Angle(lat) assert lon.value[0] == 10.0 def test_wrap_at(): a = Angle([-20, 150, 350, 360] * u.deg) assert np.all(a.wrap_at(360 * u.deg).degree == np.array([340., 150., 350., 0.])) assert np.all(a.wrap_at(Angle(360, unit=u.deg)).degree == np.array([340., 150., 350., 0.])) assert np.all(a.wrap_at('360d').degree == np.array([340., 150., 350., 0.])) assert np.all(a.wrap_at('180d').degree == np.array([-20., 150., -10., 0.])) assert np.all(a.wrap_at(np.pi * u.rad).degree == np.array([-20., 150., -10., 0.])) # Test wrapping a scalar Angle a = Angle('190d') assert a.wrap_at('180d') == Angle('-170d') a = Angle(np.arange(-1000.0, 1000.0, 0.125), unit=u.deg) for wrap_angle in (270, 0.2, 0.0, 360.0, 500, -2000.125): aw = a.wrap_at(wrap_angle * u.deg) assert np.all(aw.degree >= wrap_angle - 360.0) assert np.all(aw.degree < wrap_angle) aw = a.to(u.rad).wrap_at(wrap_angle * u.deg) assert np.all(aw.degree >= wrap_angle - 360.0) assert np.all(aw.degree < wrap_angle) def test_is_within_bounds(): a = Angle([-20, 150, 350] * u.deg) assert a.is_within_bounds('0d', '360d') is False assert a.is_within_bounds(None, '360d') is True assert a.is_within_bounds(-30 * u.deg, None) is True a = Angle('-20d') assert a.is_within_bounds('0d', '360d') is False assert a.is_within_bounds(None, '360d') is True assert a.is_within_bounds(-30 * u.deg, None) is True def test_angle_mismatched_unit(): a = Angle('+6h7m8s', unit=u.degree) assert_allclose(a.value, 91.78333333333332) def test_regression_formatting_negative(): # Regression test for a bug that caused: # # >>> Angle(-1., unit='deg').to_string() # '-1d00m-0s' assert Angle(-0., unit='deg').to_string() == '-0d00m00s' assert Angle(-1., unit='deg').to_string() == '-1d00m00s' assert Angle(-0., unit='hour').to_string() == '-0h00m00s' assert Angle(-1., unit='hour').to_string() == '-1h00m00s' def test_regression_formatting_default_precision(): # Regression test for issue #11140 assert Angle('10:20:30.12345678d').to_string() == '10d20m30.12345678s' assert Angle('10d20m30.123456784564s').to_string() == '10d20m30.12345678s' assert Angle('10d20m30.123s').to_string() == '10d20m30.123s' def test_empty_sep(): a = Angle('05h04m31.93830s') assert a.to_string(sep='', precision=2, pad=True) == '050431.94' def test_create_tuple(): """ Tests creation of an angle with a (d,m,s) or (h,m,s) tuple """ a1 = Angle((1, 30, 0), unit=u.degree) assert a1.value == 1.5 a1 = Angle((1, 30, 0), unit=u.hourangle) assert a1.value == 1.5 def test_list_of_quantities(): a1 = Angle([1*u.deg, 1*u.hourangle]) assert a1.unit == u.deg assert_allclose(a1.value, [1, 15]) a2 = Angle([1*u.hourangle, 1*u.deg], u.deg) assert a2.unit == u.deg assert_allclose(a2.value, [15, 1]) def test_multiply_divide(): # Issue #2273 a1 = Angle([1, 2, 3], u.deg) a2 = Angle([4, 5, 6], u.deg) a3 = a1 * a2 assert_allclose(a3.value, [4, 10, 18]) assert a3.unit == (u.deg * u.deg) a3 = a1 / a2 assert_allclose(a3.value, [.25, .4, .5]) assert a3.unit == u.dimensionless_unscaled def test_mixed_string_and_quantity(): a1 = Angle(['1d', 1. 
* u.deg])
    assert_array_equal(a1.value, [1., 1.])
    assert a1.unit == u.deg

    a2 = Angle(['1d', 1 * u.rad * np.pi, '3d'])
    assert_array_equal(a2.value, [1., 180., 3.])
    assert a2.unit == u.deg


def test_array_angle_tostring():
    aobj = Angle([1, 2], u.deg)
    assert aobj.to_string().dtype.kind == 'U'
    assert np.all(aobj.to_string() == ['1d00m00s', '2d00m00s'])


def test_wrap_at_without_new():
    """
    Regression test for subtle bugs from situations where an Angle is
    created via numpy channels that don't do the standard __new__ but instead
    depend on array_finalize to set state.  Longitude is used because the
    bug was in its _wrap_angle not getting initialized correctly
    """
    l1 = Longitude([1]*u.deg)
    l2 = Longitude([2]*u.deg)

    l = np.concatenate([l1, l2])
    assert l._wrap_angle is not None


def test__str__():
    """
    Check the __str__ method used in printing the Angle
    """

    # scalar angle
    scangle = Angle('10.2345d')
    strscangle = scangle.__str__()
    assert strscangle == '10d14m04.2s'

    # non-scalar array angles
    arrangle = Angle(['10.2345d', '-20d'])
    strarrangle = arrangle.__str__()

    assert strarrangle == '[10d14m04.2s -20d00m00s]'

    # summarizing for large arrays, ... should appear
    bigarrangle = Angle(np.ones(10000), u.deg)
    assert '...' in bigarrangle.__str__()


def test_repr_latex():
    """
    Check the _repr_latex_ method, used primarily by IPython notebooks
    """

    # try with both scalar
    scangle = Angle(2.1, u.deg)
    rlscangle = scangle._repr_latex_()

    # and array angles
    arrangle = Angle([1, 2.1], u.deg)
    rlarrangle = arrangle._repr_latex_()

    assert rlscangle == r'$2^\circ06{}^\prime00{}^{\prime\prime}$'
    assert rlscangle.split('$')[1] in rlarrangle

    # make sure the ... appears for large arrays
    bigarrangle = Angle(np.ones(50000)/50000., u.deg)
    assert '...' in bigarrangle._repr_latex_()


def test_angle_with_cds_units_enabled():
    """Regression test for #5350

    Especially the example in
    https://github.com/astropy/astropy/issues/5350#issuecomment-248770151
    """
    from astropy.units import cds
    # the problem is with the parser, so remove it temporarily
    from astropy.coordinates.angle_formats import _AngleParser
    del _AngleParser._thread_local._parser
    with cds.enable():
        Angle('5d')
    del _AngleParser._thread_local._parser
    Angle('5d')


def test_longitude_nan():
    # Check that passing a NaN to Longitude doesn't raise a warning
    Longitude([0, np.nan, 1] * u.deg)


def test_latitude_nan():
    # Check that passing a NaN to Latitude doesn't raise a warning
    Latitude([0, np.nan, 1] * u.deg)


def test_angle_wrap_at_nan():
    # Check that wrapping an Angle containing a NaN doesn't raise a warning
    Angle([0, np.nan, 1] * u.deg).wrap_at(180*u.deg)


def test_angle_multithreading():
    """
    Regression test for issue #7168
    """
    angles = ['00:00:00']*10000

    def parse_test(i=0):
        Angle(angles, unit='hour')
    for i in range(10):
        threading.Thread(target=parse_test, args=(i,)).start()
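# A standalone demonstration (a sketch, not an original astropy test) of the
# wrap_at contract exercised in test_wrap_at above: wrap_at(w) maps every
# angle into the half-open interval [w - 360 deg, w).
def test_wrap_at_interval_sketch():
    a = Angle(np.arange(-720., 720., 45.), unit=u.deg)
    aw = a.wrap_at(180 * u.deg)
    assert np.all(aw.degree >= -180.)
    assert np.all(aw.degree < 180.)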
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The astropy.utils.iers package provides access to the tables provided by the International Earth Rotation and Reference Systems Service, in particular allowing interpolation of published UT1-UTC values for given times. These are used in `astropy.time` to provide UT1 values. The polar motions are also used for determining earth orientation for celestial-to-terrestrial coordinate transformations (in `astropy.coordinates`). """ import re from datetime import datetime from warnings import warn from urllib.parse import urlparse import numpy as np import erfa from astropy.time import Time, TimeDelta from astropy import config as _config from astropy import units as u from astropy.table import QTable, MaskedColumn from astropy.utils.data import (get_pkg_data_filename, clear_download_cache, is_url_in_cache, get_readable_fileobj) from astropy.utils.state import ScienceState from astropy import utils from astropy.utils.exceptions import AstropyWarning __all__ = ['Conf', 'conf', 'earth_orientation_table', 'IERS', 'IERS_B', 'IERS_A', 'IERS_Auto', 'FROM_IERS_B', 'FROM_IERS_A', 'FROM_IERS_A_PREDICTION', 'TIME_BEFORE_IERS_RANGE', 'TIME_BEYOND_IERS_RANGE', 'IERS_A_FILE', 'IERS_A_URL', 'IERS_A_URL_MIRROR', 'IERS_A_README', 'IERS_B_FILE', 'IERS_B_URL', 'IERS_B_README', 'IERSRangeError', 'IERSStaleWarning', 'LeapSeconds', 'IERS_LEAP_SECOND_FILE', 'IERS_LEAP_SECOND_URL', 'IETF_LEAP_SECOND_URL'] # IERS-A default file name, URL, and ReadMe with content description IERS_A_FILE = 'finals2000A.all' IERS_A_URL = 'ftp://anonymous:mail%40astropy.org@gdc.cddis.eosdis.nasa.gov/pub/products/iers/finals2000A.all' # noqa: E501 IERS_A_URL_MIRROR = 'https://datacenter.iers.org/data/9/finals2000A.all' IERS_A_README = get_pkg_data_filename('data/ReadMe.finals2000A') # IERS-B default file name, URL, and ReadMe with content description IERS_B_FILE = get_pkg_data_filename('data/eopc04_IAU2000.62-now') IERS_B_URL = 'http://hpiers.obspm.fr/iers/eop/eopc04/eopc04_IAU2000.62-now' IERS_B_README = get_pkg_data_filename('data/ReadMe.eopc04_IAU2000') # LEAP SECONDS default file name, URL, and alternative format/URL IERS_LEAP_SECOND_FILE = get_pkg_data_filename('data/Leap_Second.dat') IERS_LEAP_SECOND_URL = 'https://hpiers.obspm.fr/iers/bul/bulc/Leap_Second.dat' IETF_LEAP_SECOND_URL = 'https://www.ietf.org/timezones/data/leap-seconds.list' # Status/source values returned by IERS.ut1_utc FROM_IERS_B = 0 FROM_IERS_A = 1 FROM_IERS_A_PREDICTION = 2 TIME_BEFORE_IERS_RANGE = -1 TIME_BEYOND_IERS_RANGE = -2 MJD_ZERO = 2400000.5 INTERPOLATE_ERROR = """\ interpolating from IERS_Auto using predictive values that are more than {0} days old. Normally you should not see this error because this class automatically downloads the latest IERS-A table. Perhaps you are offline? If you understand what you are doing then this error can be suppressed by setting the auto_max_age configuration variable to ``None``: from astropy.utils.iers import conf conf.auto_max_age = None """ MONTH_ABBR = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] def download_file(*args, **kwargs): """ Overload astropy.utils.data.download_file within iers module to use a custom (longer) wait time. This just passes through ``*args`` and ``**kwargs`` after temporarily setting the download_file remote timeout to the local ``iers.conf.remote_timeout`` value. 
""" kwargs.setdefault('http_headers', {'User-Agent': 'astropy/iers', 'Accept': '*/*'}) with utils.data.conf.set_temp('remote_timeout', conf.remote_timeout): return utils.data.download_file(*args, **kwargs) def _none_to_float(value): """ Convert None to a valid floating point value. Especially for auto_max_age = None. """ return (value if value is not None else np.finfo(float).max) class IERSStaleWarning(AstropyWarning): pass class Conf(_config.ConfigNamespace): """ Configuration parameters for `astropy.utils.iers`. """ auto_download = _config.ConfigItem( True, 'Enable auto-downloading of the latest IERS data. If set to False ' 'then the local IERS-B file will be used by default (even if the ' 'full IERS file with predictions was already downloaded and cached). ' 'This parameter also controls whether internet resources will be ' 'queried to update the leap second table if the installed version is ' 'out of date. Default is True.') auto_max_age = _config.ConfigItem( 30.0, 'Maximum age (days) of predictive data before auto-downloading. ' 'See "Auto refresh behavior" in astropy.utils.iers documentation for details.' 'Default is 30.') iers_auto_url = _config.ConfigItem( IERS_A_URL, 'URL for auto-downloading IERS file data.') iers_auto_url_mirror = _config.ConfigItem( IERS_A_URL_MIRROR, 'Mirror URL for auto-downloading IERS file data.') remote_timeout = _config.ConfigItem( 10.0, 'Remote timeout downloading IERS file data (seconds).') system_leap_second_file = _config.ConfigItem( '', 'System file with leap seconds.') iers_leap_second_auto_url = _config.ConfigItem( IERS_LEAP_SECOND_URL, 'URL for auto-downloading leap seconds.') ietf_leap_second_auto_url = _config.ConfigItem( IETF_LEAP_SECOND_URL, 'Alternate URL for auto-downloading leap seconds.') conf = Conf() class IERSRangeError(IndexError): """ Any error for when dates are outside of the valid range for IERS """ class IERS(QTable): """Generic IERS table class, defining interpolation functions. Sub-classed from `astropy.table.QTable`. The table should hold columns 'MJD', 'UT1_UTC', 'dX_2000A'/'dY_2000A', and 'PM_x'/'PM_y'. """ iers_table = None """Cached table, returned if ``open`` is called without arguments.""" @classmethod def open(cls, file=None, cache=False, **kwargs): """Open an IERS table, reading it from a file if not loaded before. Parameters ---------- file : str or None full local or network path to the ascii file holding IERS data, for passing on to the ``read`` class methods (further optional arguments that are available for some IERS subclasses can be added). If None, use the default location from the ``read`` class method. cache : bool Whether to use cache. Defaults to False, since IERS files are regularly updated. Returns ------- IERS An IERS table class instance Notes ----- On the first call in a session, the table will be memoized (in the ``iers_table`` class attribute), and further calls to ``open`` will return this stored table if ``file=None`` (the default). If a table needs to be re-read from disk, pass on an explicit file location or use the (sub-class) close method and re-open. If the location is a network location it is first downloaded via download_file. For the IERS class itself, an IERS_B sub-class instance is opened. """ if file is not None or cls.iers_table is None: if file is not None: if urlparse(file).netloc: kwargs.update(file=download_file(file, cache=cache)) else: kwargs.update(file=file) # TODO: the below is really ugly and probably a bad idea. 
Instead, # there should probably be an IERSBase class, which provides # useful methods but cannot really be used on its own, and then # *perhaps* an IERS class which provides best defaults. But for # backwards compatibility, we use the IERS_B reader for IERS here. if cls is IERS: cls.iers_table = IERS_B.read(**kwargs) else: cls.iers_table = cls.read(**kwargs) return cls.iers_table @classmethod def close(cls): """Remove the IERS table from the class. This allows the table to be re-read from disk during one's session (e.g., if one finds it is out of date and has updated the file). """ cls.iers_table = None def mjd_utc(self, jd1, jd2=0.): """Turn a time to MJD, returning integer and fractional parts. Parameters ---------- jd1 : float, array, or `~astropy.time.Time` first part of two-part JD, or Time object jd2 : float or array, optional second part of two-part JD. Default is 0., ignored if jd1 is `~astropy.time.Time`. Returns ------- mjd : float or array integer part of MJD utc : float or array fractional part of MJD """ try: # see if this is a Time object jd1, jd2 = jd1.utc.jd1, jd1.utc.jd2 except Exception: pass mjd = np.floor(jd1 - MJD_ZERO + jd2) utc = jd1 - (MJD_ZERO+mjd) + jd2 return mjd, utc def ut1_utc(self, jd1, jd2=0., return_status=False): """Interpolate UT1-UTC corrections in IERS Table for given dates. Parameters ---------- jd1 : float, array of float, or `~astropy.time.Time` object first part of two-part JD, or Time object jd2 : float or float array, optional second part of two-part JD. Default is 0., ignored if jd1 is `~astropy.time.Time`. return_status : bool Whether to return status values. If False (default), raise ``IERSRangeError`` if any time is out of the range covered by the IERS table. Returns ------- ut1_utc : float or float array UT1-UTC, interpolated in IERS Table status : int or int array Status values (if ``return_status``=``True``):: ``iers.FROM_IERS_B`` ``iers.FROM_IERS_A`` ``iers.FROM_IERS_A_PREDICTION`` ``iers.TIME_BEFORE_IERS_RANGE`` ``iers.TIME_BEYOND_IERS_RANGE`` """ return self._interpolate(jd1, jd2, ['UT1_UTC'], self.ut1_utc_source if return_status else None) def dcip_xy(self, jd1, jd2=0., return_status=False): """Interpolate CIP corrections in IERS Table for given dates. Parameters ---------- jd1 : float, array of float, or `~astropy.time.Time` object first part of two-part JD, or Time object jd2 : float or float array, optional second part of two-part JD (default 0., ignored if jd1 is Time) return_status : bool Whether to return status values. If False (default), raise ``IERSRangeError`` if any time is out of the range covered by the IERS table. Returns ------- D_x : `~astropy.units.Quantity` ['angle'] x component of CIP correction for the requested times. D_y : `~astropy.units.Quantity` ['angle'] y component of CIP correction for the requested times status : int or int array Status values (if ``return_status``=``True``):: ``iers.FROM_IERS_B`` ``iers.FROM_IERS_A`` ``iers.FROM_IERS_A_PREDICTION`` ``iers.TIME_BEFORE_IERS_RANGE`` ``iers.TIME_BEYOND_IERS_RANGE`` """ return self._interpolate(jd1, jd2, ['dX_2000A', 'dY_2000A'], self.dcip_source if return_status else None) def pm_xy(self, jd1, jd2=0., return_status=False): """Interpolate polar motions from IERS Table for given dates. Parameters ---------- jd1 : float, array of float, or `~astropy.time.Time` object first part of two-part JD, or Time object jd2 : float or float array, optional second part of two-part JD. Default is 0., ignored if jd1 is `~astropy.time.Time`. 
return_status : bool
            Whether to return status values.  If False (default),
            raise ``IERSRangeError`` if any time is out of the range covered
            by the IERS table.

        Returns
        -------
        PM_x : `~astropy.units.Quantity` ['angle']
            x component of polar motion for the requested times.
        PM_y : `~astropy.units.Quantity` ['angle']
            y component of polar motion for the requested times.
        status : int or int array
            Status values (if ``return_status``=``True``)::
            ``iers.FROM_IERS_B``
            ``iers.FROM_IERS_A``
            ``iers.FROM_IERS_A_PREDICTION``
            ``iers.TIME_BEFORE_IERS_RANGE``
            ``iers.TIME_BEYOND_IERS_RANGE``
        """
        return self._interpolate(jd1, jd2, ['PM_x', 'PM_y'],
                                 self.pm_source if return_status else None)

    def _check_interpolate_indices(self, indices_orig, indices_clipped,
                                   max_input_mjd):
        """
        Check that the indices from interpolation match those after clipping
        to the valid table range.  This method gets overridden in the
        IERS_Auto class because it has different requirements.
        """
        if np.any(indices_orig != indices_clipped):
            raise IERSRangeError('(some) times are outside of range covered '
                                 'by IERS table.')

    def _interpolate(self, jd1, jd2, columns, source=None):
        mjd, utc = self.mjd_utc(jd1, jd2)
        # enforce array
        is_scalar = not hasattr(mjd, '__array__') or mjd.ndim == 0
        if is_scalar:
            mjd = np.array([mjd])
            utc = np.array([utc])
        elif mjd.size == 0:
            # Short-cut empty input.
            return np.array([])

        self._refresh_table_as_needed(mjd)

        # For typical format, will always find a match (since MJD are integer)
        # hence, important to define which side we will be; this ensures
        # self['MJD'][i-1]<=mjd<self['MJD'][i]
        i = np.searchsorted(self['MJD'].value, mjd, side='right')

        # Get index to MJD at or just below given mjd, clipping to ensure we
        # stay in range of table (status will be set below for those outside)
        i1 = np.clip(i, 1, len(self) - 1)
        i0 = i1 - 1
        mjd_0, mjd_1 = self['MJD'][i0].value, self['MJD'][i1].value
        results = []
        for column in columns:
            val_0, val_1 = self[column][i0], self[column][i1]
            d_val = val_1 - val_0
            if column == 'UT1_UTC':
                # Check & correct for possible leap second (correcting diff.,
                # not 1st point, since jump can only happen right at 2nd point)
                d_val -= d_val.round()
            # Linearly interpolate (which is what TEMPO does for UT1-UTC, but
            # may want to follow IERS gazette #13 for more precise
            # interpolation and correction for tidal effects;
            # https://maia.usno.navy.mil/iers-gaz13)
            val = val_0 + (mjd - mjd_0 + utc) / (mjd_1 - mjd_0) * d_val

            # Do not extrapolate outside range, instead just propagate last values.
            val[i == 0] = self[column][0]
            val[i == len(self)] = self[column][-1]

            if is_scalar:
                val = val[0]

            results.append(val)

        if source:
            # Set status to source, using the routine passed in.
            status = source(i1)
            # Check for out of range
            status[i == 0] = TIME_BEFORE_IERS_RANGE
            status[i == len(self)] = TIME_BEYOND_IERS_RANGE
            if is_scalar:
                status = status[0]
            results.append(status)
            return results
        else:
            self._check_interpolate_indices(i1, i, np.max(mjd))
            return results[0] if len(results) == 1 else results

    def _refresh_table_as_needed(self, mjd):
        """
        Potentially update the IERS table in place depending on the requested
        time values in ``mjd`` and the time span of the table.

        The base behavior is not to update the table.  ``IERS_Auto`` overrides
        this method.
        """
        pass

    def ut1_utc_source(self, i):
        """Source for UT1-UTC.  To be overridden by subclass."""
        return np.zeros_like(i)

    def dcip_source(self, i):
        """Source for CIP correction.  To be overridden by subclass."""
        return np.zeros_like(i)

    def pm_source(self, i):
        """Source for polar motion.
To be overridden by subclass.""" return np.zeros_like(i) @property def time_now(self): """ Property to provide the current time, but also allow for explicitly setting the _time_now attribute for testing purposes. """ try: return self._time_now except Exception: return Time.now() def _convert_col_for_table(self, col): # Fill masked columns with units to avoid dropped-mask warnings # when converting to Quantity. # TODO: Once we support masked quantities, we can drop this and # in the code below replace b_bad with table['UT1_UTC_B'].mask, etc. if (getattr(col, 'unit', None) is not None and isinstance(col, MaskedColumn)): col = col.filled(np.nan) return super()._convert_col_for_table(col) class IERS_A(IERS): """IERS Table class targeted to IERS A, provided by USNO. These include rapid turnaround and predicted times. See https://datacenter.iers.org/eop.php Notes ----- The IERS A file is not part of astropy. It can be downloaded from ``iers.IERS_A_URL`` or ``iers.IERS_A_URL_MIRROR``. See ``iers.__doc__`` for instructions on use in ``Time``, etc. """ iers_table = None @classmethod def _combine_a_b_columns(cls, iers_a): """ Return a new table with appropriate combination of IERS_A and B columns. """ # IERS A has some rows at the end that hold nothing but dates & MJD # presumably to be filled later. Exclude those a priori -- there # should at least be a predicted UT1-UTC and PM! table = iers_a[np.isfinite(iers_a['UT1_UTC_A']) & (iers_a['PolPMFlag_A'] != '')] # This does nothing for IERS_A, but allows IERS_Auto to ensure the # IERS B values in the table are consistent with the true ones. table = cls._substitute_iers_b(table) # Combine A and B columns, using B where possible. b_bad = np.isnan(table['UT1_UTC_B']) table['UT1_UTC'] = np.where(b_bad, table['UT1_UTC_A'], table['UT1_UTC_B']) table['UT1Flag'] = np.where(b_bad, table['UT1Flag_A'], 'B') # Repeat for polar motions. b_bad = np.isnan(table['PM_X_B']) | np.isnan(table['PM_Y_B']) table['PM_x'] = np.where(b_bad, table['PM_x_A'], table['PM_X_B']) table['PM_y'] = np.where(b_bad, table['PM_y_A'], table['PM_Y_B']) table['PolPMFlag'] = np.where(b_bad, table['PolPMFlag_A'], 'B') b_bad = np.isnan(table['dX_2000A_B']) | np.isnan(table['dY_2000A_B']) table['dX_2000A'] = np.where(b_bad, table['dX_2000A_A'], table['dX_2000A_B']) table['dY_2000A'] = np.where(b_bad, table['dY_2000A_A'], table['dY_2000A_B']) table['NutFlag'] = np.where(b_bad, table['NutFlag_A'], 'B') # Get the table index for the first row that has predictive values # PolPMFlag_A IERS (I) or Prediction (P) flag for # Bull. A polar motion values # UT1Flag_A IERS (I) or Prediction (P) flag for # Bull. A UT1-UTC values # Since only 'P' and 'I' are possible and 'P' is guaranteed to come # after 'I', we can use searchsorted for 100 times speed up over # finding the first index where the flag equals 'P'. p_index = min(np.searchsorted(table['UT1Flag_A'], 'P'), np.searchsorted(table['PolPMFlag_A'], 'P')) table.meta['predictive_index'] = p_index table.meta['predictive_mjd'] = table['MJD'][p_index].value return table @classmethod def _substitute_iers_b(cls, table): # See documentation in IERS_Auto. return table @classmethod def read(cls, file=None, readme=None): """Read IERS-A table from a finals2000a.* file provided by USNO. Parameters ---------- file : str full path to ascii file holding IERS-A data. Defaults to ``iers.IERS_A_FILE``. readme : str full path to ascii file holding CDS-style readme. Defaults to package version, ``iers.IERS_A_README``. 
Returns ------- ``IERS_A`` class instance """ if file is None: file = IERS_A_FILE if readme is None: readme = IERS_A_README iers_a = super().read(file, format='cds', readme=readme) # Combine the A and B data for UT1-UTC and PM columns table = cls._combine_a_b_columns(iers_a) table.meta['data_path'] = file table.meta['readme_path'] = readme return table def ut1_utc_source(self, i): """Set UT1-UTC source flag for entries in IERS table""" ut1flag = self['UT1Flag'][i] source = np.ones_like(i) * FROM_IERS_B source[ut1flag == 'I'] = FROM_IERS_A source[ut1flag == 'P'] = FROM_IERS_A_PREDICTION return source def dcip_source(self, i): """Set CIP correction source flag for entries in IERS table""" nutflag = self['NutFlag'][i] source = np.ones_like(i) * FROM_IERS_B source[nutflag == 'I'] = FROM_IERS_A source[nutflag == 'P'] = FROM_IERS_A_PREDICTION return source def pm_source(self, i): """Set polar motion source flag for entries in IERS table""" pmflag = self['PolPMFlag'][i] source = np.ones_like(i) * FROM_IERS_B source[pmflag == 'I'] = FROM_IERS_A source[pmflag == 'P'] = FROM_IERS_A_PREDICTION return source class IERS_B(IERS): """IERS Table class targeted to IERS B, provided by IERS itself. These are final values; see https://www.iers.org/IERS/EN/Home/home_node.html Notes ----- If the package IERS B file (```iers.IERS_B_FILE``) is out of date, a new version can be downloaded from ``iers.IERS_B_URL``. """ iers_table = None @classmethod def read(cls, file=None, readme=None, data_start=14): """Read IERS-B table from a eopc04_iau2000.* file provided by IERS. Parameters ---------- file : str full path to ascii file holding IERS-B data. Defaults to package version, ``iers.IERS_B_FILE``. readme : str full path to ascii file holding CDS-style readme. Defaults to package version, ``iers.IERS_B_README``. data_start : int starting row. Default is 14, appropriate for standard IERS files. Returns ------- ``IERS_B`` class instance """ if file is None: file = IERS_B_FILE if readme is None: readme = IERS_B_README table = super().read(file, format='cds', readme=readme, data_start=data_start) table.meta['data_path'] = file table.meta['readme_path'] = readme return table def ut1_utc_source(self, i): """Set UT1-UTC source flag for entries in IERS table""" return np.ones_like(i) * FROM_IERS_B def dcip_source(self, i): """Set CIP correction source flag for entries in IERS table""" return np.ones_like(i) * FROM_IERS_B def pm_source(self, i): """Set PM source flag for entries in IERS table""" return np.ones_like(i) * FROM_IERS_B class IERS_Auto(IERS_A): """ Provide most-recent IERS data and automatically handle downloading of updated values as necessary. """ iers_table = None @classmethod def open(cls): """If the configuration setting ``astropy.utils.iers.conf.auto_download`` is set to True (default), then open a recent version of the IERS-A table with predictions for UT1-UTC and polar motion out to approximately one year from now. If the available version of this file is older than ``astropy.utils.iers.conf.auto_max_age`` days old (or non-existent) then it will be downloaded over the network and cached. If the configuration setting ``astropy.utils.iers.conf.auto_download`` is set to False then ``astropy.utils.iers.IERS()`` is returned. This is normally the IERS-B table that is supplied with astropy. On the first call in a session, the table will be memoized (in the ``iers_table`` class attribute), and further calls to ``open`` will return this stored table. 
        Returns
        -------
        `~astropy.table.QTable` instance
            With IERS (Earth rotation) data columns

        """
        if not conf.auto_download:
            cls.iers_table = IERS_B.open()
            return cls.iers_table

        all_urls = (conf.iers_auto_url, conf.iers_auto_url_mirror)

        if cls.iers_table is not None:
            # If the URL has changed, we need to redownload the file, so we
            # should ignore the internally cached version.
            if cls.iers_table.meta.get('data_url') in all_urls:
                return cls.iers_table

        try:
            filename = download_file(all_urls[0], sources=all_urls, cache=True)
        except Exception as err:
            # Issue a warning here, perhaps user is offline.  An exception
            # will be raised downstream when actually trying to interpolate
            # predictive values.
            warn(AstropyWarning(
                f'failed to download {" and ".join(all_urls)}, '
                f'using local IERS-B: {err}'))
            cls.iers_table = IERS_B.open()
            return cls.iers_table

        cls.iers_table = cls.read(file=filename)
        cls.iers_table.meta['data_url'] = all_urls[0]

        return cls.iers_table

    def _check_interpolate_indices(self, indices_orig, indices_clipped,
                                   max_input_mjd):
        """Check that the indices from interpolation match those after
        clipping to the valid table range.  The IERS_Auto class is exempted
        as long as it has sufficiently recent available data so the clipped
        interpolation is always within the confidence bounds of current Earth
        rotation knowledge.
        """
        predictive_mjd = self.meta['predictive_mjd']

        # See explanation in _refresh_table_as_needed for these conditions
        auto_max_age = _none_to_float(conf.auto_max_age)
        if (max_input_mjd > predictive_mjd
                and self.time_now.mjd - predictive_mjd > auto_max_age):
            raise ValueError(INTERPOLATE_ERROR.format(auto_max_age))

    def _refresh_table_as_needed(self, mjd):
        """Potentially update the IERS table in place depending on the
        requested time values in ``mjd`` and the time span of the table.

        For IERS_Auto the behavior is that the table is refreshed from the
        IERS server if both the following apply:

        - Any of the requested IERS values are predictive.  The IERS-A table
          contains predictive data out for a year after the available
          definitive values.
        - The first predictive values are at least ``conf.auto_max_age`` days
          old.  In other words the IERS-A table was created by IERS long
          enough ago that it can be considered stale for predictions.
        """
        max_input_mjd = np.max(mjd)
        now_mjd = self.time_now.mjd

        # IERS-A table contains predictive data out for a year after
        # the available definitive values.
        fpi = self.meta['predictive_index']
        predictive_mjd = self.meta['predictive_mjd']

        # Update table in place if necessary
        auto_max_age = _none_to_float(conf.auto_max_age)

        # If auto_max_age is smaller than IERS update time then repeated
        # downloads may occur without getting updated values (giving an
        # IERSStaleWarning).
        if auto_max_age < 10:
            raise ValueError('IERS auto_max_age configuration value must be '
                             'larger than 10 days')

        if (max_input_mjd > predictive_mjd
                and (now_mjd - predictive_mjd) > auto_max_age):

            all_urls = (conf.iers_auto_url, conf.iers_auto_url_mirror)

            # Get the latest version
            try:
                filename = download_file(
                    all_urls[0], sources=all_urls, cache="update")
            except Exception as err:
                # Issue a warning here, perhaps user is offline.  An exception
                # will be raised downstream when actually trying to
                # interpolate predictive values.
                warn(AstropyWarning(
                    f'failed to download {" and ".join(all_urls)}: {err}.\n'
                    'A coordinate or time-related '
                    'calculation might be compromised or fail because the dates are '
                    'not covered by the available IERS file.  See the '
                    '"IERS data access" section of the astropy documentation '
                    'for additional information on working offline.'))
                return

            new_table = self.__class__.read(file=filename)
            new_table.meta['data_url'] = str(all_urls[0])

            # New table has new values?
            if new_table['MJD'][-1] > self['MJD'][-1]:
                # Replace current values from the first predictive index
                # through the end of the current table.  This replacement is
                # much faster than just deleting all rows and then using
                # add_row for the whole duration.
                new_fpi = np.searchsorted(new_table['MJD'].value,
                                          predictive_mjd, side='right')
                n_replace = len(self) - fpi
                self[fpi:] = new_table[new_fpi:new_fpi + n_replace]

                # Sanity check for continuity
                if new_table['MJD'][new_fpi + n_replace] - self['MJD'][-1] != 1.0 * u.d:
                    raise ValueError('unexpected gap in MJD when refreshing IERS table')

                # Now add new rows in place
                for row in new_table[new_fpi + n_replace:]:
                    self.add_row(row)

                self.meta.update(new_table.meta)
            else:
                warn(IERSStaleWarning(
                    'IERS_Auto predictive values are older than {} days but downloading '
                    'the latest table did not find newer values'.format(conf.auto_max_age)))

    @classmethod
    def _substitute_iers_b(cls, table):
        """Substitute IERS B values with those from a real IERS B table.

        IERS-A has IERS-B values included, but for reasons unknown these do
        not match the latest IERS-B values (see comments in #4436).  Here, we
        use the bundled astropy IERS-B table to overwrite the values in the
        downloaded IERS-A table.
        """
        iers_b = IERS_B.open()
        # Substitute IERS-B values for existing B values in IERS-A table
        mjd_b = table['MJD'][np.isfinite(table['UT1_UTC_B'])]
        i0 = np.searchsorted(iers_b['MJD'], mjd_b[0], side='left')
        i1 = np.searchsorted(iers_b['MJD'], mjd_b[-1], side='right')
        iers_b = iers_b[i0:i1]
        n_iers_b = len(iers_b)
        # If there is overlap then replace IERS-A values from available IERS-B
        if n_iers_b > 0:
            # Sanity check that we are overwriting the correct values
            if not u.allclose(table['MJD'][:n_iers_b], iers_b['MJD']):
                raise ValueError('unexpected mismatch when copying '
                                 'IERS-B values into IERS-A table.')
            # Finally do the overwrite
            table['UT1_UTC_B'][:n_iers_b] = iers_b['UT1_UTC']
            table['PM_X_B'][:n_iers_b] = iers_b['PM_x']
            table['PM_Y_B'][:n_iers_b] = iers_b['PM_y']
            table['dX_2000A_B'][:n_iers_b] = iers_b['dX_2000A']
            table['dY_2000A_B'][:n_iers_b] = iers_b['dY_2000A']

        return table


class earth_orientation_table(ScienceState):
    """Default IERS table for Earth rotation and reference systems service.

    These tables are used to calculate the offsets between ``UT1`` and ``UTC``
    and for conversion to Earth-based coordinate systems.

    The state itself is an IERS table, as an instance of one of the
    `~astropy.utils.iers.IERS` classes.  The default, the auto-updating
    `~astropy.utils.iers.IERS_Auto` class, should suffice for most purposes.

    Examples
    --------
    To temporarily use the IERS-B file packaged with astropy::

      >>> from astropy.utils import iers
      >>> from astropy.time import Time
      >>> iers_b = iers.IERS_B.open(iers.IERS_B_FILE)
      >>> with iers.earth_orientation_table.set(iers_b):
      ...     print(Time('2000-01-01').ut1.isot)
      2000-01-01T00:00:00.355

    To use the most recent IERS-A file for the whole session::

      >>> iers_a = iers.IERS_A.open(iers.IERS_A_URL)  # doctest: +SKIP
      >>> iers.earth_orientation_table.set(iers_a)  # doctest: +SKIP
      <ScienceState earth_orientation_table: <IERS_A length=17463>...>

    To go back to the default (of `~astropy.utils.iers.IERS_Auto`)::

      >>> iers.earth_orientation_table.set(None)  # doctest: +SKIP
      <ScienceState earth_orientation_table: <IERS_Auto length=17428>...>
    """
    _value = None

    @classmethod
    def validate(cls, value):
        if value is None:
            value = IERS_Auto.open()
        if not isinstance(value, IERS):
            raise ValueError("earth_orientation_table requires an IERS Table.")
        return value


class LeapSeconds(QTable):
    """Leap seconds class, holding TAI-UTC differences.

    The table should hold columns 'year', 'month', 'tai_utc'.

    Methods are provided to initialize the table from IERS
    ``Leap_Second.dat``, IETF/ntp ``leap-seconds.list``, or built-in
    ERFA/SOFA, and to update the list used by ERFA.

    Notes
    -----
    Astropy has a built-in ``iers.IERS_LEAP_SECOND_FILE``.  Up to date
    versions can be downloaded from ``iers.IERS_LEAP_SECOND_URL`` or
    ``iers.IETF_LEAP_SECOND_URL``.  Many systems also store a version of
    ``leap-seconds.list`` for use with ``ntp`` (e.g., on Debian/Ubuntu
    systems, ``/usr/share/zoneinfo/leap-seconds.list``).

    To prevent querying internet resources if the available local leap second
    file(s) are out of date, set ``iers.conf.auto_download = False``.  This
    must be done prior to performing any ``Time`` scale transformations
    related to UTC (e.g. converting from UTC to TAI).
    """
    # Note: Time instances in this class should use scale='tai' to avoid
    # needing leap seconds in their creation or interpretation.

    _re_expires = re.compile(r'^#.*File expires on[:\s]+(\d+\s\w+\s\d+)\s*$')
    _expires = None
    _auto_open_files = ['erfa', IERS_LEAP_SECOND_FILE,
                        'system_leap_second_file',
                        'iers_leap_second_auto_url',
                        'ietf_leap_second_auto_url']
    """Files or conf attributes to try in auto_open."""

    @classmethod
    def open(cls, file=None, cache=False):
        """Open a leap-second list.

        Parameters
        ----------
        file : path-like or None
            Full local or network path to the file holding leap-second data,
            for passing on to the various ``from_`` class methods.
            If 'erfa', return the data used by the ERFA library.
            If `None`, use default locations from file and configuration to
            find a table that is not expired.
        cache : bool
            Whether to use cache.  Defaults to False, since leap-second files
            are regularly updated.

        Returns
        -------
        leap_seconds : `~astropy.utils.iers.LeapSeconds`
            Table with 'year', 'month', and 'tai_utc' columns, plus possibly
            others.

        Notes
        -----
        Bulletin C is released about 10 days after a possible leap second is
        introduced, i.e., mid-January or mid-July.  Expiration days are thus
        generally at least 150 days after the present.  For the auto-loading,
        a list comprised of the table shipped with astropy, and files and
        URLs in `~astropy.utils.iers.Conf` are tried, returning the first
        that is sufficiently new, or the newest among them all.
        """
        if file is None:
            return cls.auto_open()

        if file.lower() == 'erfa':
            return cls.from_erfa()

        if urlparse(file).netloc:
            file = download_file(file, cache=cache)

        # Just try both reading methods.
        try:
            return cls.from_iers_leap_seconds(file)
        except Exception:
            return cls.from_leap_seconds_list(file)

    @staticmethod
    def _today():
        # Get current day in scale='tai' without going through a scale change
        # (so we do not need leap seconds).
s = '{0.year:04d}-{0.month:02d}-{0.day:02d}'.format(datetime.utcnow()) return Time(s, scale='tai', format='iso', out_subfmt='date') @classmethod def auto_open(cls, files=None): """Attempt to get an up-to-date leap-second list. The routine will try the files in sequence until it finds one whose expiration date is "good enough" (see below). If none are good enough, it returns the one with the most recent expiration date, warning if that file is expired. For remote files that are cached already, the cached file is tried first before attempting to retrieve it again. Parameters ---------- files : list of path-like, optional List of files/URLs to attempt to open. By default, uses ``cls._auto_open_files``. Returns ------- leap_seconds : `~astropy.utils.iers.LeapSeconds` Up to date leap-second table Notes ----- Bulletin C is released about 10 days after a possible leap second is introduced, i.e., mid-January or mid-July. Expiration days are thus generally at least 150 days after the present. We look for a file that expires more than 180 - `~astropy.utils.iers.Conf.auto_max_age` after the present. """ good_enough = cls._today() + TimeDelta(180-_none_to_float(conf.auto_max_age), format='jd') if files is None: # Basic files to go over (entries in _auto_open_files can be # configuration items, which we want to be sure are up to date). files = [getattr(conf, f, f) for f in cls._auto_open_files] # Remove empty entries. files = [f for f in files if f] # Our trials start with normal files and remote ones that are # already in cache. The bools here indicate that the cache # should be used. trials = [(f, True) for f in files if not urlparse(f).netloc or is_url_in_cache(f)] # If we are allowed to download, we try downloading new versions # if none of the above worked. if conf.auto_download: trials += [(f, False) for f in files if urlparse(f).netloc] self = None err_list = [] # Go through all entries, and return the first one that # is not expired, or the most up to date one. for f, allow_cache in trials: if not allow_cache: clear_download_cache(f) try: trial = cls.open(f, cache=True) except Exception as exc: err_list.append(exc) continue if self is None or trial.expires > self.expires: self = trial self.meta['data_url'] = str(f) if self.expires > good_enough: break if self is None: raise ValueError('none of the files could be read. The ' 'following errors were raised:\n' + str(err_list)) if self.expires < self._today(): warn('leap-second file is expired.', IERSStaleWarning) return self @property def expires(self): """The limit of validity of the table.""" return self._expires @classmethod def _read_leap_seconds(cls, file, **kwargs): """Read a file, identifying expiration by matching 'File expires'""" expires = None # Find expiration date. with get_readable_fileobj(file) as fh: lines = fh.readlines() for line in lines: match = cls._re_expires.match(line) if match: day, month, year = match.groups()[0].split() month_nb = MONTH_ABBR.index(month[:3]) + 1 expires = Time(f'{year}-{month_nb:02d}-{day}', scale='tai', out_subfmt='date') break else: raise ValueError(f'did not find expiration date in {file}') self = cls.read(lines, format='ascii.no_header', **kwargs) self._expires = expires return self @classmethod def from_iers_leap_seconds(cls, file=IERS_LEAP_SECOND_FILE): """Create a table from a file like the IERS ``Leap_Second.dat``. Parameters ---------- file : path-like, optional Full local or network path to the file holding leap-second data in a format consistent with that used by IERS. 
            By default, uses ``iers.IERS_LEAP_SECOND_FILE``.

        Notes
        -----
        The file *must* contain the expiration date in a comment line, like
        '# File expires on 28 June 2020'
        """
        return cls._read_leap_seconds(
            file, names=['mjd', 'day', 'month', 'year', 'tai_utc'])

    @classmethod
    def from_leap_seconds_list(cls, file):
        """Create a table from a file like the IETF ``leap-seconds.list``.

        Parameters
        ----------
        file : path-like
            Full local or network path to the file holding leap-second data
            in a format consistent with that used by IETF.  Up to date
            versions can be retrieved from ``iers.IETF_LEAP_SECOND_URL``.

        Notes
        -----
        The file *must* contain the expiration date in a comment line, like
        '# File expires on:  28 June 2020'
        """
        from astropy.io.ascii import convert_numpy  # Here to avoid circular import

        names = ['ntp_seconds', 'tai_utc', 'comment', 'day', 'month', 'year']
        # Note: ntp_seconds does not fit in 32 bit, so causes problems on
        # 32-bit systems without the np.int64 converter.
        self = cls._read_leap_seconds(
            file, names=names, include_names=names[:2],
            converters={'ntp_seconds': [convert_numpy(np.int64)]})
        self['mjd'] = (self['ntp_seconds'] / 86400 + 15020).round()
        # Note: cannot use Time.ymdhms, since that might require leap seconds.
        isot = Time(self['mjd'], format='mjd', scale='tai').isot
        ymd = np.array(
            [[int(part) for part in t.partition('T')[0].split('-')]
             for t in isot])
        self['year'], self['month'], self['day'] = ymd.T
        return self

    @classmethod
    def from_erfa(cls, built_in=False):
        """Create table from the leap-second list in ERFA.

        Parameters
        ----------
        built_in : bool
            If `False` (default), retrieve the list currently used by ERFA,
            which may have been updated.  If `True`, retrieve the list
            shipped with erfa.
        """
        current = cls(erfa.leap_seconds.get())
        current._expires = Time('{0.year:04d}-{0.month:02d}-{0.day:02d}'
                                .format(erfa.leap_seconds.expires),
                                scale='tai')
        if not built_in:
            return current

        try:
            erfa.leap_seconds.set(None)  # reset to defaults
            return cls.from_erfa(built_in=False)
        finally:
            erfa.leap_seconds.set(current)

    def update_erfa_leap_seconds(self, initialize_erfa=False):
        """Add any leap seconds not already present to the ERFA table.

        This method matches leap seconds with those present in the ERFA
        table, and extends the latter as necessary.

        Parameters
        ----------
        initialize_erfa : bool, or 'only', or 'empty'
            Initialize the ERFA leap second table to its built-in value
            before trying to expand it.  This is generally not needed but
            can help in case it somehow got corrupted.  If equal to 'only',
            the ERFA table is reinitialized and no attempt is made to update
            it.  If 'empty', the leap second table is emptied before
            updating, i.e., it is overwritten altogether (note that this may
            break things in surprising ways, as most leap second tables do
            not include pre-1970 pseudo leap-seconds; you were warned).

        Returns
        -------
        n_update : int
            Number of items updated.

        Raises
        ------
        ValueError
            If the leap seconds in the table are not on the 1st of January
            or July, or if the matches are inconsistent.  This would
            normally suggest a corrupted leap second table, but might also
            indicate that the ERFA table was corrupted.  If needed, the ERFA
            table can be reset by calling this method with an appropriate
            value for ``initialize_erfa``.
        """
        if initialize_erfa == 'empty':
            # Initialize to empty and update is the same as overwrite.
            erfa.leap_seconds.set(self)
            return len(self)

        if initialize_erfa:
            erfa.leap_seconds.set()
            if initialize_erfa == 'only':
                return 0

        return erfa.leap_seconds.update(self)
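A minimal usage sketch for the LeapSeconds machinery above (not part of the module source; it assumes the module is importable as astropy.utils.iers and uses only the auto_open, expires, and update_erfa_leap_seconds APIs defined here):

from astropy.utils import iers

# Try the bundled file, any system file, and the configured URLs in turn,
# returning the freshest table that is not expired (or the newest overall).
# Note this may touch the network unless iers.conf.auto_download is False.
leap_table = iers.LeapSeconds.auto_open()
print('leap-second table expires:', leap_table.expires)

# Feed any entries that ERFA does not know about into its internal list.
n_updated = leap_table.update_erfa_leap_seconds()
print('entries added to ERFA:', n_updated)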
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst

"""Test initialization and other aspects of Angle and subclasses"""

import pytest
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import threading

from astropy.coordinates.angles import Longitude, Latitude, Angle
from astropy import units as u
from astropy.coordinates.errors import (IllegalSecondError, IllegalMinuteError,
                                        IllegalHourError, IllegalSecondWarning,
                                        IllegalMinuteWarning)


def test_create_angles():
    """
    Tests creating and accessing Angle objects
    """
    ''' The "angle" is a fundamental object. The internal
    representation is stored in radians, but this is transparent to the user.
    Units *must* be specified rather than a default value be assumed. This is
    as much for self-documenting code as anything else.

    Angle objects simply represent a single angular coordinate. More specific
    angular coordinates (e.g. Longitude, Latitude) are subclasses of Angle.'''

    a1 = Angle(54.12412, unit=u.degree)
    a2 = Angle("54.12412", unit=u.degree)
    a3 = Angle("54:07:26.832", unit=u.degree)
    a4 = Angle("54.12412 deg")
    a5 = Angle("54.12412 degrees")
    a6 = Angle("54.12412°")  # because we like Unicode
    a7 = Angle((54, 7, 26.832), unit=u.degree)
    a8 = Angle("54°07'26.832\"")
    # (deg,min,sec) *tuples* are acceptable, but lists/arrays are *not*
    # because of the need to eventually support arrays of coordinates
    a9 = Angle([54, 7, 26.832], unit=u.degree)
    assert_allclose(a9.value, [54, 7, 26.832])
    assert a9.unit is u.degree

    a10 = Angle(3.60827466667, unit=u.hour)
    a11 = Angle("3:36:29.7888000120", unit=u.hour)
    a12 = Angle((3, 36, 29.7888000120), unit=u.hour)  # *must* be a tuple
    # Regression test for #5001
    a13 = Angle((3, 36, 29.7888000120), unit='hour')

    Angle(0.944644098745, unit=u.radian)

    with pytest.raises(u.UnitsError):
        Angle(54.12412)
        # raises an exception because this is ambiguous

    with pytest.raises(u.UnitsError):
        Angle(54.12412, unit=u.m)

    with pytest.raises(ValueError):
        Angle(12.34, unit="not a unit")

    a14 = Angle("03h36m29.7888000120")  # no trailing 's', but unambiguous

    a15 = Angle("5h4m3s")  # single digits, no decimal
    assert a15.unit == u.hourangle

    a16 = Angle("1 d")
    a17 = Angle("1 degree")

    assert a16.degree == 1
    assert a17.degree == 1

    a18 = Angle("54 07.4472", unit=u.degree)
    a19 = Angle("54:07.4472", unit=u.degree)
    a20 = Angle("54d07.4472m", unit=u.degree)
    a21 = Angle("3h36m", unit=u.hour)
    a22 = Angle("3.6h", unit=u.hour)
    a23 = Angle("- 3h", unit=u.hour)
    a24 = Angle("+ 3h", unit=u.hour)

    # ensure the above angles that should match do
    assert a1 == a2 == a3 == a4 == a5 == a6 == a7 == a8 == a18 == a19 == a20
    assert_allclose(a1.radian, a2.radian)
    assert_allclose(a2.degree, a3.degree)
    assert_allclose(a3.radian, a4.radian)
    assert_allclose(a4.radian, a5.radian)
    assert_allclose(a5.radian, a6.radian)
    assert_allclose(a6.radian, a7.radian)

    assert_allclose(a10.degree, a11.degree)
    assert a11 == a12 == a13 == a14
    assert a21 == a22
    assert a23 == -a24

    # check for illegal ranges / values
    with pytest.raises(IllegalSecondError):
        a = Angle("12 32 99", unit=u.degree)

    with pytest.raises(IllegalMinuteError):
        a = Angle("12 99 23", unit=u.degree)

    with pytest.raises(IllegalSecondError):
        a = Angle("12 32 99", unit=u.hour)

    with pytest.raises(IllegalMinuteError):
        a = Angle("12 99 23", unit=u.hour)

    with pytest.raises(IllegalHourError):
        a = Angle("99 25 51.0", unit=u.hour)

    with pytest.raises(ValueError):
        a = Angle("12 25 51.0xxx", unit=u.hour)

    with pytest.raises(ValueError):
        a = Angle("12h34321m32.2s")

    assert a1 is not None


def
test_angle_from_view(): q = np.arange(3.) * u.deg a = q.view(Angle) assert type(a) is Angle assert a.unit is q.unit assert np.all(a == q) q2 = np.arange(4) * u.m with pytest.raises(u.UnitTypeError): q2.view(Angle) def test_angle_ops(): """ Tests operations on Angle objects """ # Angles can be added and subtracted. Multiplication and division by a # scalar is also permitted. A negative operator is also valid. All of # these operate in a single dimension. Attempting to multiply or divide two # Angle objects will return a quantity. An exception will be raised if it # is attempted to store output with a non-angular unit in an Angle [#2718]. a1 = Angle(3.60827466667, unit=u.hour) a2 = Angle("54:07:26.832", unit=u.degree) a1 + a2 # creates new Angle object a1 - a2 -a1 assert_allclose((a1 * 2).hour, 2 * 3.6082746666700003) assert abs((a1 / 3.123456).hour - 3.60827466667 / 3.123456) < 1e-10 # commutativity assert (2 * a1).hour == (a1 * 2).hour a3 = Angle(a1) # makes a *copy* of the object, but identical content as a1 assert_allclose(a1.radian, a3.radian) assert a1 is not a3 a4 = abs(-a1) assert a4.radian == a1.radian a5 = Angle(5.0, unit=u.hour) assert a5 > a1 assert a5 >= a1 assert a1 < a5 assert a1 <= a5 # check operations with non-angular result give Quantity. a6 = Angle(45., u.degree) a7 = a6 * a5 assert type(a7) is u.Quantity # but those with angular result yield Angle. # (a9 is regression test for #5327) a8 = a1 + 1.*u.deg assert type(a8) is Angle a9 = 1.*u.deg + a1 assert type(a9) is Angle with pytest.raises(TypeError): a6 *= a5 with pytest.raises(TypeError): a6 *= u.m with pytest.raises(TypeError): np.sin(a6, out=a6) def test_angle_methods(): # Most methods tested as part of the Quantity tests. # A few tests here which caused problems before: #8368 a = Angle([0., 2.], 'deg') a_mean = a.mean() assert type(a_mean) is Angle assert a_mean == 1. * u.degree a_std = a.std() assert type(a_std) is Angle assert a_std == 1. * u.degree a_var = a.var() assert type(a_var) is u.Quantity assert a_var == 1. * u.degree ** 2 a_ptp = a.ptp() assert type(a_ptp) is Angle assert a_ptp == 2. * u.degree a_max = a.max() assert type(a_max) is Angle assert a_max == 2. * u.degree a_min = a.min() assert type(a_min) is Angle assert a_min == 0. 
* u.degree def test_angle_convert(): """ Test unit conversion of Angle objects """ angle = Angle("54.12412", unit=u.degree) assert_allclose(angle.hour, 3.60827466667) assert_allclose(angle.radian, 0.944644098745) assert_allclose(angle.degree, 54.12412) assert len(angle.hms) == 3 assert isinstance(angle.hms, tuple) assert angle.hms[0] == 3 assert angle.hms[1] == 36 assert_allclose(angle.hms[2], 29.78879999999947) # also check that the namedtuple attribute-style access works: assert angle.hms.h == 3 assert angle.hms.m == 36 assert_allclose(angle.hms.s, 29.78879999999947) assert len(angle.dms) == 3 assert isinstance(angle.dms, tuple) assert angle.dms[0] == 54 assert angle.dms[1] == 7 assert_allclose(angle.dms[2], 26.831999999992036) # also check that the namedtuple attribute-style access works: assert angle.dms.d == 54 assert angle.dms.m == 7 assert_allclose(angle.dms.s, 26.831999999992036) assert isinstance(angle.dms[0], float) assert isinstance(angle.hms[0], float) # now make sure dms and signed_dms work right for negative angles negangle = Angle("-54.12412", unit=u.degree) assert negangle.dms.d == -54 assert negangle.dms.m == -7 assert_allclose(negangle.dms.s, -26.831999999992036) assert negangle.signed_dms.sign == -1 assert negangle.signed_dms.d == 54 assert negangle.signed_dms.m == 7 assert_allclose(negangle.signed_dms.s, 26.831999999992036) def test_angle_formatting(): """ Tests string formatting for Angle objects """ ''' The string method of Angle has this signature: def string(self, unit=DEGREE, decimal=False, sep=" ", precision=5, pad=False): The "decimal" parameter defaults to False since if you need to print the Angle as a decimal, there's no need to use the "format" method (see above). ''' angle = Angle("54.12412", unit=u.degree) # __str__ is the default `format` assert str(angle) == angle.to_string() res = 'Angle as HMS: 3h36m29.7888s' assert f"Angle as HMS: {angle.to_string(unit=u.hour)}" == res res = 'Angle as HMS: 3:36:29.7888' assert f"Angle as HMS: {angle.to_string(unit=u.hour, sep=':')}" == res res = 'Angle as HMS: 3:36:29.79' assert f"Angle as HMS: {angle.to_string(unit=u.hour, sep=':', precision=2)}" == res # Note that you can provide one, two, or three separators passed as a # tuple or list res = 'Angle as HMS: 3h36m29.7888s' assert "Angle as HMS: {}".format(angle.to_string(unit=u.hour, sep=("h", "m", "s"), precision=4)) == res res = 'Angle as HMS: 3-36|29.7888' assert "Angle as HMS: {}".format(angle.to_string(unit=u.hour, sep=["-", "|"], precision=4)) == res res = 'Angle as HMS: 3-36-29.7888' assert f"Angle as HMS: {angle.to_string(unit=u.hour, sep='-', precision=4)}" == res res = 'Angle as HMS: 03h36m29.7888s' assert f"Angle as HMS: {angle.to_string(unit=u.hour, precision=4, pad=True)}" == res # Same as above, in degrees angle = Angle("3 36 29.78880", unit=u.degree) res = 'Angle as DMS: 3d36m29.7888s' assert f"Angle as DMS: {angle.to_string(unit=u.degree)}" == res res = 'Angle as DMS: 3:36:29.7888' assert f"Angle as DMS: {angle.to_string(unit=u.degree, sep=':')}" == res res = 'Angle as DMS: 3:36:29.79' assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree, sep=":", precision=2)) == res # Note that you can provide one, two, or three separators passed as a # tuple or list res = 'Angle as DMS: 3d36m29.7888s' assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree, sep=("d", "m", "s"), precision=4)) == res res = 'Angle as DMS: 3-36|29.7888' assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree, sep=["-", "|"], precision=4)) == res res = 'Angle as 
DMS: 3-36-29.7888'
    assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree, sep="-",
                                                     precision=4)) == res

    res = 'Angle as DMS: 03d36m29.7888s'
    assert "Angle as DMS: {}".format(angle.to_string(unit=u.degree,
                                                     precision=4,
                                                     pad=True)) == res

    res = 'Angle as rad: 0.0629763rad'
    assert f"Angle as rad: {angle.to_string(unit=u.radian)}" == res

    res = 'Angle as rad decimal: 0.0629763'
    assert f"Angle as rad decimal: {angle.to_string(unit=u.radian, decimal=True)}" == res

    # check negative angles

    angle = Angle(-1.23456789, unit=u.degree)
    angle2 = Angle(-1.23456789, unit=u.hour)

    assert angle.to_string() == '-1d14m04.444404s'
    assert angle.to_string(pad=True) == '-01d14m04.444404s'
    assert angle.to_string(unit=u.hour) == '-0h04m56.2962936s'
    assert angle2.to_string(unit=u.hour, pad=True) == '-01h14m04.444404s'
    assert angle.to_string(unit=u.radian, decimal=True) == '-0.0215473'


def test_to_string_vector():
    # Regression test for the fact that vectorize doesn't work with Numpy 1.6
    assert Angle([1./7., 1./7.], unit='deg').to_string()[0] == "0d08m34.28571429s"
    assert Angle([1./7.], unit='deg').to_string()[0] == "0d08m34.28571429s"
    assert Angle(1./7., unit='deg').to_string() == "0d08m34.28571429s"


def test_angle_format_roundtripping():
    """
    Ensures that the string representation of an angle can be used to create
    a new valid Angle.
    """
    a1 = Angle(0, unit=u.radian)
    a2 = Angle(10, unit=u.degree)
    a3 = Angle(0.543, unit=u.degree)
    a4 = Angle('1d2m3.4s')

    assert Angle(str(a1)).degree == a1.degree
    assert Angle(str(a2)).degree == a2.degree
    assert Angle(str(a3)).degree == a3.degree
    assert Angle(str(a4)).degree == a4.degree

    # also check Longitude/Latitude
    ra = Longitude('1h2m3.4s')
    dec = Latitude('1d2m3.4s')

    assert_allclose(Angle(str(ra)).degree, ra.degree)
    assert_allclose(Angle(str(dec)).degree, dec.degree)


def test_radec():
    """
    Tests creation/operations of Longitude and Latitude objects
    """
    ''' Longitude and Latitude are objects that are subclassed from Angle. As
    with Angle, Longitude and Latitude can parse any unambiguous format
    (tuples, formatted strings, etc.).

    The intention is not to create an Angle subclass for every possible
    coordinate object (e.g. galactic l, galactic b). However, equatorial
    Longitude/Latitude are so prevalent in astronomy that it's worth creating
    ones for these units. They will be noted as "special" in the docs and,
    for other coordinate systems, just the Angle class is to be used. '''

    with pytest.raises(u.UnitsError):
        ra = Longitude("4:08:15.162342")  # error - hours or degrees?
with pytest.raises(u.UnitsError): ra = Longitude("-4:08:15.162342") # the "smart" initializer allows >24 to automatically do degrees, but the # Angle-based one does not # TODO: adjust in 0.3 for whatever behavior is decided on # ra = Longitude("26:34:15.345634") # unambiguous b/c hours don't go past 24 # assert_allclose(ra.degree, 26.570929342) with pytest.raises(u.UnitsError): ra = Longitude("26:34:15.345634") # ra = Longitude(68) with pytest.raises(u.UnitsError): ra = Longitude(68) with pytest.raises(u.UnitsError): ra = Longitude(12) with pytest.raises(ValueError): ra = Longitude("garbage containing a d and no units") ra = Longitude("12h43m23s") assert_allclose(ra.hour, 12.7230555556) ra = Longitude((56, 14, 52.52), unit=u.degree) # can accept tuples # TODO: again, fix based on >24 behavior # ra = Longitude((56,14,52.52)) with pytest.raises(u.UnitsError): ra = Longitude((56, 14, 52.52)) with pytest.raises(u.UnitsError): ra = Longitude((12, 14, 52)) # ambiguous w/o units ra = Longitude((12, 14, 52), unit=u.hour) ra = Longitude([56, 64, 52.2], unit=u.degree) # ...but not arrays (yet) # Units can be specified ra = Longitude("4:08:15.162342", unit=u.hour) # TODO: this was the "smart" initializer behavior - adjust in 0.3 appropriately # Where Longitude values are commonly found in hours or degrees, declination is # nearly always specified in degrees, so this is the default. # dec = Latitude("-41:08:15.162342") with pytest.raises(u.UnitsError): dec = Latitude("-41:08:15.162342") dec = Latitude("-41:08:15.162342", unit=u.degree) # same as above def test_negative_zero_dms(): # Test for DMS parser a = Angle('-00:00:10', u.deg) assert_allclose(a.degree, -10. / 3600.) # Unicode minus a = Angle('−00:00:10', u.deg) assert_allclose(a.degree, -10. / 3600.) def test_negative_zero_dm(): # Test for DM parser a = Angle('-00:10', u.deg) assert_allclose(a.degree, -10. / 60.) def test_negative_zero_hms(): # Test for HMS parser a = Angle('-00:00:10', u.hour) assert_allclose(a.hour, -10. / 3600.) def test_negative_zero_hm(): # Test for HM parser a = Angle('-00:10', u.hour) assert_allclose(a.hour, -10. / 60.) def test_negative_sixty_hm(): # Test for HM parser with pytest.warns(IllegalMinuteWarning): a = Angle('-00:60', u.hour) assert_allclose(a.hour, -1.) def test_plus_sixty_hm(): # Test for HM parser with pytest.warns(IllegalMinuteWarning): a = Angle('00:60', u.hour) assert_allclose(a.hour, 1.) def test_negative_fifty_nine_sixty_dms(): # Test for DMS parser with pytest.warns(IllegalSecondWarning): a = Angle('-00:59:60', u.deg) assert_allclose(a.degree, -1.) def test_plus_fifty_nine_sixty_dms(): # Test for DMS parser with pytest.warns(IllegalSecondWarning): a = Angle('+00:59:60', u.deg) assert_allclose(a.degree, 1.) def test_negative_sixty_dms(): # Test for DMS parser with pytest.warns(IllegalSecondWarning): a = Angle('-00:00:60', u.deg) assert_allclose(a.degree, -1. / 60.) def test_plus_sixty_dms(): # Test for DMS parser with pytest.warns(IllegalSecondWarning): a = Angle('+00:00:60', u.deg) assert_allclose(a.degree, 1. / 60.) 
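The carry-over behavior exercised by the parser tests above can be summarized in one hedged snippet (an editor-added sketch, not part of the original suite): a 60 in the seconds or minutes field is accepted with a warning and carried into the next field, and a leading sign applies to the whole sexagesimal value.

def test_sign_and_carry_summary():
    # 60 seconds carries into one minute; the leading '-' applies to the
    # whole sexagesimal value, not just the (zero) degrees field.
    with pytest.warns(IllegalSecondWarning):
        a = Angle('-00:00:60', u.deg)
    assert_allclose(a.degree, -1. / 60.)

    # Same carry rule for minutes in the HM parser.
    with pytest.warns(IllegalMinuteWarning):
        b = Angle('00:60', u.hour)
    assert_allclose(b.hour, 1.)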
def test_angle_to_is_angle(): with pytest.warns(IllegalSecondWarning): a = Angle('00:00:60', u.deg) assert isinstance(a, Angle) assert isinstance(a.to(u.rad), Angle) def test_angle_to_quantity(): with pytest.warns(IllegalSecondWarning): a = Angle('00:00:60', u.deg) q = u.Quantity(a) assert isinstance(q, u.Quantity) assert q.unit is u.deg def test_quantity_to_angle(): a = Angle(1.0*u.deg) assert isinstance(a, Angle) with pytest.raises(u.UnitsError): Angle(1.0*u.meter) a = Angle(1.0*u.hour) assert isinstance(a, Angle) assert a.unit is u.hourangle with pytest.raises(u.UnitsError): Angle(1.0*u.min) def test_angle_string(): with pytest.warns(IllegalSecondWarning): a = Angle('00:00:60', u.deg) assert str(a) == '0d01m00s' a = Angle('00:00:59S', u.deg) assert str(a) == '-0d00m59s' a = Angle('00:00:59N', u.deg) assert str(a) == '0d00m59s' a = Angle('00:00:59E', u.deg) assert str(a) == '0d00m59s' a = Angle('00:00:59W', u.deg) assert str(a) == '-0d00m59s' a = Angle('-00:00:10', u.hour) assert str(a) == '-0h00m10s' a = Angle('00:00:59E', u.hour) assert str(a) == '0h00m59s' a = Angle('00:00:59W', u.hour) assert str(a) == '-0h00m59s' a = Angle(3.2, u.radian) assert str(a) == '3.2rad' a = Angle(4.2, u.microarcsecond) assert str(a) == '4.2uarcsec' a = Angle('1.0uarcsec') assert a.value == 1.0 assert a.unit == u.microarcsecond a = Angle('1.0uarcsecN') assert a.value == 1.0 assert a.unit == u.microarcsecond a = Angle('1.0uarcsecS') assert a.value == -1.0 assert a.unit == u.microarcsecond a = Angle('1.0uarcsecE') assert a.value == 1.0 assert a.unit == u.microarcsecond a = Angle('1.0uarcsecW') assert a.value == -1.0 assert a.unit == u.microarcsecond a = Angle("3d") assert_allclose(a.value, 3.0) assert a.unit == u.degree a = Angle("3dN") assert str(a) == "3d00m00s" assert a.unit == u.degree a = Angle("3dS") assert str(a) == "-3d00m00s" assert a.unit == u.degree a = Angle("3dE") assert str(a) == "3d00m00s" assert a.unit == u.degree a = Angle("3dW") assert str(a) == "-3d00m00s" assert a.unit == u.degree a = Angle('10"') assert_allclose(a.value, 10.0) assert a.unit == u.arcsecond a = Angle("10'N") assert_allclose(a.value, 10.0) assert a.unit == u.arcminute a = Angle("10'S") assert_allclose(a.value, -10.0) assert a.unit == u.arcminute a = Angle("10'E") assert_allclose(a.value, 10.0) assert a.unit == u.arcminute a = Angle("10'W") assert_allclose(a.value, -10.0) assert a.unit == u.arcminute a = Angle('45°55′12″N') assert str(a) == '45d55m12s' assert_allclose(a.value, 45.92) assert a.unit == u.deg a = Angle('45°55′12″S') assert str(a) == '-45d55m12s' assert_allclose(a.value, -45.92) assert a.unit == u.deg a = Angle('45°55′12″E') assert str(a) == '45d55m12s' assert_allclose(a.value, 45.92) assert a.unit == u.deg a = Angle('45°55′12″W') assert str(a) == '-45d55m12s' assert_allclose(a.value, -45.92) assert a.unit == u.deg with pytest.raises(ValueError): Angle('00h00m10sN') with pytest.raises(ValueError): Angle('45°55′12″NS') def test_angle_repr(): assert 'Angle' in repr(Angle(0, u.deg)) assert 'Longitude' in repr(Longitude(0, u.deg)) assert 'Latitude' in repr(Latitude(0, u.deg)) a = Angle(0, u.deg) repr(a) def test_large_angle_representation(): """Test that angles above 360 degrees can be output as strings, in repr, str, and to_string. 
(regression test for #1413)""" a = Angle(350, u.deg) + Angle(350, u.deg) a.to_string() a.to_string(u.hourangle) repr(a) repr(a.to(u.hourangle)) str(a) str(a.to(u.hourangle)) def test_wrap_at_inplace(): a = Angle([-20, 150, 350, 360] * u.deg) out = a.wrap_at('180d', inplace=True) assert out is None assert np.all(a.degree == np.array([-20., 150., -10., 0.])) def test_latitude(): with pytest.raises(ValueError): lat = Latitude(['91d', '89d']) with pytest.raises(ValueError): lat = Latitude('-91d') lat = Latitude(['90d', '89d']) # check that one can get items assert lat[0] == 90 * u.deg assert lat[1] == 89 * u.deg # and that comparison with angles works assert np.all(lat == Angle(['90d', '89d'])) # check setitem works lat[1] = 45. * u.deg assert np.all(lat == Angle(['90d', '45d'])) # but not with values out of range with pytest.raises(ValueError): lat[0] = 90.001 * u.deg with pytest.raises(ValueError): lat[0] = -90.001 * u.deg # these should also not destroy input (#1851) assert np.all(lat == Angle(['90d', '45d'])) # conserve type on unit change (closes #1423) angle = lat.to('radian') assert type(angle) is Latitude # but not on calculations angle = lat - 190 * u.deg assert type(angle) is Angle assert angle[0] == -100 * u.deg lat = Latitude('80d') angle = lat / 2. assert type(angle) is Angle assert angle == 40 * u.deg angle = lat * 2. assert type(angle) is Angle assert angle == 160 * u.deg angle = -lat assert type(angle) is Angle assert angle == -80 * u.deg # Test errors when trying to interoperate with longitudes. with pytest.raises(TypeError) as excinfo: lon = Longitude(10, 'deg') lat = Latitude(lon) assert "A Latitude angle cannot be created from a Longitude angle" in str(excinfo.value) with pytest.raises(TypeError) as excinfo: lon = Longitude(10, 'deg') lat = Latitude([20], 'deg') lat[0] = lon assert "A Longitude angle cannot be assigned to a Latitude angle" in str(excinfo.value) # Check we can work around the Lat vs Long checks by casting explicitly to Angle. lon = Longitude(10, 'deg') lat = Latitude(Angle(lon)) assert lat.value == 10.0 # Check setitem. lon = Longitude(10, 'deg') lat = Latitude([20], 'deg') lat[0] = Angle(lon) assert lat.value[0] == 10.0 def test_longitude(): # Default wrapping at 360d with an array input lon = Longitude(['370d', '88d']) assert np.all(lon == Longitude(['10d', '88d'])) assert np.all(lon == Angle(['10d', '88d'])) # conserve type on unit change and keep wrap_angle (closes #1423) angle = lon.to('hourangle') assert type(angle) is Longitude assert angle.wrap_angle == lon.wrap_angle angle = lon[0] assert type(angle) is Longitude assert angle.wrap_angle == lon.wrap_angle angle = lon[1:] assert type(angle) is Longitude assert angle.wrap_angle == lon.wrap_angle # but not on calculations angle = lon / 2. assert np.all(angle == Angle(['5d', '44d'])) assert type(angle) is Angle assert not hasattr(angle, 'wrap_angle') angle = lon * 2. 
+ 400 * u.deg assert np.all(angle == Angle(['420d', '576d'])) assert type(angle) is Angle # Test setting a mutable value and having it wrap lon[1] = -10 * u.deg assert np.all(lon == Angle(['10d', '350d'])) # Test wrapping and try hitting some edge cases lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian) assert np.all(lon.degree == np.array([0., 90, 180, 270, 0])) lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian, wrap_angle='180d') assert np.all(lon.degree == np.array([0., 90, -180, -90, 0])) # Wrap on setting wrap_angle property (also test auto-conversion of wrap_angle to an Angle) lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian) lon.wrap_angle = '180d' assert np.all(lon.degree == np.array([0., 90, -180, -90, 0])) lon = Longitude('460d') assert lon == Angle('100d') lon.wrap_angle = '90d' assert lon == Angle('-260d') # check that if we initialize a longitude with another longitude, # wrap_angle is kept by default lon2 = Longitude(lon) assert lon2.wrap_angle == lon.wrap_angle # but not if we explicitly set it lon3 = Longitude(lon, wrap_angle='180d') assert lon3.wrap_angle == 180 * u.deg # check for problem reported in #2037 about Longitude initializing to -0 lon = Longitude(0, u.deg) lonstr = lon.to_string() assert not lonstr.startswith('-') # also make sure dtype is correctly conserved assert Longitude(0, u.deg, dtype=float).dtype == np.dtype(float) assert Longitude(0, u.deg, dtype=int).dtype == np.dtype(int) # Test errors when trying to interoperate with latitudes. with pytest.raises(TypeError) as excinfo: lat = Latitude(10, 'deg') lon = Longitude(lat) assert "A Longitude angle cannot be created from a Latitude angle" in str(excinfo.value) with pytest.raises(TypeError) as excinfo: lat = Latitude(10, 'deg') lon = Longitude([20], 'deg') lon[0] = lat assert "A Latitude angle cannot be assigned to a Longitude angle" in str(excinfo.value) # Check we can work around the Lat vs Long checks by casting explicitly to Angle. lat = Latitude(10, 'deg') lon = Longitude(Angle(lat)) assert lon.value == 10.0 # Check setitem. 
lat = Latitude(10, 'deg') lon = Longitude([20], 'deg') lon[0] = Angle(lat) assert lon.value[0] == 10.0 def test_wrap_at(): a = Angle([-20, 150, 350, 360] * u.deg) assert np.all(a.wrap_at(360 * u.deg).degree == np.array([340., 150., 350., 0.])) assert np.all(a.wrap_at(Angle(360, unit=u.deg)).degree == np.array([340., 150., 350., 0.])) assert np.all(a.wrap_at('360d').degree == np.array([340., 150., 350., 0.])) assert np.all(a.wrap_at('180d').degree == np.array([-20., 150., -10., 0.])) assert np.all(a.wrap_at(np.pi * u.rad).degree == np.array([-20., 150., -10., 0.])) # Test wrapping a scalar Angle a = Angle('190d') assert a.wrap_at('180d') == Angle('-170d') a = Angle(np.arange(-1000.0, 1000.0, 0.125), unit=u.deg) for wrap_angle in (270, 0.2, 0.0, 360.0, 500, -2000.125): aw = a.wrap_at(wrap_angle * u.deg) assert np.all(aw.degree >= wrap_angle - 360.0) assert np.all(aw.degree < wrap_angle) aw = a.to(u.rad).wrap_at(wrap_angle * u.deg) assert np.all(aw.degree >= wrap_angle - 360.0) assert np.all(aw.degree < wrap_angle) def test_is_within_bounds(): a = Angle([-20, 150, 350] * u.deg) assert a.is_within_bounds('0d', '360d') is False assert a.is_within_bounds(None, '360d') is True assert a.is_within_bounds(-30 * u.deg, None) is True a = Angle('-20d') assert a.is_within_bounds('0d', '360d') is False assert a.is_within_bounds(None, '360d') is True assert a.is_within_bounds(-30 * u.deg, None) is True def test_angle_mismatched_unit(): a = Angle('+6h7m8s', unit=u.degree) assert_allclose(a.value, 91.78333333333332) def test_regression_formatting_negative(): # Regression test for a bug that caused: # # >>> Angle(-1., unit='deg').to_string() # '-1d00m-0s' assert Angle(-0., unit='deg').to_string() == '-0d00m00s' assert Angle(-1., unit='deg').to_string() == '-1d00m00s' assert Angle(-0., unit='hour').to_string() == '-0h00m00s' assert Angle(-1., unit='hour').to_string() == '-1h00m00s' def test_regression_formatting_default_precision(): # Regression test for issue #11140 assert Angle('10:20:30.12345678d').to_string() == '10d20m30.12345678s' assert Angle('10d20m30.123456784564s').to_string() == '10d20m30.12345678s' assert Angle('10d20m30.123s').to_string() == '10d20m30.123s' def test_empty_sep(): a = Angle('05h04m31.93830s') assert a.to_string(sep='', precision=2, pad=True) == '050431.94' def test_create_tuple(): """ Tests creation of an angle with a (d,m,s) or (h,m,s) tuple """ a1 = Angle((1, 30, 0), unit=u.degree) assert a1.value == 1.5 a1 = Angle((1, 30, 0), unit=u.hourangle) assert a1.value == 1.5 def test_list_of_quantities(): a1 = Angle([1*u.deg, 1*u.hourangle]) assert a1.unit == u.deg assert_allclose(a1.value, [1, 15]) a2 = Angle([1*u.hourangle, 1*u.deg], u.deg) assert a2.unit == u.deg assert_allclose(a2.value, [15, 1]) def test_multiply_divide(): # Issue #2273 a1 = Angle([1, 2, 3], u.deg) a2 = Angle([4, 5, 6], u.deg) a3 = a1 * a2 assert_allclose(a3.value, [4, 10, 18]) assert a3.unit == (u.deg * u.deg) a3 = a1 / a2 assert_allclose(a3.value, [.25, .4, .5]) assert a3.unit == u.dimensionless_unscaled def test_mixed_string_and_quantity(): a1 = Angle(['1d', 1. 
* u.deg]) assert_array_equal(a1.value, [1., 1.]) assert a1.unit == u.deg a2 = Angle(['1d', 1 * u.rad * np.pi, '3d']) assert_array_equal(a2.value, [1., 180., 3.]) assert a2.unit == u.deg def test_array_angle_tostring(): aobj = Angle([1, 2], u.deg) assert aobj.to_string().dtype.kind == 'U' assert np.all(aobj.to_string() == ['1d00m00s', '2d00m00s']) def test_wrap_at_without_new(): """ Regression test for subtle bugs from situations where an Angle is created via numpy channels that don't do the standard __new__ but instead depend on array_finalize to set state. Longitude is used because the bug was in its _wrap_angle not getting initialized correctly """ l1 = Longitude([1]*u.deg) l2 = Longitude([2]*u.deg) l = np.concatenate([l1, l2]) assert l._wrap_angle is not None def test__str__(): """ Check the __str__ method used in printing the Angle """ # scalar angle scangle = Angle('10.2345d') strscangle = scangle.__str__() assert strscangle == '10d14m04.2s' # non-scalar array angles arrangle = Angle(['10.2345d', '-20d']) strarrangle = arrangle.__str__() assert strarrangle == '[10d14m04.2s -20d00m00s]' # summarizing for large arrays, ... should appear bigarrangle = Angle(np.ones(10000), u.deg) assert '...' in bigarrangle.__str__() def test_repr_latex(): """ Check the _repr_latex_ method, used primarily by IPython notebooks """ # try with both scalar scangle = Angle(2.1, u.deg) rlscangle = scangle._repr_latex_() # and array angles arrangle = Angle([1, 2.1], u.deg) rlarrangle = arrangle._repr_latex_() assert rlscangle == r'$2^\circ06{}^\prime00{}^{\prime\prime}$' assert rlscangle.split('$')[1] in rlarrangle # make sure the ... appears for large arrays bigarrangle = Angle(np.ones(50000)/50000., u.deg) assert '...' in bigarrangle._repr_latex_() def test_angle_with_cds_units_enabled(): """Regression test for #5350 Especially the example in https://github.com/astropy/astropy/issues/5350#issuecomment-248770151 """ from astropy.units import cds # the problem is with the parser, so remove it temporarily from astropy.coordinates.angle_formats import _AngleParser del _AngleParser._thread_local._parser with cds.enable(): Angle('5d') del _AngleParser._thread_local._parser Angle('5d') def test_longitude_nan(): # Check that passing a NaN to Longitude doesn't raise a warning Longitude([0, np.nan, 1] * u.deg) def test_latitude_nan(): # Check that passing a NaN to Latitude doesn't raise a warning Latitude([0, np.nan, 1] * u.deg) def test_angle_wrap_at_nan(): # Check that passing a NaN to Latitude doesn't raise a warning Angle([0, np.nan, 1] * u.deg).wrap_at(180*u.deg) def test_angle_multithreading(): """ Regression test for issue #7168 """ angles = ['00:00:00']*10000 def parse_test(i=0): Angle(angles, unit='hour') for i in range(10): threading.Thread(target=parse_test, args=(i,)).start()
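A hedged companion to the multithreading regression check above (an added sketch, not part of the original suite): joining the threads and collecting exceptions makes a parser race fail the test deterministically, instead of relying on unhandled thread exceptions being noticed in the output.

def test_angle_multithreading_joined():
    # Collect exceptions per thread and join, so a thread-local parser
    # regression (see #7168) fails this test directly.
    errors = []

    def parse_test():
        try:
            Angle(['00:00:00'] * 1000, unit='hour')
        except Exception as exc:
            errors.append(exc)

    threads = [threading.Thread(target=parse_test) for _ in range(10)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert not errors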
lpsinger/astropy
astropy/coordinates/tests/test_angles.py
astropy/utils/iers/iers.py
# Copyright 2015 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import json
import mock


class BigIPMock(object):
    """Mock BIG-IP® object

    Mocks a BIG-IP® object by substituting a mock icr_session object which
    returns a user created mock response object.

    To use, create a mock response object which will get returned by any
    icr_session HTTP method, then create an interface object, passing in
    this BigIPMock object.

    Example:
        # Create a mock response object with status code and JSON. Here
        # read_json_file() is used to get mock JSON, but you can always pass
        # in a JSON string, or create a dictionary object and convert it to
        # a JSON string using json.dumps().
        response = BigIPMock.create_mock_response(
            200,
            BigIPMock.read_json_file("f5/bigip/interfaces/test/pool.json")
        )

        # Create BIG-IP® object, passing in mocked response object
        big_ip = BigIPMock(response)

        # Create interface object
        test_pool = Pool(big_ip)

        # Call interface method which will receive mock response object
        # created above when it calls the icr_session method get().
        description = test_pool.get_description("my-Pool")
    """

    def __init__(self, response=mock.Mock()):
        """Initializes BigIPMock object.

        :param response: Mock response object to return from icr_session
            calls.
        :return:
        """
        self.icontrol = self._create_icontrol()
        self.icr_session = self._create_icr_session()
        self.icr_uri = 'https://host-abc/mgmt/tm'
        self.response = response

    def _create_icontrol(self):
        return mock.Mock()

    def _create_icr_session(self):
        """Creates a mock icr_session object.

        This mocked icr_session substitutes basic request library methods
        (get, put, post, etc.) with a method that simply returns a mocked
        response object.

        Set the response on the BigIPMock object before calling one of the
        icr_session methods.

        :rtype object: mock session object.
        """
        def mock_response(url, *args, **kwargs):
            return self.response

        icr_session = mock.Mock()
        icr_session.delete = mock_response
        icr_session.get = mock_response
        icr_session.put = mock_response
        icr_session.post = mock_response
        return icr_session

    @staticmethod
    def create_mock_response(status_code, json_str):
        """Creates a mock HTTP response.

        :param int status_code: HTTP response code to mock.
        :param string json_str: JSON string to mock.
        :rtype object: mock HTTP response object.
        """
        response = mock.Mock()
        response.status_code = status_code
        response.text = json_str
        response.json.return_value = json.loads(json_str)
        return response

    @staticmethod
    def read_json_file(filename):
        """Reads JSON file, returning a JSON string.

        The file must contain a valid JSON object, for example:
        {"key": "value"...} or {"key": {"key": "value"}...}

        :param string filename: Name of file containing JSON object.
        :rtype string: JSON object as a string.
        """
        with open(filename) as f:
            s = f.read()
        assert len(s) > 0
        return s
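A self-contained sketch of driving BigIPMock directly, using only the methods defined above (the Pool interface from the class docstring is not imported here; the URL and JSON body are placeholders):

# Build a canned 200 response carrying a small JSON body.
response = BigIPMock.create_mock_response(200, '{"name": "my-pool"}')
big_ip = BigIPMock(response)

# Every icr_session verb returns the canned response, regardless of URL.
result = big_ip.icr_session.get(big_ip.icr_uri + '/ltm/pool/my-pool')
assert result.status_code == 200
assert result.json()['name'] == 'my-pool'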
# Copyright 2017 F5 Networks Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import mock import pytest from f5.bigip import ManagementRoot from f5.bigip.tm.sys.management_route import Management_Route from f5.sdk_exception import MissingRequiredCreationParameter @pytest.fixture def FakeMgmtRoute(): fake_sys = mock.MagicMock() return Management_Route(fake_sys) def test_create_no_args(FakeMgmtRoute): with pytest.raises(MissingRequiredCreationParameter) as EIO: FakeMgmtRoute.create() assert "Missing required params:" in str(EIO.value) def test_create_missing_name(FakeMgmtRoute): with pytest.raises(MissingRequiredCreationParameter) as EIO: FakeMgmtRoute.create(gateway='192.168.1.1', network='172.16.15.0/24') assert str(EIO.value) == "Missing required params: ['name']" def test_create_missing_network(FakeMgmtRoute): with pytest.raises(MissingRequiredCreationParameter) as EIO: FakeMgmtRoute.create(name='testnet', gateway='192.168.1.1') assert str(EIO.value) == "Missing required params: ['network']" def test_create_missing_gateway(FakeMgmtRoute): with pytest.raises(MissingRequiredCreationParameter) as EIO: FakeMgmtRoute.create(name='testnet', network='172.16.15.0/24') assert str(EIO.value) == "Missing required params: ['gateway']" def test_create_mgmtroute(fakeicontrolsession): b = ManagementRoot('192.168.1.1.', 'admin', 'admin') mr1 = b.tm.sys.management_routes.management_route mr2 = b.tm.sys.management_routes.management_route assert mr1 is not mr2
F5Networks/f5-common-python
f5/bigip/tm/sys/test/unit/test_management_route.py
f5/bigip/test/unit/big_ip_mock.py
# coding=utf-8 # # Copyright 2015-2017 F5 Networks Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """BIG-IP® Advanced Firewall Manager™ (AFM®) module. REST URI ``http://localhost/mgmt/tm/security/scrubber`` GUI Path ``Security --> Option --> Network Firewall --> External Redirection --> Scrubbing Profile`` REST Kind ``tm:security:scrubbercollectionstate:*`` """ from f5.bigip.resource import Collection from f5.bigip.resource import OrganizingCollection from f5.bigip.resource import Resource class Scrubber(OrganizingCollection): """BIG-IP® AFM® Scrubber organizing collection.""" def __init__(self, security): super(Scrubber, self).__init__(security) self._meta_data['allowed_lazy_attributes'] = [ Profile_s] class Profile_s(Collection): """BIG-IP® AFM® Scrubber Profile collection""" def __init__(self, scrubber): super(Profile_s, self).__init__(scrubber) self._meta_data['allowed_lazy_attributes'] = [Profile] self._meta_data['attribute_registry'] = \ {'tm:security:scrubber:profile:profilestate': Profile} class Profile(Resource): """BIG-IP® AFM® Scrubber Profile resource""" def __init__(self, profile_s): super(Profile, self).__init__(profile_s) self._meta_data['required_json_kind'] = \ 'tm:security:scrubber:profile:profilestate' self._meta_data['required_load_parameters'].update(('partition', 'name')) self._meta_data['attribute_registry'] = \ {'tm:security:scrubber:profile:scrubber-rt-domain:scrubber_rt_domaincollectionstate': Scrubber_Rt_Domain_s, 'tm:security:scrubber:profile:scrubber-categories:scrubber-categoriescollectionstate': Scrubber_Categories_s, 'tm:security:scrubber:profile:scrubber-virtual-server:scrubber-virtual-servercollectionstate': Scrubber_Virtual_Server_s, 'tm:security:scrubber:profile:scrubber-netflow-protected-server:scrubber-netflow-protected-servercollectionstate': Scrubber_Netflow_Protected_Server_s} self._meta_data['allowed_lazy_attributes'] = [ Scrubber_Rt_Domain_s, Scrubber_Virtual_Server_s, Scrubber_Categories_s, Scrubber_Netflow_Protected_Server_s] class Scrubber_Rt_Domain_s(Collection): """BIG-IP® AFM® Scrubber Profile Route Domain collection""" def __init__(self, profile): super(Scrubber_Rt_Domain_s, self).__init__(profile) self._meta_data['allowed_lazy_attributes'] = [Scrubber_Rt_Domain] self._meta_data['attribute_registry'] = \ {'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rt-domainstate': Scrubber_Rt_Domain} class Scrubber_Rt_Domain(Resource): """BIG-IP® AFM® Scrubber Profile Route Domain resource""" def __init__(self, scrubber_rt_domain_s): super(Scrubber_Rt_Domain, self).__init__(scrubber_rt_domain_s) self._meta_data['allowed_lazy_attributes'] = [Scrubber_Rd_Network_Prefix_s] self._meta_data['required_json_kind'] = \ 'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rt-domainstate' self._meta_data['attribute_registry'] = \ {'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rd-network-prefix:scrubber-rd-network-prefixcollectionstate': Scrubber_Rd_Network_Prefix_s} self._meta_data['required_creation_parameters'].update(('name', 
'routeDomain')) class Scrubber_Rd_Network_Prefix_s(Collection): """BIG-IP® AFM® Scrubber Rd Network Prefix collection""" def __init__(self, scrubber_rt_domain): super(Scrubber_Rd_Network_Prefix_s, self).__init__(scrubber_rt_domain) self._meta_data['allowed_lazy_attributes'] = [Scrubber_Rd_Network_Prefix] self._meta_data['attribute_registry'] = \ {'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rd-network-prefix:scrubber-rd-network-prefixstate': Scrubber_Rd_Network_Prefix} class Scrubber_Rd_Network_Prefix(Resource): """BIG-IP® AFM® Scrubber Rd Network Prefix resource""" def __init__(self, scrubber_rd_network_prefix_s): super(Scrubber_Rd_Network_Prefix, self).__init__(scrubber_rd_network_prefix_s) self._meta_data['required_json_kind'] = \ 'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rd-network-prefix:scrubber-rd-network-prefixstate' self._meta_data['required_creation_parameters'].update(('name', 'nextHop', 'dstIp', 'mask')) class Scrubber_Virtual_Server_s(Collection): """BIG-IP® AFM® Scrubber Profile Virtual Server collection""" def __init__(self, profile): super(Scrubber_Virtual_Server_s, self).__init__(profile) self._meta_data['allowed_lazy_attributes'] = [Scrubber_Virtual_Server] self._meta_data['attribute_registry'] = \ {'tm:security:scrubber:profile:scrubber-virtual-server:scrubber-virtual-serverstate': Scrubber_Virtual_Server} class Scrubber_Virtual_Server(Resource): """BIG-IP® AFM® Scrubber Profile Virtual Server resource""" def __init__(self, scrubber_virtual_server_s): super(Scrubber_Virtual_Server, self).__init__(scrubber_virtual_server_s) self._meta_data['required_json_kind'] = \ 'tm:security:scrubber:profile:scrubber-virtual-server:scrubber-virtual-serverstate' self._meta_data['required_creation_parameters'].update(('name', 'vsName')) class Scrubber_Categories_s(Collection): """BIG-IP® AFM® Scrubber Profile Categories collection""" def __init__(self, profile): super(Scrubber_Categories_s, self).__init__(profile) self._meta_data['allowed_lazy_attributes'] = [Scrubber_Categories] self._meta_data['attribute_registry'] = \ {'tm:security:scrubber:profile:scrubber-categories:scrubber-categoriesstate': Scrubber_Categories} class Scrubber_Categories(Resource): """BIG-IP® AFM® Scrubber Profile Categories resource""" def __init__(self, scrubber_categories_s): super(Scrubber_Categories, self).__init__(scrubber_categories_s) self._meta_data['required_json_kind'] = \ 'tm:security:scrubber:profile:scrubber-categories:scrubber-categoriesstate' self._meta_data['required_creation_parameters'].update(('name', 'blacklistCategory', 'routeDomainName')) class Scrubber_Netflow_Protected_Server_s(Collection): """BIG-IP® AFM® Scrubber Profile Netflow Protected Server collection""" def __init__(self, profile): super(Scrubber_Netflow_Protected_Server_s, self).__init__(profile) self._meta_data['allowed_lazy_attributes'] = [Scrubber_Netflow_Protected_Server] self._meta_data['attribute_registry'] = \ {'tm:security:scrubber:profile:scrubber-netflow-protected-server:scrubber-netflow-protected-serverstate': Scrubber_Netflow_Protected_Server} class Scrubber_Netflow_Protected_Server(Resource): """BIG-IP® AFM® Scrubber Profile Netflow Protected Server resource""" def __init__(self, scrubber_netflow_protected_server_s): super(Scrubber_Netflow_Protected_Server, self).__init__(scrubber_netflow_protected_server_s) self._meta_data['required_json_kind'] = \ 'tm:security:scrubber:profile:scrubber-netflow-protected-server:scrubber-netflow-protected-serverstate' 
self._meta_data['required_creation_parameters'].update(('name', 'npsName'))
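A hedged usage sketch (not shipped with this module): creating a scrubber profile through the SDK's lazy-attribute chain, following the Collection/Resource naming conventions used above. The host, credentials, and object names are placeholders, and the attribute spellings are inferred from the class names rather than confirmed against a device.

from f5.bigip import ManagementRoot

mgmt = ManagementRoot('10.0.0.1', 'admin', 'admin')  # placeholder device

# Collections are exposed as lowercased class names (Profile_s -> profile_s),
# and the resource accessor drops the trailing '_s' (profile).
profile = mgmt.tm.security.scrubber.profile_s.profile.create(
    name='scrub-profile-1', partition='Common')

# Sub-collections hang off the loaded profile, e.g. its route domains.
rt_domains = profile.scrubber_rt_domain_s.get_collection()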
F5Networks/f5-common-python
f5/bigip/tm/sys/test/unit/test_management_route.py
f5/bigip/tm/security/scrubber.py
# Authors: Adam Li <adam2392@gmail.com> # Alex Rockhill <aprockhill@mailbox.org> # License: BSD Style. from functools import partial from ...utils import verbose from ..utils import (has_dataset, _data_path, _data_path_doc, _get_version, _version_doc) has_epilepsy_ecog_data = partial(has_dataset, name='epilepsy_ecog') @verbose def data_path( path=None, force_update=False, update_path=True, download=True, verbose=None): # noqa: D103 return _data_path(path=path, force_update=force_update, update_path=update_path, name='epilepsy_ecog', download=download) data_path.__doc__ = _data_path_doc.format( name='epilepsy_ecog', conf='MNE_DATASETS_EPILEPSY_ECOG_PATH') def get_version(): # noqa: D103 return _get_version('epilepsy_ecog') get_version.__doc__ = _version_doc.format(name='epilepsy_ecog')
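A short usage sketch for the accessors defined above (an added example; the first call downloads the dataset unless it is already present or downloading is disabled):

from mne.datasets import epilepsy_ecog

print(epilepsy_ecog.get_version())               # dataset version string
path = epilepsy_ecog.data_path(download=False)   # local path, no forced download
print('epilepsy_ecog dataset at:', path)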
# Author: Martin Luessi <mluessi@nmr.mgh.harvard.edu> # # License: Simplified BSD import os.path as op import pytest import numpy as np from numpy.testing import assert_array_almost_equal, assert_allclose import mne from mne.datasets import testing from mne import (read_cov, read_forward_solution, read_evokeds, convert_forward_solution, VectorSourceEstimate) from mne.cov import regularize from mne.inverse_sparse import gamma_map from mne.inverse_sparse.mxne_inverse import make_stc_from_dipoles from mne.minimum_norm.tests.test_inverse import (assert_stc_res, assert_var_exp_log) from mne import pick_types_forward from mne.utils import assert_stcs_equal, catch_logging from mne.dipole import Dipole data_path = testing.data_path(download=False) fname_evoked = op.join(data_path, 'MEG', 'sample', 'sample_audvis-ave.fif') fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis-cov.fif') fname_fwd = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif') subjects_dir = op.join(data_path, 'subjects') def _check_stc(stc, evoked, idx, hemi, fwd, dist_limit=0., ratio=50., res=None, atol=1e-20): """Check correctness.""" assert_array_almost_equal(stc.times, evoked.times, 5) stc_orig = stc if isinstance(stc, VectorSourceEstimate): assert stc.data.any(1).any(1).all() # all dipoles should have some stc = stc.magnitude() amps = np.sum(stc.data ** 2, axis=1) order = np.argsort(amps)[::-1] amps = amps[order] verts = np.concatenate(stc.vertices)[order] hemi_idx = int(order[0] >= len(stc.vertices[1])) hemis = ['lh', 'rh'] assert hemis[hemi_idx] == hemi dist = np.linalg.norm(np.diff(fwd['src'][hemi_idx]['rr'][[idx, verts[0]]], axis=0)[0]) * 1000. assert dist <= dist_limit assert amps[0] > ratio * amps[1] if res is not None: assert_stc_res(evoked, stc_orig, fwd, res, atol=atol) @pytest.mark.slowtest @testing.requires_testing_data def test_gamma_map_standard(): """Test Gamma MAP inverse.""" forward = read_forward_solution(fname_fwd) forward = convert_forward_solution(forward, surf_ori=True) forward = pick_types_forward(forward, meg=False, eeg=True) evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0), proj=False) evoked.resample(50, npad=100) evoked.crop(tmin=0.1, tmax=0.14) # crop to window around peak cov = read_cov(fname_cov) cov = regularize(cov, evoked.info, rank=None) alpha = 0.5 with catch_logging() as log: stc = gamma_map(evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=True, update_mode=1, verbose=True) _check_stc(stc, evoked, 68477, 'lh', fwd=forward) assert_var_exp_log(log.getvalue(), 20, 22) with catch_logging() as log: stc_vec, res = gamma_map( evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=True, update_mode=1, pick_ori='vector', return_residual=True, verbose=True) assert_var_exp_log(log.getvalue(), 20, 22) assert_stcs_equal(stc_vec.magnitude(), stc) _check_stc(stc_vec, evoked, 68477, 'lh', fwd=forward, res=res) stc, res = gamma_map( evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=False, update_mode=1, pick_ori='vector', return_residual=True) _check_stc(stc, evoked, 82010, 'lh', fwd=forward, dist_limit=6., ratio=2., res=res) with catch_logging() as log: dips = gamma_map(evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=False, update_mode=1, return_as_dipoles=True, verbose=True) exp_var = assert_var_exp_log(log.getvalue(), 58, 60) dip_exp_var = np.mean(sum(dip.gof for dip in dips)) assert_allclose(exp_var, dip_exp_var, atol=10) # not really equiv, close assert (isinstance(dips[0], Dipole)) stc_dip = make_stc_from_dipoles(dips, 
forward['src']) assert_stcs_equal(stc.magnitude(), stc_dip) # force fixed orientation stc, res = gamma_map(evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=False, update_mode=2, loose=0, return_residual=True) _check_stc(stc, evoked, 85739, 'lh', fwd=forward, ratio=20., res=res) @pytest.mark.slowtest @testing.requires_testing_data def test_gamma_map_vol_sphere(): """Gamma MAP with a sphere forward and volumic source space.""" evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0), proj=False) evoked.resample(50, npad=100) evoked.crop(tmin=0.1, tmax=0.16) # crop to window around peak cov = read_cov(fname_cov) cov = regularize(cov, evoked.info, rank=None) info = evoked.info sphere = mne.make_sphere_model(r0=(0., 0., 0.), head_radius=0.080) src = mne.setup_volume_source_space(subject=None, pos=30., mri=None, sphere=(0.0, 0.0, 0.0, 0.08), bem=None, mindist=5.0, exclude=2.0, sphere_units='m') fwd = mne.make_forward_solution(info, trans=None, src=src, bem=sphere, eeg=False, meg=True) alpha = 0.5 pytest.raises(ValueError, gamma_map, evoked, fwd, cov, alpha, loose=0, return_residual=False) pytest.raises(ValueError, gamma_map, evoked, fwd, cov, alpha, loose=0.2, return_residual=False) stc = gamma_map(evoked, fwd, cov, alpha, tol=1e-4, xyz_same_gamma=False, update_mode=2, return_residual=False) assert_array_almost_equal(stc.times, evoked.times, 5) # Compare orientation obtained using fit_dipole and gamma_map # for a simulated evoked containing a single dipole stc = mne.VolSourceEstimate(50e-9 * np.random.RandomState(42).randn(1, 4), vertices=[stc.vertices[0][:1]], tmin=stc.tmin, tstep=stc.tstep) evoked_dip = mne.simulation.simulate_evoked(fwd, stc, info, cov, nave=1e9, use_cps=True) dip_gmap = gamma_map(evoked_dip, fwd, cov, 0.1, return_as_dipoles=True) amp_max = [np.max(d.amplitude) for d in dip_gmap] dip_gmap = dip_gmap[np.argmax(amp_max)] assert (dip_gmap[0].pos[0] in src[0]['rr'][stc.vertices[0]]) dip_fit = mne.fit_dipole(evoked_dip, cov, sphere)[0] assert (np.abs(np.dot(dip_fit.ori[0], dip_gmap.ori[0])) > 0.99)
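The tests above drive mne.inverse_sparse.gamma_map through its main code paths. A minimal sketch of the basic call, with placeholder file names (not the actual testing-data paths) standing in for real evoked, forward, and covariance files:

import mne
from mne.inverse_sparse import gamma_map

# Placeholder file names; the tests above use MNE's testing dataset.
evoked = mne.read_evokeds('my-ave.fif', condition=0, baseline=(None, 0))
forward = mne.read_forward_solution('my-fwd.fif')
cov = mne.read_cov('my-cov.fif')

# alpha trades off data fit against sparsity (the tests use 0.5);
# pass return_as_dipoles=True to get a list of Dipole objects instead.
stc = gamma_map(evoked, forward, cov, alpha=0.5, xyz_same_gamma=True)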
rkmaddox/mne-python
mne/inverse_sparse/tests/test_gamma_map.py
mne/datasets/epilepsy_ecog/_data.py
# Authors: Denis A. Engemann <denis.engemann@gmail.com> # Teon Brooks <teon.brooks@gmail.com> # # simplified BSD-3 license import datetime import time import numpy as np from .egimff import _read_raw_egi_mff from .events import _combine_triggers from ..base import BaseRaw from ..utils import _read_segments_file, _create_chs from ..meas_info import _empty_info from ..constants import FIFF from ...utils import verbose, logger, warn, _validate_type, _check_fname def _read_header(fid): """Read EGI binary header.""" version = np.fromfile(fid, '<i4', 1)[0] if version > 6 & ~np.bitwise_and(version, 6): version = version.byteswap().astype(np.uint32) else: raise ValueError('Watchout. This does not seem to be a simple ' 'binary EGI file.') def my_fread(*x, **y): return np.fromfile(*x, **y)[0] info = dict( version=version, year=my_fread(fid, '>i2', 1), month=my_fread(fid, '>i2', 1), day=my_fread(fid, '>i2', 1), hour=my_fread(fid, '>i2', 1), minute=my_fread(fid, '>i2', 1), second=my_fread(fid, '>i2', 1), millisecond=my_fread(fid, '>i4', 1), samp_rate=my_fread(fid, '>i2', 1), n_channels=my_fread(fid, '>i2', 1), gain=my_fread(fid, '>i2', 1), bits=my_fread(fid, '>i2', 1), value_range=my_fread(fid, '>i2', 1) ) unsegmented = 1 if np.bitwise_and(version, 1) == 0 else 0 precision = np.bitwise_and(version, 6) if precision == 0: raise RuntimeError('Floating point precision is undefined.') if unsegmented: info.update(dict(n_categories=0, n_segments=1, n_samples=np.fromfile(fid, '>i4', 1)[0], n_events=np.fromfile(fid, '>i2', 1)[0], event_codes=[], category_names=[], category_lengths=[], pre_baseline=0)) for event in range(info['n_events']): event_codes = ''.join(np.fromfile(fid, 'S1', 4).astype('U1')) info['event_codes'].append(event_codes) else: raise NotImplementedError('Only continuous files are supported') info['unsegmented'] = unsegmented info['dtype'], info['orig_format'] = {2: ('>i2', 'short'), 4: ('>f4', 'float'), 6: ('>f8', 'double')}[precision] info['dtype'] = np.dtype(info['dtype']) return info def _read_events(fid, info): """Read events.""" events = np.zeros([info['n_events'], info['n_segments'] * info['n_samples']]) fid.seek(36 + info['n_events'] * 4, 0) # skip header for si in range(info['n_samples']): # skip data channels fid.seek(info['n_channels'] * info['dtype'].itemsize, 1) # read event channels events[:, si] = np.fromfile(fid, info['dtype'], info['n_events']) return events @verbose def read_raw_egi(input_fname, eog=None, misc=None, include=None, exclude=None, preload=False, channel_naming='E%d', verbose=None): """Read EGI simple binary as raw object. .. note:: This function attempts to create a synthetic trigger channel. See the Notes section below. Parameters ---------- input_fname : path-like Path to the raw file. Files with an extension .mff are automatically considered to be EGI's native MFF format files. eog : list or tuple Names of channels or list of indices that should be designated EOG channels. Default is None. misc : list or tuple Names of channels or list of indices that should be designated MISC channels. Default is None. include : None | list The event channels to be ignored when creating the synthetic trigger. Defaults to None. Note. Overrides ``exclude`` parameter. exclude : None | list The event channels to be ignored when creating the synthetic trigger. Defaults to None. If None, channels that have more than one event and the ``sync`` and ``TREV`` channels will be ignored. %(preload)s .. 
versionadded:: 0.11 channel_naming : str Channel naming convention for the data channels. Defaults to 'E%%d' (resulting in channel names 'E1', 'E2', 'E3'...). The effective default prior to 0.14.0 was 'EEG %%03d'. .. versionadded:: 0.14.0 %(verbose)s Returns ------- raw : instance of RawEGI A Raw object containing EGI data. See Also -------- mne.io.Raw : Documentation of attribute and methods. Notes ----- The trigger channel names are based on the arbitrary user dependent event codes used. However this function will attempt to generate a **synthetic trigger channel** named ``STI 014`` in accordance with the general Neuromag / MNE naming pattern. The event_id assignment equals ``np.arange(n_events) + 1``. The resulting ``event_id`` mapping is stored as attribute to the resulting raw object but will be ignored when saving to a fiff. Note. The trigger channel is artificially constructed based on timestamps received by the Netstation. As a consequence, triggers have only short durations. This step will fail if events are not mutually exclusive. """ _validate_type(input_fname, 'path-like', 'input_fname') input_fname = str(input_fname) if input_fname.endswith('.mff'): return _read_raw_egi_mff(input_fname, eog, misc, include, exclude, preload, channel_naming, verbose) return RawEGI(input_fname, eog, misc, include, exclude, preload, channel_naming, verbose) class RawEGI(BaseRaw): """Raw object from EGI simple binary file.""" @verbose def __init__(self, input_fname, eog=None, misc=None, include=None, exclude=None, preload=False, channel_naming='E%d', verbose=None): # noqa: D102 input_fname = _check_fname(input_fname, 'read', True, 'input_fname') if eog is None: eog = [] if misc is None: misc = [] with open(input_fname, 'rb') as fid: # 'rb' important for py3k logger.info('Reading EGI header from %s...' % input_fname) egi_info = _read_header(fid) logger.info(' Reading events ...') egi_events = _read_events(fid, egi_info) # update info + jump if egi_info['value_range'] != 0 and egi_info['bits'] != 0: cal = egi_info['value_range'] / 2. ** egi_info['bits'] else: cal = 1e-6 logger.info(' Assembling measurement info ...') event_codes = [] if egi_info['n_events'] > 0: event_codes = list(egi_info['event_codes']) if include is None: exclude_list = ['sync', 'TREV'] if exclude is None else exclude exclude_inds = [i for i, k in enumerate(event_codes) if k in exclude_list] more_excludes = [] if exclude is None: for ii, event in enumerate(egi_events): if event.sum() <= 1 and event_codes[ii]: more_excludes.append(ii) if len(exclude_inds) + len(more_excludes) == len(event_codes): warn('Did not find any event code with more than one ' 'event.', RuntimeWarning) else: exclude_inds.extend(more_excludes) exclude_inds.sort() include_ = [i for i in np.arange(egi_info['n_events']) if i not in exclude_inds] include_names = [k for i, k in enumerate(event_codes) if i in include_] else: include_ = [i for i, k in enumerate(event_codes) if k in include] include_names = include for kk, v in [('include', include_names), ('exclude', exclude)]: if isinstance(v, list): for k in v: if k not in event_codes: raise ValueError('Could find event named "%s"' % k) elif v is not None: raise ValueError('`%s` must be None or of type list' % kk) event_ids = np.arange(len(include_)) + 1 logger.info(' Synthesizing trigger channel "STI 014" ...') logger.info(' Excluding events {%s} ...' 
% ", ".join([k for i, k in enumerate(event_codes) if i not in include_])) egi_info['new_trigger'] = _combine_triggers( egi_events[include_], remapping=event_ids) self.event_id = dict(zip([e for e in event_codes if e in include_names], event_ids)) else: # No events self.event_id = None egi_info['new_trigger'] = None info = _empty_info(egi_info['samp_rate']) my_time = datetime.datetime( egi_info['year'], egi_info['month'], egi_info['day'], egi_info['hour'], egi_info['minute'], egi_info['second']) my_timestamp = time.mktime(my_time.timetuple()) info['meas_date'] = (my_timestamp, 0) ch_names = [channel_naming % (i + 1) for i in range(egi_info['n_channels'])] ch_names.extend(list(egi_info['event_codes'])) if egi_info['new_trigger'] is not None: ch_names.append('STI 014') # our new_trigger nchan = len(ch_names) cals = np.repeat(cal, nchan) ch_coil = FIFF.FIFFV_COIL_EEG ch_kind = FIFF.FIFFV_EEG_CH chs = _create_chs(ch_names, cals, ch_coil, ch_kind, eog, (), (), misc) sti_ch_idx = [i for i, name in enumerate(ch_names) if name.startswith('STI') or name in event_codes] for idx in sti_ch_idx: chs[idx].update({'unit_mul': FIFF.FIFF_UNITM_NONE, 'cal': 1., 'kind': FIFF.FIFFV_STIM_CH, 'coil_type': FIFF.FIFFV_COIL_NONE, 'unit': FIFF.FIFF_UNIT_NONE}) info['chs'] = chs info._update_redundant() super(RawEGI, self).__init__( info, preload, orig_format=egi_info['orig_format'], filenames=[input_fname], last_samps=[egi_info['n_samples'] - 1], raw_extras=[egi_info], verbose=verbose) def _read_segment_file(self, data, idx, fi, start, stop, cals, mult): """Read a segment of data from a file.""" egi_info = self._raw_extras[fi] dtype = egi_info['dtype'] n_chan_read = egi_info['n_channels'] + egi_info['n_events'] offset = 36 + egi_info['n_events'] * 4 trigger_ch = egi_info['new_trigger'] _read_segments_file(self, data, idx, fi, start, stop, cals, mult, dtype=dtype, n_channels=n_chan_read, offset=offset, trigger_ch=trigger_ch)
rkmaddox/mne-python
mne/inverse_sparse/tests/test_gamma_map.py
mne/io/egi/egi.py
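Because the reader synthesizes a 'STI 014' trigger channel and stores the code-to-integer mapping on the returned object, downstream event handling follows the usual Neuromag pattern. A sketch, reusing the same placeholder file name as above:

import mne

raw = mne.io.read_raw_egi('recording.raw', preload=True)
events = mne.find_events(raw, stim_channel='STI 014')
print(raw.event_id)  # original EGI event codes -> integer event IDs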
"""Mayavi/traits GUI for averaging two sets of KIT marker points.""" # Authors: Christian Brodbeck <christianbrodbeck@nyu.edu> # # License: BSD (3-clause) import os import sys import numpy as np from mayavi.tools.mlab_scene_model import MlabSceneModel from pyface.api import confirm, error, FileDialog, OK, YES from traits.api import (HasTraits, HasPrivateTraits, on_trait_change, cached_property, Instance, Property, Array, Bool, Button, Enum, File, Float, List, Str, ArrayOrNone) from traitsui.api import View, Item, HGroup, VGroup, CheckListEditor from traitsui.menu import Action, CancelButton from ..transforms import apply_trans, rotation, translation from ..coreg import fit_matched_points from ..io.kit import read_mrk from ..io._digitization import _write_dig_points from ._viewer import PointObject from ._backend import _get_pyface_backend if _get_pyface_backend() == 'wx': mrk_wildcard = [ 'Supported Files (*.sqd, *.mrk, *.txt, *.pickled)|*.sqd;*.mrk;*.txt;*.pickled', # noqa:E501 'Sqd marker file (*.sqd;*.mrk)|*.sqd;*.mrk', 'Text marker file (*.txt)|*.txt', 'Pickled markers (*.pickled)|*.pickled'] mrk_out_wildcard = ["Tab separated values file (*.txt)|*.txt"] else: if sys.platform in ('win32', 'linux2'): # on Windows and Ubuntu, multiple wildcards does not seem to work mrk_wildcard = ["*.sqd", "*.mrk", "*.txt", "*.pickled"] else: mrk_wildcard = ["*.sqd;*.mrk;*.txt;*.pickled"] mrk_out_wildcard = "*.txt" out_ext = '.txt' use_editor_v = CheckListEditor(cols=1, values=[(i, str(i)) for i in range(5)]) use_editor_h = CheckListEditor(cols=5, values=[(i, str(i)) for i in range(5)]) mrk_view_editable = View( VGroup('file', Item('name', show_label=False, style='readonly'), HGroup( Item('use', editor=use_editor_v, enabled_when="enabled", style='custom'), 'points', ), HGroup(Item('clear', enabled_when="can_save", show_label=False), Item('save_as', enabled_when="can_save", show_label=False)), )) mrk_view_basic = View( VGroup('file', Item('name', show_label=False, style='readonly'), Item('use', editor=use_editor_h, enabled_when="enabled", style='custom'), HGroup(Item('clear', enabled_when="can_save", show_label=False), Item('edit', show_label=False), Item('switch_left_right', label="Switch Left/Right", show_label=False), Item('reorder', show_label=False), Item('save_as', enabled_when="can_save", show_label=False)), )) mrk_view_edit = View(VGroup('points')) class ReorderDialog(HasPrivateTraits): """Dialog for reordering marker points.""" order = Str("0 1 2 3 4") index = Property(List, depends_on='order') is_ok = Property(Bool, depends_on='order') view = View( Item('order', label='New order (five space delimited numbers)'), buttons=[CancelButton, Action(name='OK', enabled_when='is_ok')]) def _get_index(self): try: return [int(i) for i in self.order.split()] except ValueError: return [] def _get_is_ok(self): return sorted(self.index) == [0, 1, 2, 3, 4] class MarkerPoints(HasPrivateTraits): """Represent 5 marker points.""" points = Array(float, (5, 3)) can_save = Property(depends_on='points') save_as = Button() view = View(VGroup('points', Item('save_as', enabled_when='can_save'))) @cached_property def _get_can_save(self): return np.any(self.points) def _save_as_fired(self): dlg = FileDialog(action="save as", wildcard=mrk_out_wildcard, default_filename=self.name, default_directory=self.dir) dlg.open() if dlg.return_code != OK: return path, ext = os.path.splitext(dlg.path) if not path.endswith(out_ext) and len(ext) != 0: ValueError("The extension '%s' is not supported." 
% ext) path = path + out_ext if os.path.exists(path): answer = confirm(None, "The file %r already exists. Should it " "be replaced?", "Overwrite File?") if answer != YES: return self.save(path) def save(self, path): """Save the marker points. Parameters ---------- path : str Path to the file to write. The kind of file to write is determined based on the extension: '.txt' for tab separated text file, '.pickled' for pickled file. """ _write_dig_points(path, self.points) class MarkerPointSource(MarkerPoints): # noqa: D401 """MarkerPoints subclass for source files.""" file = File(filter=mrk_wildcard, exists=True) name = Property(Str, depends_on='file') dir = Property(Str, depends_on='file') use = List(list(range(5)), desc="Which points to use for the interpolated " "marker.") enabled = Property(Bool, depends_on=['points', 'use']) clear = Button(desc="Clear the current marker data") edit = Button(desc="Edit the marker coordinates manually") switch_left_right = Button( desc="Switch left and right marker points; this is intended to " "correct for markers that were attached in the wrong order") reorder = Button(desc="Change the order of the marker points") view = mrk_view_basic @cached_property def _get_enabled(self): return np.any(self.points) @cached_property def _get_dir(self): if self.file: return os.path.dirname(self.file) @cached_property def _get_name(self): if self.file: return os.path.basename(self.file) @on_trait_change('file') def load(self, fname): if not fname: self.reset_traits(['points']) return try: pts = read_mrk(fname) except Exception as err: error(None, str(err), "Error Reading mrk") self.reset_traits(['points']) else: self.points = pts def _clear_fired(self): self.reset_traits(['file', 'points', 'use']) def _edit_fired(self): self.edit_traits(view=mrk_view_edit) def _reorder_fired(self): dlg = ReorderDialog() ui = dlg.edit_traits(kind='modal') if not ui.result: # user pressed cancel return self.points = self.points[dlg.index] def _switch_left_right_fired(self): self.points = self.points[[1, 0, 2, 4, 3]] class MarkerPointDest(MarkerPoints): # noqa: D401 """MarkerPoints subclass that serves for derived points.""" src1 = Instance(MarkerPointSource) src2 = Instance(MarkerPointSource) name = Property(Str, depends_on='src1.name,src2.name') dir = Property(Str, depends_on='src1.dir,src2.dir') points = Property(ArrayOrNone(float, (5, 3)), depends_on=['method', 'src1.points', 'src1.use', 'src2.points', 'src2.use']) enabled = Property(Bool, depends_on=['points']) method = Enum('Transform', 'Average', desc="Transform: estimate a rotation" "/translation from mrk1 to mrk2; Average: use the average " "of the mrk1 and mrk2 coordinates for each point.") view = View(VGroup(Item('method', style='custom'), Item('save_as', enabled_when='can_save', show_label=False))) @cached_property def _get_dir(self): return self.src1.dir @cached_property def _get_name(self): n1 = self.src1.name n2 = self.src2.name if not n1: if n2: return n2 else: return '' elif not n2: return n1 if n1 == n2: return n1 i = 0 l1 = len(n1) - 1 l2 = len(n1) - 2 while n1[i] == n2[i]: if i == l1: return n1 elif i == l2: return n2 i += 1 return n1[:i] @cached_property def _get_enabled(self): return np.any(self.points) @cached_property def _get_points(self): # in case only one or no source is enabled if not (self.src1 and self.src1.enabled): if (self.src2 and self.src2.enabled): return self.src2.points else: return np.zeros((5, 3)) elif not (self.src2 and self.src2.enabled): return self.src1.points # Average method if self.method == 
'Average': if len(np.union1d(self.src1.use, self.src2.use)) < 5: error(None, "Need at least one source for each point.", "Marker Average Error") return np.zeros((5, 3)) pts = (self.src1.points + self.src2.points) / 2. for i in np.setdiff1d(self.src1.use, self.src2.use): pts[i] = self.src1.points[i] for i in np.setdiff1d(self.src2.use, self.src1.use): pts[i] = self.src2.points[i] return pts # Transform method idx = np.intersect1d(np.array(self.src1.use), np.array(self.src2.use), assume_unique=True) if len(idx) < 3: error(None, "Need at least three shared points for trans" "formation.", "Marker Interpolation Error") return np.zeros((5, 3)) src_pts = self.src1.points[idx] tgt_pts = self.src2.points[idx] est = fit_matched_points(src_pts, tgt_pts, out='params') rot = np.array(est[:3]) / 2. tra = np.array(est[3:]) / 2. if len(self.src1.use) == 5: trans = np.dot(translation(*tra), rotation(*rot)) pts = apply_trans(trans, self.src1.points) elif len(self.src2.use) == 5: trans = np.dot(translation(* -tra), rotation(* -rot)) pts = apply_trans(trans, self.src2.points) else: trans1 = np.dot(translation(*tra), rotation(*rot)) pts = apply_trans(trans1, self.src1.points) trans2 = np.dot(translation(* -tra), rotation(* -rot)) for i in np.setdiff1d(self.src2.use, self.src1.use): pts[i] = apply_trans(trans2, self.src2.points[i]) return pts class CombineMarkersModel(HasPrivateTraits): """Combine markers model.""" mrk1_file = Instance(File) mrk2_file = Instance(File) mrk1 = Instance(MarkerPointSource) mrk2 = Instance(MarkerPointSource) mrk3 = Instance(MarkerPointDest) clear = Button(desc="Clear the current marker data") # stats distance = Property(Str, depends_on=['mrk1.points', 'mrk2.points']) def _clear_fired(self): self.mrk1.clear = True self.mrk2.clear = True self.mrk3.reset_traits(['method']) def _mrk1_default(self): return MarkerPointSource() def _mrk1_file_default(self): return self.mrk1.trait('file') def _mrk2_default(self): return MarkerPointSource() def _mrk2_file_default(self): return self.mrk2.trait('file') def _mrk3_default(self): return MarkerPointDest(src1=self.mrk1, src2=self.mrk2) @cached_property def _get_distance(self): if (self.mrk1 is None or self.mrk2 is None or (not np.any(self.mrk1.points)) or (not np.any(self.mrk2.points))): return "" ds = np.sqrt(np.sum((self.mrk1.points - self.mrk2.points) ** 2, 1)) desc = '\t'.join('%.1f mm' % (d * 1000) for d in ds) return desc class CombineMarkersPanel(HasTraits): # noqa: D401 """Has two marker points sources and interpolates to a third one.""" model = Instance(CombineMarkersModel, ()) # model references for UI mrk1 = Instance(MarkerPointSource) mrk2 = Instance(MarkerPointSource) mrk3 = Instance(MarkerPointDest) distance = Str # Visualization scene = Instance(MlabSceneModel) scale = Float(5e-3) mrk1_obj = Instance(PointObject) mrk2_obj = Instance(PointObject) mrk3_obj = Instance(PointObject) trans = Array() view = View(VGroup(VGroup(Item('mrk1', style='custom'), Item('mrk1_obj', style='custom'), show_labels=False, label="Source Marker 1", show_border=True), VGroup(Item('mrk2', style='custom'), Item('mrk2_obj', style='custom'), show_labels=False, label="Source Marker 2", show_border=True), VGroup(Item('distance', style='readonly'), label='Stats', show_border=True), VGroup(Item('mrk3', style='custom'), Item('mrk3_obj', style='custom'), show_labels=False, label="New Marker", show_border=True), )) def _mrk1_default(self): return self.model.mrk1 def _mrk2_default(self): return self.model.mrk2 def _mrk3_default(self): return self.model.mrk3 def 
__init__(self, *args, **kwargs): # noqa: D102 super(CombineMarkersPanel, self).__init__(*args, **kwargs) self.model.sync_trait('distance', self, 'distance', mutual=False) self.mrk1_obj = PointObject(scene=self.scene, color=(0.608, 0.216, 0.216), point_scale=self.scale) self.model.mrk1.sync_trait( 'enabled', self.mrk1_obj, 'visible', mutual=False) self.mrk2_obj = PointObject(scene=self.scene, color=(0.216, 0.608, 0.216), point_scale=self.scale) self.model.mrk2.sync_trait( 'enabled', self.mrk2_obj, 'visible', mutual=False) self.mrk3_obj = PointObject(scene=self.scene, color=(0.588, 0.784, 1.), point_scale=self.scale) self.model.mrk3.sync_trait( 'enabled', self.mrk3_obj, 'visible', mutual=False) @on_trait_change('model:mrk1:points,trans') def _update_mrk1(self): if self.mrk1_obj is not None: self.mrk1_obj.points = apply_trans(self.trans, self.model.mrk1.points) @on_trait_change('model:mrk2:points,trans') def _update_mrk2(self): if self.mrk2_obj is not None: self.mrk2_obj.points = apply_trans(self.trans, self.model.mrk2.points) @on_trait_change('model:mrk3:points,trans') def _update_mrk3(self): if self.mrk3_obj is not None: self.mrk3_obj.points = apply_trans(self.trans, self.model.mrk3.points)
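The 'Transform' branch of MarkerPointDest._get_points above averages two marker measurements by estimating the full mrk1-to-mrk2 rotation/translation and then applying half of it. A standalone sketch of that idea; the random arrays are stand-ins for read_mrk output:

import numpy as np
from mne.coreg import fit_matched_points
from mne.transforms import apply_trans, rotation, translation

rng = np.random.RandomState(0)
mrk1 = rng.randn(5, 3) * 0.01  # five marker positions, in meters
mrk2 = mrk1 + 0.002            # a second, slightly shifted measurement

# Estimate the six transform parameters, halve them, and move mrk1
# halfway toward mrk2.
rx, ry, rz, tx, ty, tz = fit_matched_points(mrk1, mrk2, out='params')
trans = np.dot(translation(tx / 2., ty / 2., tz / 2.),
               rotation(rx / 2., ry / 2., rz / 2.))
midway = apply_trans(trans, mrk1)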
rkmaddox/mne-python
mne/inverse_sparse/tests/test_gamma_map.py
mne/gui/_marker_gui.py
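The file that follows implements MNE's Label and BiHemiLabel classes and their arithmetic. As a preview, a minimal sketch of combining labels across hemispheres; the .label file names are placeholders for FreeSurfer label files on disk:

import mne

lh = mne.read_label('lh.BA1.label', subject='sample')
rh = mne.read_label('rh.BA1.label', subject='sample')
both = lh + rh  # labels from opposite hemispheres form a BiHemiLabel
print(both, len(both))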
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr> # Martin Luessi <mluessi@nmr.mgh.harvard.edu> # Denis Engemann <denis.engemann@gmail.com> # # License: BSD (3-clause) from collections import defaultdict from colorsys import hsv_to_rgb, rgb_to_hsv import copy as cp import os import os.path as op import re import numpy as np from .morph_map import read_morph_map from .parallel import parallel_func, check_n_jobs from .source_estimate import (SourceEstimate, VolSourceEstimate, _center_of_mass, extract_label_time_course, spatial_src_adjacency) from .source_space import (add_source_space_distances, SourceSpaces, read_freesurfer_lut, _import_nibabel) from .stats.cluster_level import _find_clusters, _get_components from .surface import read_surface, fast_cross_3d, mesh_edges, mesh_dist from .transforms import apply_trans from .utils import (get_subjects_dir, _check_subject, logger, verbose, warn, check_random_state, _validate_type, fill_doc, _check_option, check_version) def _blend_colors(color_1, color_2): """Blend two colors in HSV space. Parameters ---------- color_1, color_2 : None | tuple RGBA tuples with values between 0 and 1. None if no color is available. If both colors are None, the output is None. If only one is None, the output is the other color. Returns ------- color : None | tuple RGBA tuple of the combined color. Saturation, value and alpha are averaged, whereas the new hue is determined as angle half way between the two input colors' hues. """ if color_1 is None and color_2 is None: return None elif color_1 is None: return color_2 elif color_2 is None: return color_1 r_1, g_1, b_1, a_1 = color_1 h_1, s_1, v_1 = rgb_to_hsv(r_1, g_1, b_1) r_2, g_2, b_2, a_2 = color_2 h_2, s_2, v_2 = rgb_to_hsv(r_2, g_2, b_2) hue_diff = abs(h_1 - h_2) if hue_diff < 0.5: h = min(h_1, h_2) + hue_diff / 2. else: h = max(h_1, h_2) + (1. - hue_diff) / 2. h %= 1. s = (s_1 + s_2) / 2. v = (v_1 + v_2) / 2. r, g, b = hsv_to_rgb(h, s, v) a = (a_1 + a_2) / 2. color = (r, g, b, a) return color def _split_colors(color, n): """Create n colors in HSV space that occupy a gradient in value. Parameters ---------- color : tuple RGBA tuple with values between 0 and 1. n : int >= 2 Number of colors on the gradient. Returns ------- colors : tuple of tuples, len = n N RGBA tuples that occupy a gradient in value (low to high) but share saturation and hue with the input color. """ r, g, b, a = color h, s, v = rgb_to_hsv(r, g, b) gradient_range = np.sqrt(n / 10.) if v > 0.5: v_max = min(0.95, v + gradient_range / 2) v_min = max(0.05, v_max - gradient_range) else: v_min = max(0.05, v - gradient_range / 2) v_max = min(0.95, v_min + gradient_range) hsv_colors = ((h, s, v_) for v_ in np.linspace(v_min, v_max, n)) rgb_colors = (hsv_to_rgb(h_, s_, v_) for h_, s_, v_ in hsv_colors) rgba_colors = ((r_, g_, b_, a,) for r_, g_, b_ in rgb_colors) return tuple(rgba_colors) def _n_colors(n, bytes_=False, cmap='hsv'): """Produce a list of n unique RGBA color tuples based on a colormap. Parameters ---------- n : int Number of colors. bytes : bool Return colors as integers values between 0 and 255 (instead of floats between 0 and 1). cmap : str Which colormap to use. Returns ------- colors : array, shape (n, 4) RGBA color values. 
""" n_max = 2 ** 10 if n > n_max: raise NotImplementedError("Can't produce more than %i unique " "colors" % n_max) from matplotlib.cm import get_cmap cm = get_cmap(cmap, n_max) pos = np.linspace(0, 1, n, False) colors = cm(pos, bytes=bytes_) if bytes_: # make sure colors are unique for ii, c in enumerate(colors): if np.any(np.all(colors[:ii] == c, 1)): raise RuntimeError('Could not get %d unique colors from %s ' 'colormap. Try using a different colormap.' % (n, cmap)) return colors @fill_doc class Label(object): """A FreeSurfer/MNE label with vertices restricted to one hemisphere. Labels can be combined with the ``+`` operator: * Duplicate vertices are removed. * If duplicate vertices have conflicting position values, an error is raised. * Values of duplicate vertices are summed. Parameters ---------- vertices : array, shape (N,) Vertex indices (0 based). pos : array, shape (N, 3) | None Locations in meters. If None, then zeros are used. values : array, shape (N,) | None Values at the vertices. If None, then ones are used. hemi : 'lh' | 'rh' Hemisphere to which the label applies. comment : str Kept as information but not used by the object itself. name : str Kept as information but not used by the object itself. filename : str Kept as information but not used by the object itself. subject : str | None Name of the subject the label is from. color : None | matplotlib color Default label color and alpha (e.g., ``(1., 0., 0., 1.)`` for red). %(verbose)s Attributes ---------- color : None | tuple Default label color, represented as RGBA tuple with values between 0 and 1. comment : str Comment from the first line of the label file. hemi : 'lh' | 'rh' Hemisphere. name : None | str A name for the label. It is OK to change that attribute manually. pos : array, shape (N, 3) Locations in meters. subject : str | None Subject name. It is best practice to set this to the proper value on initialization, but it can also be set manually. values : array, shape (N,) Values at the vertices. 
%(verbose)s vertices : array, shape (N,) Vertex indices (0 based) """ @verbose def __init__(self, vertices=(), pos=None, values=None, hemi=None, comment="", name=None, filename=None, subject=None, color=None, verbose=None): # noqa: D102 # check parameters if not isinstance(hemi, str): raise ValueError('hemi must be a string, not %s' % type(hemi)) vertices = np.asarray(vertices, int) if np.any(np.diff(vertices.astype(int)) <= 0): raise ValueError('Vertices must be ordered in increasing order.') if color is not None: from matplotlib.colors import colorConverter color = colorConverter.to_rgba(color) if values is None: values = np.ones(len(vertices)) else: values = np.asarray(values) if pos is None: pos = np.zeros((len(vertices), 3)) else: pos = np.asarray(pos) if not (len(vertices) == len(values) == len(pos)): raise ValueError("vertices, values and pos need to have same " "length (number of vertices)") # name if name is None and filename is not None: name = op.basename(filename[:-6]) self.vertices = vertices self.pos = pos self.values = values self.hemi = hemi self.comment = comment self.verbose = verbose self.subject = _check_subject(None, subject, raise_error=False) self.color = color self.name = name self.filename = filename def __setstate__(self, state): # noqa: D105 self.vertices = state['vertices'] self.pos = state['pos'] self.values = state['values'] self.hemi = state['hemi'] self.comment = state['comment'] self.verbose = state['verbose'] self.subject = state.get('subject', None) self.color = state.get('color', None) self.name = state['name'] self.filename = state['filename'] def __getstate__(self): # noqa: D105 out = dict(vertices=self.vertices, pos=self.pos, values=self.values, hemi=self.hemi, comment=self.comment, verbose=self.verbose, subject=self.subject, color=self.color, name=self.name, filename=self.filename) return out def __repr__(self): # noqa: D105 name = 'unknown, ' if self.subject is None else self.subject + ', ' name += repr(self.name) if self.name is not None else "unnamed" n_vert = len(self) return "<Label | %s, %s : %i vertices>" % (name, self.hemi, n_vert) def __len__(self): """Return the number of vertices. Returns ------- n_vertices : int The number of vertices. """ return len(self.vertices) def __add__(self, other): """Add Labels.""" _validate_type(other, (Label, BiHemiLabel), 'other') if isinstance(other, BiHemiLabel): return other + self else: # isinstance(other, Label) if self.subject != other.subject: raise ValueError('Label subject parameters must match, got ' '"%s" and "%s". Consider setting the ' 'subject parameter on initialization, or ' 'setting label.subject manually before ' 'combining labels.' 
% (self.subject, other.subject)) if self.hemi != other.hemi: name = '%s + %s' % (self.name, other.name) if self.hemi == 'lh': lh, rh = self.copy(), other.copy() else: lh, rh = other.copy(), self.copy() color = _blend_colors(self.color, other.color) return BiHemiLabel(lh, rh, name, color) # check for overlap duplicates = np.intersect1d(self.vertices, other.vertices) n_dup = len(duplicates) if n_dup: self_dup = [np.where(self.vertices == d)[0][0] for d in duplicates] other_dup = [np.where(other.vertices == d)[0][0] for d in duplicates] if not np.all(self.pos[self_dup] == other.pos[other_dup]): err = ("Labels %r and %r: vertices overlap but differ in " "position values" % (self.name, other.name)) raise ValueError(err) isnew = np.array([v not in duplicates for v in other.vertices]) vertices = np.hstack((self.vertices, other.vertices[isnew])) pos = np.vstack((self.pos, other.pos[isnew])) # find position of other's vertices in new array tgt_idx = [np.where(vertices == v)[0][0] for v in other.vertices] n_self = len(self.values) n_other = len(other.values) new_len = n_self + n_other - n_dup values = np.zeros(new_len, dtype=self.values.dtype) values[:n_self] += self.values values[tgt_idx] += other.values else: vertices = np.hstack((self.vertices, other.vertices)) pos = np.vstack((self.pos, other.pos)) values = np.hstack((self.values, other.values)) indcs = np.argsort(vertices) vertices, pos, values = vertices[indcs], pos[indcs, :], values[indcs] comment = "%s + %s" % (self.comment, other.comment) name0 = self.name if self.name else 'unnamed' name1 = other.name if other.name else 'unnamed' name = "%s + %s" % (name0, name1) color = _blend_colors(self.color, other.color) verbose = self.verbose or other.verbose label = Label(vertices, pos, values, self.hemi, comment, name, None, self.subject, color, verbose) return label def __sub__(self, other): """Subtract Labels.""" _validate_type(other, (Label, BiHemiLabel), 'other') if isinstance(other, BiHemiLabel): if self.hemi == 'lh': return self - other.lh else: return self - other.rh else: # isinstance(other, Label): if self.subject != other.subject: raise ValueError('Label subject parameters must match, got ' '"%s" and "%s". Consider setting the ' 'subject parameter on initialization, or ' 'setting label.subject manually before ' 'combining labels.' % (self.subject, other.subject)) if self.hemi == other.hemi: keep = np.in1d(self.vertices, other.vertices, True, invert=True) else: keep = np.arange(len(self.vertices)) name = "%s - %s" % (self.name or 'unnamed', other.name or 'unnamed') return Label(self.vertices[keep], self.pos[keep], self.values[keep], self.hemi, self.comment, name, None, self.subject, self.color, self.verbose) def save(self, filename): r"""Write to disk as FreeSurfer \*.label file. Parameters ---------- filename : str Path to label file to produce. Notes ----- Note that due to file specification limitations, the Label's subject and color attributes are not saved to disk. """ write_label(filename, self) def copy(self): """Copy the label instance. Returns ------- label : instance of Label The copied label. """ return cp.deepcopy(self) def fill(self, src, name=None): """Fill the surface between sources for a source space label. Parameters ---------- src : SourceSpaces Source space in which the label was defined. If a source space is provided, the label is expanded to fill in surface vertices that lie between the vertices included in the source space. 
For the added vertices, ``pos`` is filled in with positions from the source space, and ``values`` is filled in from the closest source space vertex. name : None | str Name for the new Label (default is self.name). Returns ------- label : Label The label covering the same vertices in source space but also including intermediate surface vertices. See Also -------- Label.restrict Label.smooth """ # find source space patch info if len(self.vertices) == 0: return self.copy() hemi_src = _get_label_src(self, src) if not np.all(np.in1d(self.vertices, hemi_src['vertno'])): msg = "Source space does not contain all of the label's vertices" raise ValueError(msg) if hemi_src['nearest'] is None: warn("Source space is being modified in place because patch " "information is needed. To avoid this in the future, run " "mne.add_source_space_distances() on the source space " "and save it to disk.") if check_version('scipy', '1.3'): dist_limit = 0 else: warn('SciPy < 1.3 detected, adding source space patch ' 'information will be slower. Consider upgrading SciPy.') dist_limit = np.inf add_source_space_distances(src, dist_limit=dist_limit) nearest = hemi_src['nearest'] # find new vertices include = np.in1d(nearest, self.vertices, False) vertices = np.nonzero(include)[0] # values nearest_in_label = np.digitize(nearest[vertices], self.vertices, True) values = self.values[nearest_in_label] # pos pos = hemi_src['rr'][vertices] name = self.name if name is None else name label = Label(vertices, pos, values, self.hemi, self.comment, name, None, self.subject, self.color) return label def restrict(self, src, name=None): """Restrict a label to a source space. Parameters ---------- src : instance of SourceSpaces The source spaces to use to restrict the label. name : None | str Name for the new Label (default is self.name). Returns ------- label : instance of Label The Label restricted to the set of source space vertices. See Also -------- Label.fill Notes ----- .. versionadded:: 0.20 """ if len(self.vertices) == 0: return self.copy() hemi_src = _get_label_src(self, src) mask = np.in1d(self.vertices, hemi_src['vertno']) name = self.name if name is None else name label = Label(self.vertices[mask], self.pos[mask], self.values[mask], self.hemi, self.comment, name, None, self.subject, self.color) return label @verbose def smooth(self, subject=None, smooth=2, grade=None, subjects_dir=None, n_jobs=1, verbose=None): """Smooth the label. Useful for filling in labels made in a decimated source space for display. Parameters ---------- subject : str | None The name of the subject used. If None, the value will be taken from self.subject. smooth : int Number of iterations for the smoothing of the surface data. Cannot be None here since not all vertices are used. For a grade of 5 (e.g., fsaverage), a smoothing of 2 will fill a label. grade : int, list of shape (2,), array, or None Resolution of the icosahedral mesh (typically 5). If None, all vertices will be used (potentially filling the surface). If a list, values will be morphed to the set of vertices specified in grade[0] and grade[1], assuming that these are vertices for the left and right hemispheres. Note that specifying the vertices (e.g., grade=[np.arange(10242), np.arange(10242)] for fsaverage on a standard grade 5 source space) can be substantially faster than computing vertex locations. If one array is used, it is assumed that all vertices belong to the hemisphere of the label. To create a label filling the surface, use None. 
%(subjects_dir)s %(n_jobs)s %(verbose_meth)s Returns ------- label : instance of Label The smoothed label. Notes ----- This function will set label.pos to be all zeros. If the positions on the new surface are required, consider using mne.read_surface with ``label.vertices``. """ subject = _check_subject(self.subject, subject) return self.morph(subject, subject, smooth, grade, subjects_dir, n_jobs, verbose) @verbose def morph(self, subject_from=None, subject_to=None, smooth=5, grade=None, subjects_dir=None, n_jobs=1, verbose=None): """Morph the label. Useful for transforming a label from one subject to another. Parameters ---------- subject_from : str | None The name of the subject of the current label. If None, the initial subject will be taken from self.subject. subject_to : str The name of the subject to morph the label to. This will be put in label.subject of the output label file. smooth : int Number of iterations for the smoothing of the surface data. Cannot be None here since not all vertices are used. grade : int, list of shape (2,), array, or None Resolution of the icosahedral mesh (typically 5). If None, all vertices will be used (potentially filling the surface). If a list, values will be morphed to the set of vertices specified in grade[0] and grade[1], assuming that these are vertices for the left and right hemispheres. Note that specifying the vertices (e.g., ``grade=[np.arange(10242), np.arange(10242)]`` for fsaverage on a standard grade 5 source space) can be substantially faster than computing vertex locations. If one array is used, it is assumed that all vertices belong to the hemisphere of the label. To create a label filling the surface, use None. %(subjects_dir)s %(n_jobs)s %(verbose_meth)s Returns ------- label : instance of Label The morphed label. See Also -------- mne.morph_labels : Morph a set of labels. Notes ----- This function will set label.pos to be all zeros. If the positions on the new surface are required, consider using `mne.read_surface` with ``label.vertices``. """ from .morph import compute_source_morph, grade_to_vertices subject_from = _check_subject(self.subject, subject_from) if not isinstance(subject_to, str): raise TypeError('"subject_to" must be entered as a string') if not isinstance(smooth, int): raise TypeError('smooth must be an integer') if np.all(self.values == 0): raise ValueError('Morphing label with all zero values will result ' 'in the label having no vertices. Consider using ' 'something like label.values.fill(1.0).') idx = 0 if self.hemi == 'lh' else 1 if isinstance(grade, np.ndarray): grade_ = [np.array([], int)] * 2 grade_[idx] = grade grade = grade_ del grade_ grade = grade_to_vertices(subject_to, grade, subjects_dir=subjects_dir) spacing = [np.array([], int)] * 2 spacing[idx] = grade[idx] vertices = [np.array([], int)] * 2 vertices[idx] = self.vertices data = self.values[:, np.newaxis] assert len(data) == sum(len(v) for v in vertices) stc = SourceEstimate(data, vertices, tmin=1, tstep=1, subject=subject_from) stc = compute_source_morph( stc, subject_from, subject_to, spacing=spacing, smooth=smooth, subjects_dir=subjects_dir, warn=False).apply(stc) inds = np.nonzero(stc.data)[0] self.values = stc.data[inds, :].ravel() self.pos = np.zeros((len(inds), 3)) self.vertices = stc.vertices[idx][inds] self.subject = subject_to return self @fill_doc def split(self, parts=2, subject=None, subjects_dir=None, freesurfer=False): """Split the Label into two or more parts. 
Parameters ---------- parts : int >= 2 | tuple of str | str Number of labels to create (default is 2), or tuple of strings specifying label names for new labels (from posterior to anterior), or 'contiguous' to split the label into connected components. If a number or 'contiguous' is specified, names of the new labels will be the input label's name with div1, div2 etc. appended. subject : None | str Subject which this label belongs to (needed to locate surface file; should only be specified if it is not specified in the label). %(subjects_dir)s freesurfer : bool By default (``False``) ``split_label`` uses an algorithm that is slightly optimized for performance and numerical precision. Set ``freesurfer`` to ``True`` in order to replicate label splits from FreeSurfer's ``mris_divide_parcellation``. Returns ------- labels : list of Label, shape (n_parts,) The labels, starting from the lowest to the highest end of the projection axis. Notes ----- If using 'contiguous' split, you must ensure that the label being split uses the same triangular resolution as the surface mesh files in ``subjects_dir`` Also, some small fringe labels may be returned that are close (but not connected) to the large components. The spatial split finds the label's principal eigen-axis on the spherical surface, projects all label vertex coordinates onto this axis, and divides them at regular spatial intervals. """ if isinstance(parts, str) and parts == 'contiguous': return _split_label_contig(self, subject, subjects_dir) elif isinstance(parts, (tuple, int)): return split_label(self, parts, subject, subjects_dir, freesurfer) else: raise ValueError("Need integer, tuple of strings, or string " "('contiguous'). Got %s)" % type(parts)) def get_vertices_used(self, vertices=None): """Get the source space's vertices inside the label. Parameters ---------- vertices : ndarray of int, shape (n_vertices,) | None The set of vertices to compare the label to. If None, equals to ``np.arange(10242)``. Defaults to None. Returns ------- label_verts : ndarray of in, shape (n_label_vertices,) The vertices of the label corresponding used by the data. """ if vertices is None: vertices = np.arange(10242) label_verts = vertices[np.in1d(vertices, self.vertices)] return label_verts def get_tris(self, tris, vertices=None): """Get the source space's triangles inside the label. Parameters ---------- tris : ndarray of int, shape (n_tris, 3) The set of triangles corresponding to the vertices in a source space. vertices : ndarray of int, shape (n_vertices,) | None The set of vertices to compare the label to. If None, equals to ``np.arange(10242)``. Defaults to None. Returns ------- label_tris : ndarray of int, shape (n_tris, 3) The subset of tris used by the label. """ vertices_ = self.get_vertices_used(vertices) selection = np.all(np.in1d(tris, vertices_).reshape(tris.shape), axis=1) label_tris = tris[selection] if len(np.unique(label_tris)) < len(vertices_): logger.info('Surprising label structure. 
Trying to repair ' 'triangles.') dropped_vertices = np.setdiff1d(vertices_, label_tris) n_dropped = len(dropped_vertices) assert n_dropped == (len(vertices_) - len(np.unique(label_tris))) # put missing vertices as extra zero-length triangles add_tris = (dropped_vertices + np.zeros((len(dropped_vertices), 3), dtype=int).T) label_tris = np.r_[label_tris, add_tris.T] assert len(np.unique(label_tris)) == len(vertices_) return label_tris @fill_doc def center_of_mass(self, subject=None, restrict_vertices=False, subjects_dir=None, surf='sphere'): """Compute the center of mass of the label. This function computes the spatial center of mass on the surface as in :footcite:`LarsonLee2013`. Parameters ---------- subject : str | None The subject the label is defined for. restrict_vertices : bool | array of int | instance of SourceSpaces If True, returned vertex will be one from the label. Otherwise, it could be any vertex from surf. If an array of int, the returned vertex will come from that array. If instance of SourceSpaces (as of 0.13), the returned vertex will be from the given source space. For most accuruate estimates, do not restrict vertices. %(subjects_dir)s surf : str The surface to use for Euclidean distance center of mass finding. The default here is "sphere", which finds the center of mass on the spherical surface to help avoid potential issues with cortical folding. Returns ------- vertex : int Vertex of the spatial center of mass for the inferred hemisphere, with each vertex weighted by its label value. See Also -------- SourceEstimate.center_of_mass vertex_to_mni Notes ----- .. versionadded:: 0.13 References ---------- .. footbibliography:: """ if not isinstance(surf, str): raise TypeError('surf must be a string, got %s' % (type(surf),)) subject = _check_subject(self.subject, subject) if np.any(self.values < 0): raise ValueError('Cannot compute COM with negative values') if np.all(self.values == 0): raise ValueError('Cannot compute COM with all values == 0. For ' 'structural labels, consider setting to ones via ' 'label.values[:] = 1.') vertex = _center_of_mass(self.vertices, self.values, self.hemi, surf, subject, subjects_dir, restrict_vertices) return vertex def _get_label_src(label, src): _validate_type(src, SourceSpaces, 'src') if src.kind != 'surface': raise RuntimeError('Cannot operate on SourceSpaces that are not ' 'surface type, got %s' % (src.kind,)) if label.hemi == 'lh': hemi_src = src[0] else: hemi_src = src[1] return hemi_src class BiHemiLabel(object): """A freesurfer/MNE label with vertices in both hemispheres. Parameters ---------- lh : Label Label for the left hemisphere. rh : Label Label for the right hemisphere. name : None | str Name for the label. color : None | color Label color and alpha (e.g., ``(1., 0., 0., 1.)`` for red). Note that due to file specification limitations, the color isn't saved to or loaded from files written to disk. Attributes ---------- lh : Label Label for the left hemisphere. rh : Label Label for the right hemisphere. name : None | str A name for the label. It is OK to change that attribute manually. subject : str | None Subject the label is from. 
""" def __init__(self, lh, rh, name=None, color=None): # noqa: D102 if lh.subject != rh.subject: raise ValueError('lh.subject (%s) and rh.subject (%s) must ' 'agree' % (lh.subject, rh.subject)) self.lh = lh self.rh = rh self.name = name self.subject = lh.subject self.color = color self.hemi = 'both' def __repr__(self): # noqa: D105 temp = "<BiHemiLabel | %s, lh : %i vertices, rh : %i vertices>" name = 'unknown, ' if self.subject is None else self.subject + ', ' name += repr(self.name) if self.name is not None else "unnamed" return temp % (name, len(self.lh), len(self.rh)) def __len__(self): """Return the number of vertices. Returns ------- n_vertices : int The number of vertices. """ return len(self.lh) + len(self.rh) def __add__(self, other): """Add labels.""" if isinstance(other, Label): if other.hemi == 'lh': lh = self.lh + other rh = self.rh else: lh = self.lh rh = self.rh + other elif isinstance(other, BiHemiLabel): lh = self.lh + other.lh rh = self.rh + other.rh else: raise TypeError("Need: Label or BiHemiLabel. Got: %r" % other) name = '%s + %s' % (self.name, other.name) color = _blend_colors(self.color, other.color) return BiHemiLabel(lh, rh, name, color) def __sub__(self, other): """Subtract labels.""" _validate_type(other, (Label, BiHemiLabel), 'other') if isinstance(other, Label): if other.hemi == 'lh': lh = self.lh - other rh = self.rh else: rh = self.rh - other lh = self.lh else: # isinstance(other, BiHemiLabel) lh = self.lh - other.lh rh = self.rh - other.rh if len(lh.vertices) == 0: return rh elif len(rh.vertices) == 0: return lh else: name = '%s - %s' % (self.name, other.name) return BiHemiLabel(lh, rh, name, self.color) def read_label(filename, subject=None, color=None): """Read FreeSurfer Label file. Parameters ---------- filename : str Path to label file. subject : str | None Name of the subject the data are defined for. It is good practice to set this attribute to avoid combining incompatible labels and SourceEstimates (e.g., ones from other subjects). Note that due to file specification limitations, the subject name isn't saved to or loaded from files written to disk. color : None | matplotlib color Default label color and alpha (e.g., ``(1., 0., 0., 1.)`` for red). Note that due to file specification limitations, the color isn't saved to or loaded from files written to disk. Returns ------- label : Label Instance of Label object with attributes: - ``comment``: comment from the first line of the label file - ``vertices``: vertex indices (0 based, column 1) - ``pos``: locations in meters (columns 2 - 4 divided by 1000) - ``values``: values at the vertices (column 5) See Also -------- read_labels_from_annot write_labels_to_annot """ if subject is not None and not isinstance(subject, str): raise TypeError('subject must be a string') # find hemi basename = op.basename(filename) if basename.endswith('lh.label') or basename.startswith('lh.'): hemi = 'lh' elif basename.endswith('rh.label') or basename.startswith('rh.'): hemi = 'rh' else: raise ValueError('Cannot find which hemisphere it is. 
File should end' ' with lh.label or rh.label: %s' % (basename,)) # find name if basename.startswith(('lh.', 'rh.')): basename_ = basename[3:] if basename.endswith('.label'): basename_ = basename[:-6] else: basename_ = basename[:-9] name = "%s-%s" % (basename_, hemi) # read the file with open(filename, 'r') as fid: comment = fid.readline().replace('\n', '')[1:] nv = int(fid.readline()) data = np.empty((5, nv)) for i, line in enumerate(fid): data[:, i] = line.split() # let's make sure everything is ordered correctly vertices = np.array(data[0], dtype=np.int32) pos = 1e-3 * data[1:4].T values = data[4] order = np.argsort(vertices) vertices = vertices[order] pos = pos[order] values = values[order] label = Label(vertices, pos, values, hemi, comment, name, filename, subject, color) return label @verbose def write_label(filename, label, verbose=None): """Write a FreeSurfer label. Parameters ---------- filename : str Path to label file to produce. label : Label The label object to save. %(verbose)s See Also -------- write_labels_to_annot Notes ----- Note that due to file specification limitations, the Label's subject and color attributes are not saved to disk. """ hemi = label.hemi path_head, name = op.split(filename) if name.endswith('.label'): name = name[:-6] if not (name.startswith(hemi) or name.endswith(hemi)): name += '-' + hemi filename = op.join(path_head, name) + '.label' logger.info('Saving label to : %s' % filename) with open(filename, 'wb') as fid: n_vertices = len(label.vertices) data = np.zeros((n_vertices, 5), dtype=np.float64) data[:, 0] = label.vertices data[:, 1:4] = 1e3 * label.pos data[:, 4] = label.values fid.write(b'#%s\n' % label.comment.encode()) fid.write(b'%d\n' % n_vertices) for d in data: fid.write(b'%d %f %f %f %f\n' % tuple(d)) def _prep_label_split(label, subject=None, subjects_dir=None): """Get label and subject information prior to label splitting.""" # If necessary, find the label if isinstance(label, BiHemiLabel): raise TypeError("Can only split labels restricted to one hemisphere.") elif isinstance(label, str): label = read_label(label) # Find the subject subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) if label.subject is None and subject is None: raise ValueError("The subject needs to be specified.") elif subject is None: subject = label.subject elif label.subject is None: pass elif subject != label.subject: raise ValueError("The label specifies a different subject (%r) from " "the subject parameter (%r)." % (label.subject, subject)) return label, subject, subjects_dir def _split_label_contig(label_to_split, subject=None, subjects_dir=None): """Split label into contiguous regions (i.e., connected components). Parameters ---------- label_to_split : Label | str Label which is to be split (Label object or path to a label file). subject : None | str Subject which this label belongs to (needed to locate surface file; should only be specified if it is not specified in the label). %(subjects_dir)s Returns ------- labels : list of Label The contiguous labels, in order of descending size.
""" # Convert to correct input if necessary label_to_split, subject, subjects_dir = _prep_label_split(label_to_split, subject, subjects_dir) # Find the spherical surface to get vertices and tris surf_fname = '.'.join((label_to_split.hemi, 'sphere')) surf_path = op.join(subjects_dir, subject, 'surf', surf_fname) surface_points, surface_tris = read_surface(surf_path) # Get vertices we want to keep and compute mesh edges verts_arr = label_to_split.vertices edges_all = mesh_edges(surface_tris) # Subselect rows and cols of vertices that belong to the label select_edges = edges_all[verts_arr][:, verts_arr].tocoo() # Compute connected components and store as lists of vertex numbers comp_labels = _get_components(verts_arr, select_edges) # Convert to indices in the original surface space label_divs = [] for comp in comp_labels: label_divs.append(verts_arr[comp]) # Construct label division names n_parts = len(label_divs) if label_to_split.name.endswith(('lh', 'rh')): basename = label_to_split.name[:-3] name_ext = label_to_split.name[-3:] else: basename = label_to_split.name name_ext = '' name_pattern = "%s_div%%i%s" % (basename, name_ext) names = tuple(name_pattern % i for i in range(1, n_parts + 1)) # Colors if label_to_split.color is None: colors = (None,) * n_parts else: colors = _split_colors(label_to_split.color, n_parts) # Sort label divisions by their size (in vertices) label_divs.sort(key=lambda x: len(x), reverse=True) labels = [] for div, name, color in zip(label_divs, names, colors): # Get indices of dipoles within this division of the label verts = np.array(sorted(list(div)), int) vert_indices = np.in1d(verts_arr, verts, assume_unique=True) # Set label attributes pos = label_to_split.pos[vert_indices] values = label_to_split.values[vert_indices] hemi = label_to_split.hemi comment = label_to_split.comment lbl = Label(verts, pos, values, hemi, comment, name, None, subject, color) labels.append(lbl) return labels @fill_doc def split_label(label, parts=2, subject=None, subjects_dir=None, freesurfer=False): """Split a Label into two or more parts. Parameters ---------- label : Label | str Label which is to be split (Label object or path to a label file). parts : int >= 2 | tuple of str A sequence of strings specifying label names for the new labels (from posterior to anterior), or the number of new labels to create (default is 2). If a number is specified, names of the new labels will be the input label's name with div1, div2 etc. appended. subject : None | str Subject which this label belongs to (needed to locate surface file; should only be specified if it is not specified in the label). %(subjects_dir)s freesurfer : bool By default (``False``) ``split_label`` uses an algorithm that is slightly optimized for performance and numerical precision. Set ``freesurfer`` to ``True`` in order to replicate label splits from FreeSurfer's ``mris_divide_parcellation``. Returns ------- labels : list of Label, shape (n_parts,) The labels, starting from the lowest to the highest end of the projection axis. Notes ----- Works by finding the label's principal eigen-axis on the spherical surface, projecting all label vertex coordinates onto this axis and dividing them at regular spatial intervals. 
""" from scipy import linalg label, subject, subjects_dir = _prep_label_split(label, subject, subjects_dir) # find the parts if np.isscalar(parts): n_parts = int(parts) if label.name.endswith(('lh', 'rh')): basename = label.name[:-3] name_ext = label.name[-3:] else: basename = label.name name_ext = '' name_pattern = "%s_div%%i%s" % (basename, name_ext) names = tuple(name_pattern % i for i in range(1, n_parts + 1)) else: names = parts n_parts = len(names) if n_parts < 2: raise ValueError("Can't split label into %i parts" % n_parts) # find the spherical surface surf_fname = '.'.join((label.hemi, 'sphere')) surf_path = op.join(subjects_dir, subject, "surf", surf_fname) surface_points, surface_tris = read_surface(surf_path) # find the label coordinates on the surface points = surface_points[label.vertices] center = np.mean(points, axis=0) centered_points = points - center # find the label's normal if freesurfer: # find the Freesurfer vertex closest to the center distance = np.sqrt(np.sum(centered_points ** 2, axis=1)) i_closest = np.argmin(distance) closest_vertex = label.vertices[i_closest] # find the normal according to freesurfer convention idx = np.any(surface_tris == closest_vertex, axis=1) tris_for_normal = surface_tris[idx] r1 = surface_points[tris_for_normal[:, 0], :] r2 = surface_points[tris_for_normal[:, 1], :] r3 = surface_points[tris_for_normal[:, 2], :] tri_normals = fast_cross_3d((r2 - r1), (r3 - r1)) normal = np.mean(tri_normals, axis=0) normal /= linalg.norm(normal) else: # Normal of the center normal = center / linalg.norm(center) # project all vertex coordinates on the tangential plane for this point q, _ = linalg.qr(normal[:, np.newaxis]) tangent_u = q[:, 1:] m_obs = np.dot(centered_points, tangent_u) # find principal eigendirection m_cov = np.dot(m_obs.T, m_obs) w, vr = linalg.eig(m_cov) i = np.argmax(w) eigendir = vr[:, i] # project back into 3d space axis = np.dot(tangent_u, eigendir) # orient them from posterior to anterior if axis[1] < 0: axis *= -1 # project the label on the axis proj = np.dot(points, axis) # assign mark (new label index) proj -= proj.min() proj /= (proj.max() / n_parts) mark = proj // 1 mark[mark == n_parts] = n_parts - 1 # colors if label.color is None: colors = (None,) * n_parts else: colors = _split_colors(label.color, n_parts) # construct new labels labels = [] for i, name, color in zip(range(n_parts), names, colors): idx = (mark == i) vert = label.vertices[idx] pos = label.pos[idx] values = label.values[idx] hemi = label.hemi comment = label.comment lbl = Label(vert, pos, values, hemi, comment, name, None, subject, color) labels.append(lbl) return labels def label_sign_flip(label, src): """Compute sign for label averaging. Parameters ---------- label : Label | BiHemiLabel A label. src : SourceSpaces The source space over which the label is defined. Returns ------- flip : array Sign flip vector (contains 1 or -1). 
""" from scipy import linalg if len(src) != 2: raise ValueError('Only source spaces with 2 hemisphers are accepted') lh_vertno = src[0]['vertno'] rh_vertno = src[1]['vertno'] # get source orientations ori = list() if label.hemi in ('lh', 'both'): vertices = label.vertices if label.hemi == 'lh' else label.lh.vertices vertno_sel = np.intersect1d(lh_vertno, vertices) ori.append(src[0]['nn'][vertno_sel]) if label.hemi in ('rh', 'both'): vertices = label.vertices if label.hemi == 'rh' else label.rh.vertices vertno_sel = np.intersect1d(rh_vertno, vertices) ori.append(src[1]['nn'][vertno_sel]) if len(ori) == 0: raise Exception('Unknown hemisphere type "%s"' % (label.hemi,)) ori = np.concatenate(ori, axis=0) if len(ori) == 0: return np.array([], int) _, _, Vh = linalg.svd(ori, full_matrices=False) # The sign of Vh is ambiguous, so we should align to the max-positive # (outward) direction dots = np.dot(ori, Vh[0]) if np.mean(dots) < 0: dots *= -1 # Comparing to the direction of the first right singular vector flip = np.sign(dots) return flip @verbose def stc_to_label(stc, src=None, smooth=True, connected=False, subjects_dir=None, verbose=None): """Compute a label from the non-zero sources in an stc object. Parameters ---------- stc : SourceEstimate The source estimates. src : SourceSpaces | str | None The source space over which the source estimates are defined. If it's a string it should the subject name (e.g. fsaverage). Can be None if stc.subject is not None. smooth : bool Fill in vertices on the cortical surface that are not in the source space based on the closest source space vertex (requires src to be a SourceSpace). connected : bool If True a list of connected labels will be returned in each hemisphere. The labels are ordered in decreasing order depending of the maximum value in the stc. %(subjects_dir)s %(verbose)s Returns ------- labels : list of Label | list of list of Label The generated labels. If connected is False, it returns a list of Labels (one per hemisphere). If no Label is available in a hemisphere, None is returned. If connected is True, it returns for each hemisphere a list of connected labels ordered in decreasing order depending of the maximum value in the stc. If no Label is available in an hemisphere, an empty list is returned. """ if not isinstance(smooth, bool): raise ValueError('smooth should be True or False. Got %s.' 
% smooth) src = stc.subject if src is None else src if src is None: raise ValueError('src cannot be None if stc.subject is None') if isinstance(src, str): subject = src else: subject = stc.subject if not isinstance(stc, SourceEstimate): raise ValueError('SourceEstimate should be surface source estimates') if isinstance(src, str): if connected: raise ValueError('The option to return only connected labels is ' 'only available if source spaces are provided.') if smooth: msg = ("stc_to_label with smooth=True requires src to be an " "instance of SourceSpace") raise ValueError(msg) subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) surf_path_from = op.join(subjects_dir, src, 'surf') rr_lh, tris_lh = read_surface(op.join(surf_path_from, 'lh.white')) rr_rh, tris_rh = read_surface(op.join(surf_path_from, 'rh.white')) rr = [rr_lh, rr_rh] tris = [tris_lh, tris_rh] else: if not isinstance(src, SourceSpaces): raise TypeError('src must be a string or a set of source spaces') if len(src) != 2: raise ValueError('source space should contain the 2 hemispheres') rr = [1e3 * src[0]['rr'], 1e3 * src[1]['rr']] tris = [src[0]['tris'], src[1]['tris']] src_conn = spatial_src_adjacency(src).tocsr() labels = [] cnt = 0 cnt_full = 0 for hemi_idx, (hemi, this_vertno, this_tris, this_rr) in enumerate( zip(['lh', 'rh'], stc.vertices, tris, rr)): this_data = stc.data[cnt:cnt + len(this_vertno)] if connected: # we know src *must* be a SourceSpaces now vertno = np.where(src[hemi_idx]['inuse'])[0] if not len(np.setdiff1d(this_vertno, vertno)) == 0: raise RuntimeError('stc contains vertices not present ' 'in source space, did you morph?') tmp = np.zeros((len(vertno), this_data.shape[1])) this_vertno_idx = np.searchsorted(vertno, this_vertno) tmp[this_vertno_idx] = this_data this_data = tmp offset = cnt_full + len(this_data) this_src_adj = src_conn[cnt_full:offset, cnt_full:offset].tocoo() this_data_abs_max = np.abs(this_data).max(axis=1) clusters, _ = _find_clusters(this_data_abs_max, 0., adjacency=this_src_adj) cnt_full += len(this_data) # Then order clusters in descending order based on maximum value clusters_max = np.argsort([np.max(this_data_abs_max[c]) for c in clusters])[::-1] clusters = [clusters[k] for k in clusters_max] clusters = [vertno[c] for c in clusters] else: clusters = [this_vertno[np.any(this_data, axis=1)]] cnt += len(this_vertno) clusters = [c for c in clusters if len(c) > 0] if len(clusters) == 0: if not connected: this_labels = None else: this_labels = [] else: this_labels = [] colors = _n_colors(len(clusters)) for c, color in zip(clusters, colors): idx_use = c label = Label(idx_use, this_rr[idx_use], None, hemi, 'Label from stc', subject=subject, color=color) if smooth: label = label.fill(src) this_labels.append(label) if not connected: this_labels = this_labels[0] labels.append(this_labels) return labels def _verts_within_dist(graph, sources, max_dist): """Find all vertices wihin a maximum geodesic distance from source. Parameters ---------- graph : scipy.sparse.csr_matrix Sparse matrix with distances between adjacent vertices. sources : list of int Source vertices. max_dist : float Maximum geodesic distance. Returns ------- verts : array Vertices within max_dist. dist : array Distances from source vertex. 
""" dist_map = {} verts_added_last = [] for source in sources: dist_map[source] = 0 verts_added_last.append(source) # add neighbors until no more neighbors within max_dist can be found while len(verts_added_last) > 0: verts_added = [] for i in verts_added_last: v_dist = dist_map[i] row = graph[i, :] neighbor_vert = row.indices neighbor_dist = row.data for j, d in zip(neighbor_vert, neighbor_dist): n_dist = v_dist + d if j in dist_map: if n_dist < dist_map[j]: dist_map[j] = n_dist else: if n_dist <= max_dist: dist_map[j] = n_dist # we found a new vertex within max_dist verts_added.append(j) verts_added_last = verts_added verts = np.sort(np.array(list(dist_map.keys()), int)) dist = np.array([dist_map[v] for v in verts], int) return verts, dist def _grow_labels(seeds, extents, hemis, names, dist, vert, subject): """Parallelize grow_labels.""" labels = [] for seed, extent, hemi, name in zip(seeds, extents, hemis, names): label_verts, label_dist = _verts_within_dist(dist[hemi], seed, extent) # create a label if len(seed) == 1: seed_repr = str(seed) else: seed_repr = ','.join(map(str, seed)) comment = 'Circular label: seed=%s, extent=%0.1fmm' % (seed_repr, extent) label = Label(vertices=label_verts, pos=vert[hemi][label_verts], values=label_dist, hemi=hemi, comment=comment, name=str(name), subject=subject) labels.append(label) return labels @fill_doc def grow_labels(subject, seeds, extents, hemis, subjects_dir=None, n_jobs=1, overlap=True, names=None, surface='white', colors=None): """Generate circular labels in source space with region growing. This function generates a number of labels in source space by growing regions starting from the vertices defined in "seeds". For each seed, a label is generated containing all vertices within a maximum geodesic distance on the white matter surface from the seed. Parameters ---------- subject : str Name of the subject as in SUBJECTS_DIR. seeds : int | list Seed, or list of seeds. Each seed can be either a vertex number or a list of vertex numbers. extents : array | float Extents (radius in mm) of the labels. hemis : array | int Hemispheres to use for the labels (0: left, 1: right). %(subjects_dir)s %(n_jobs)s Likely only useful if tens or hundreds of labels are being expanded simultaneously. Does not apply with ``overlap=False``. overlap : bool Produce overlapping labels. If True (default), the resulting labels can be overlapping. If False, each label will be grown one step at a time, and occupied territory will not be invaded. names : None | list of str Assign names to the new labels (list needs to have the same length as seeds). surface : str The surface used to grow the labels, defaults to the white surface. colors : array, shape (n, 4) or (, 4) | None How to assign colors to each label. If None then unique colors will be chosen automatically (default), otherwise colors will be broadcast from the array. The first three values will be interpreted as RGB colors and the fourth column as the alpha value (commonly 1). Returns ------- labels : list of Label The labels' ``comment`` attribute contains information on the seed vertex and extent; the ``values`` attribute contains distance from the seed in millimeters. Notes ----- "extents" and "hemis" can either be arrays with the same length as seeds, which allows using a different extent and hemisphere for label, or integers, in which case the same extent and hemisphere is used for each label. 
""" subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) n_jobs = check_n_jobs(n_jobs) # make sure the inputs are arrays if np.isscalar(seeds): seeds = [seeds] seeds = [np.atleast_1d(seed) for seed in seeds] extents = np.atleast_1d(extents) hemis = np.atleast_1d(hemis) n_seeds = len(seeds) if len(extents) != 1 and len(extents) != n_seeds: raise ValueError('The extents parameter has to be of length 1 or ' 'len(seeds)') if len(hemis) != 1 and len(hemis) != n_seeds: raise ValueError('The hemis parameter has to be of length 1 or ' 'len(seeds)') if colors is not None: if len(colors.shape) == 1: # if one color for all seeds n_colors = 1 n = colors.shape[0] else: n_colors, n = colors.shape if n_colors != n_seeds and n_colors != 1: msg = ('Number of colors (%d) and seeds (%d) are not compatible.' % (n_colors, n_seeds)) raise ValueError(msg) if n != 4: msg = 'Colors must have 4 values (RGB and alpha), not %d.' % n raise ValueError(msg) # make the arrays the same length as seeds if len(extents) == 1: extents = np.tile(extents, n_seeds) if len(hemis) == 1: hemis = np.tile(hemis, n_seeds) hemis = np.array(['lh' if h == 0 else 'rh' for h in hemis]) # names if names is None: names = ["Label_%i-%s" % items for items in enumerate(hemis)] else: if np.isscalar(names): names = [names] if len(names) != n_seeds: raise ValueError('The names parameter has to be None or have ' 'length len(seeds)') for i, hemi in enumerate(hemis): if not names[i].endswith(hemi): names[i] = '-'.join((names[i], hemi)) names = np.array(names) # load the surfaces and create the distance graphs tris, vert, dist = {}, {}, {} for hemi in set(hemis): surf_fname = op.join(subjects_dir, subject, 'surf', hemi + '.' + surface) vert[hemi], tris[hemi] = read_surface(surf_fname) dist[hemi] = mesh_dist(tris[hemi], vert[hemi]) if overlap: # create the patches parallel, my_grow_labels, _ = parallel_func(_grow_labels, n_jobs) seeds = np.array_split(np.array(seeds, dtype='O'), n_jobs) extents = np.array_split(extents, n_jobs) hemis = np.array_split(hemis, n_jobs) names = np.array_split(names, n_jobs) labels = sum(parallel(my_grow_labels(s, e, h, n, dist, vert, subject) for s, e, h, n in zip(seeds, extents, hemis, names)), []) else: # special procedure for non-overlapping labels labels = _grow_nonoverlapping_labels(subject, seeds, extents, hemis, vert, dist, names) if colors is None: # add a unique color to each label label_colors = _n_colors(len(labels)) else: # use specified colors label_colors = np.empty((len(labels), 4)) label_colors[:] = colors for label, color in zip(labels, label_colors): label.color = color return labels def _grow_nonoverlapping_labels(subject, seeds_, extents_, hemis, vertices_, graphs, names_): """Grow labels while ensuring that they don't overlap.""" labels = [] for hemi in set(hemis): hemi_index = (hemis == hemi) seeds = [seed for seed, h in zip(seeds_, hemis) if h == hemi] extents = extents_[hemi_index] names = names_[hemi_index] graph = graphs[hemi] # distance graph n_vertices = len(vertices_[hemi]) n_labels = len(seeds) # prepare parcellation parc = np.empty(n_vertices, dtype='int32') parc[:] = -1 # initialize active sources sources = {} # vert -> (label, dist_from_seed) edge = [] # queue of vertices to process for label, seed in enumerate(seeds): if np.any(parc[seed] >= 0): raise ValueError("Overlapping seeds") parc[seed] = label for s in np.atleast_1d(seed): sources[s] = (label, 0.) 
edge.append(s) # grow from sources while edge: vert_from = edge.pop(0) label, old_dist = sources[vert_from] # add neighbors within allowable distance row = graph[vert_from, :] for vert_to, dist in zip(row.indices, row.data): # Prevent adding a point that has already been used # (prevents infinite loop) if (vert_to == seeds[label]).any(): continue new_dist = old_dist + dist # abort if outside of extent if new_dist > extents[label]: continue vert_to_label = parc[vert_to] if vert_to_label >= 0: _, vert_to_dist = sources[vert_to] # abort if the vertex is occupied by a closer seed if new_dist > vert_to_dist: continue elif vert_to in edge: edge.remove(vert_to) # assign label value parc[vert_to] = label sources[vert_to] = (label, new_dist) edge.append(vert_to) # convert parc to labels for i in range(n_labels): vertices = np.nonzero(parc == i)[0] name = str(names[i]) label_ = Label(vertices, hemi=hemi, name=name, subject=subject) labels.append(label_) return labels @fill_doc def random_parcellation(subject, n_parcel, hemi, subjects_dir=None, surface='white', random_state=None): """Generate random cortex parcellation by growing labels. This function generates a number of labels which don't intersect and cover the whole surface. Regions are growing around randomly chosen seeds. Parameters ---------- subject : str Name of the subject as in SUBJECTS_DIR. n_parcel : int Total number of cortical parcels. hemi : str Hemisphere id (ie 'lh', 'rh', 'both'). In the case of 'both', both hemispheres are processed with (n_parcel // 2) parcels per hemisphere. %(subjects_dir)s surface : str The surface used to grow the labels, defaults to the white surface. %(random_state)s Returns ------- labels : list of Label Random cortex parcellation. """ subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) if hemi == 'both': hemi = ['lh', 'rh'] hemis = np.atleast_1d(hemi) # load the surfaces and create the distance graphs tris, vert, dist = {}, {}, {} for hemi in set(hemis): surf_fname = op.join(subjects_dir, subject, 'surf', hemi + '.' 
+ surface) vert[hemi], tris[hemi] = read_surface(surf_fname) dist[hemi] = mesh_dist(tris[hemi], vert[hemi]) # create the patches labels = _cortex_parcellation(subject, n_parcel, hemis, vert, dist, random_state) # add a unique color to each label colors = _n_colors(len(labels)) for label, color in zip(labels, colors): label.color = color return labels def _cortex_parcellation(subject, n_parcel, hemis, vertices_, graphs, random_state=None): """Random cortex parcellation.""" labels = [] rng = check_random_state(random_state) for hemi in set(hemis): parcel_size = len(hemis) * len(vertices_[hemi]) // n_parcel graph = graphs[hemi] # distance graph n_vertices = len(vertices_[hemi]) # prepare parcellation parc = np.full(n_vertices, -1, dtype='int32') # initialize active sources s = rng.choice(range(n_vertices)) label_idx = 0 edge = [s] # queue of vertices to process parc[s] = label_idx label_size = 1 rest = len(parc) - 1 # grow from sources while rest: # if there are not free neighbors, start new parcel if not edge: rest_idx = np.where(parc < 0)[0] s = rng.choice(rest_idx) edge = [s] label_idx += 1 label_size = 1 parc[s] = label_idx rest -= 1 vert_from = edge.pop(0) # add neighbors within allowable distance # row = graph[vert_from, :] # row_indices, row_data = row.indices, row.data sl = slice(graph.indptr[vert_from], graph.indptr[vert_from + 1]) row_indices, row_data = graph.indices[sl], graph.data[sl] for vert_to, dist in zip(row_indices, row_data): vert_to_label = parc[vert_to] # abort if the vertex is already occupied if vert_to_label >= 0: continue # abort if outside of extent if label_size > parcel_size: label_idx += 1 label_size = 1 edge = [vert_to] parc[vert_to] = label_idx rest -= 1 break # assign label value parc[vert_to] = label_idx label_size += 1 edge.append(vert_to) rest -= 1 # merging small labels # label adjacency matrix n_labels = label_idx + 1 label_sizes = np.empty(n_labels, dtype=int) label_conn = np.zeros([n_labels, n_labels], dtype='bool') for i in range(n_labels): vertices = np.nonzero(parc == i)[0] label_sizes[i] = len(vertices) neighbor_vertices = graph[vertices, :].indices neighbor_labels = np.unique(np.array(parc[neighbor_vertices])) label_conn[i, neighbor_labels] = 1 np.fill_diagonal(label_conn, 0) # merging label_id = range(n_labels) while n_labels > n_parcel // len(hemis): # smallest label and its smallest neighbor i = np.argmin(label_sizes) neighbors = np.nonzero(label_conn[i, :])[0] j = neighbors[np.argmin(label_sizes[neighbors])] # merging two labels label_conn[j, :] += label_conn[i, :] label_conn[:, j] += label_conn[:, i] label_conn = np.delete(label_conn, i, 0) label_conn = np.delete(label_conn, i, 1) label_conn[j, j] = 0 label_sizes[j] += label_sizes[i] label_sizes = np.delete(label_sizes, i, 0) n_labels -= 1 vertices = np.nonzero(parc == label_id[i])[0] parc[vertices] = label_id[j] label_id = np.delete(label_id, i, 0) # convert parc to labels for i in range(n_labels): vertices = np.nonzero(parc == label_id[i])[0] name = 'label_' + str(i) label_ = Label(vertices, hemi=hemi, name=name, subject=subject) labels.append(label_) return labels def _read_annot_cands(dir_name, raise_error=True): """List the candidate parcellations.""" if not op.isdir(dir_name): if not raise_error: return list() raise IOError('Directory for annotation does not exist: %s', dir_name) cands = os.listdir(dir_name) cands = sorted(set(c.replace('lh.', '').replace('rh.', '').replace( '.annot', '') for c in cands if '.annot' in c), key=lambda x: x.lower()) # exclude .ctab files cands = [c for c in 
cands if '.ctab' not in c] return cands def _read_annot(fname): """Read a Freesurfer annotation from a .annot file. Note : Copied from PySurfer Parameters ---------- fname : str Path to annotation file Returns ------- annot : numpy array, shape=(n_verts) Annotation id at each vertex ctab : numpy array, shape=(n_entries, 5) RGBA + label id colortable array names : list of str List of region names as stored in the annot file """ if not op.isfile(fname): dir_name = op.split(fname)[0] cands = _read_annot_cands(dir_name) if len(cands) == 0: raise IOError('No such file %s, no candidate parcellations ' 'found in directory' % fname) else: raise IOError('No such file %s, candidate parcellations in ' 'that directory:\n%s' % (fname, '\n'.join(cands))) with open(fname, "rb") as fid: n_verts = np.fromfile(fid, '>i4', 1)[0] data = np.fromfile(fid, '>i4', n_verts * 2).reshape(n_verts, 2) annot = data[data[:, 0], 1] ctab_exists = np.fromfile(fid, '>i4', 1)[0] if not ctab_exists: raise Exception('Color table not found in annotation file') n_entries = np.fromfile(fid, '>i4', 1)[0] if n_entries > 0: length = np.fromfile(fid, '>i4', 1)[0] np.fromfile(fid, '>c', length) # discard orig_tab names = list() ctab = np.zeros((n_entries, 5), np.int64) for i in range(n_entries): name_length = np.fromfile(fid, '>i4', 1)[0] name = np.fromfile(fid, "|S%d" % name_length, 1)[0] names.append(name) ctab[i, :4] = np.fromfile(fid, '>i4', 4) ctab[i, 4] = (ctab[i, 0] + ctab[i, 1] * (2 ** 8) + ctab[i, 2] * (2 ** 16) + ctab[i, 3] * (2 ** 24)) else: ctab_version = -n_entries if ctab_version != 2: raise Exception('Color table version not supported') n_entries = np.fromfile(fid, '>i4', 1)[0] ctab = np.zeros((n_entries, 5), np.int64) length = np.fromfile(fid, '>i4', 1)[0] np.fromfile(fid, "|S%d" % length, 1) # Orig table path entries_to_read = np.fromfile(fid, '>i4', 1)[0] names = list() for i in range(entries_to_read): np.fromfile(fid, '>i4', 1) # Structure name_length = np.fromfile(fid, '>i4', 1)[0] name = np.fromfile(fid, "|S%d" % name_length, 1)[0] names.append(name) ctab[i, :4] = np.fromfile(fid, '>i4', 4) ctab[i, 4] = (ctab[i, 0] + ctab[i, 1] * (2 ** 8) + ctab[i, 2] * (2 ** 16)) # convert to more common alpha value ctab[:, 3] = 255 - ctab[:, 3] return annot, ctab, names def _get_annot_fname(annot_fname, subject, hemi, parc, subjects_dir): """Get the .annot filenames and hemispheres.""" if annot_fname is not None: # we use use the .annot file specified by the user hemis = [op.basename(annot_fname)[:2]] if hemis[0] not in ['lh', 'rh']: raise ValueError('Could not determine hemisphere from filename, ' 'filename has to start with "lh" or "rh".') annot_fname = [annot_fname] else: # construct .annot file names for requested subject, parc, hemi _check_option('hemi', hemi, ['lh', 'rh', 'both']) if hemi == 'both': hemis = ['lh', 'rh'] else: hemis = [hemi] subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) dst = op.join(subjects_dir, subject, 'label', '%%s.%s.annot' % parc) annot_fname = [dst % hemi_ for hemi_ in hemis] return annot_fname, hemis def _load_vert_pos(subject, subjects_dir, surf_name, hemi, n_expected, extra=''): fname_surf = op.join(subjects_dir, subject, 'surf', '%s.%s' % (hemi, surf_name)) vert_pos, _ = read_surface(fname_surf) vert_pos /= 1e3 # the positions in labels are in meters if len(vert_pos) != n_expected: raise RuntimeError('Number of surface vertices (%s) for subject %s' ' does not match the expected number of vertices' '(%s)%s' % (len(vert_pos), subject, n_expected, extra)) return vert_pos @verbose 
def read_labels_from_annot(subject, parc='aparc', hemi='both', surf_name='white', annot_fname=None, regexp=None, subjects_dir=None, sort=True, verbose=None): """Read labels from a FreeSurfer annotation file. Note: Only cortical labels will be returned. Parameters ---------- subject : str The subject for which to read the parcellation. parc : str The parcellation to use, e.g., 'aparc' or 'aparc.a2009s'. hemi : str The hemisphere from which to read the parcellation, can be 'lh', 'rh', or 'both'. surf_name : str Surface used to obtain vertex locations, e.g., 'white', 'pial'. annot_fname : str or None Filename of the .annot file. If not None, only this file is read and 'parc' and 'hemi' are ignored. regexp : str Regular expression or substring to select particular labels from the parcellation. E.g. 'superior' will return all labels in which this substring is contained. %(subjects_dir)s sort : bool If true, labels will be sorted by name before being returned. .. versionadded:: 0.21.0 %(verbose)s Returns ------- labels : list of Label The labels, sorted by label name (ascending). See Also -------- write_labels_to_annot morph_labels """ logger.info('Reading labels from parcellation...') subjects_dir = get_subjects_dir(subjects_dir) # get the .annot filenames and hemispheres annot_fname, hemis = _get_annot_fname(annot_fname, subject, hemi, parc, subjects_dir) if regexp is not None: # allow for convenient substring match r_ = (re.compile('.*%s.*' % regexp if regexp.replace('_', '').isalnum() else regexp)) # now we are ready to create the labels n_read = 0 labels = list() orig_names = set() for fname, hemi in zip(annot_fname, hemis): # read annotation annot, ctab, label_names = _read_annot(fname) label_rgbas = ctab[:, :4] / 255. label_ids = ctab[:, -1] # load the vertex positions from surface vert_pos = _load_vert_pos( subject, subjects_dir, surf_name, hemi, len(annot), extra='for annotation file %s' % fname) for label_id, label_name, label_rgba in\ zip(label_ids, label_names, label_rgbas): vertices = np.where(annot == label_id)[0] if len(vertices) == 0: # label is not part of cortical surface continue label_name = label_name.decode() orig_names.add(label_name) name = f'{label_name}-{hemi}' if (regexp is not None) and not r_.match(name): continue pos = vert_pos[vertices, :] label = Label(vertices, pos, hemi=hemi, name=name, subject=subject, color=tuple(label_rgba)) labels.append(label) n_read = len(labels) - n_read logger.info(' read %d labels from %s' % (n_read, fname)) # sort the labels by label name if sort: labels = sorted(labels, key=lambda l: l.name) if len(labels) == 0: msg = 'No labels found.' if regexp is not None: orig_names = '\n'.join(sorted(orig_names)) msg += (f' Maybe the regular expression {repr(regexp)} did not ' f'match any of:\n{orig_names}') raise RuntimeError(msg) return labels def _check_labels_subject(labels, subject, name): _validate_type(labels, (list, tuple), 'labels') for label in labels: _validate_type(label, Label, 'each entry in labels') if subject is None: subject = label.subject if subject is not None: # label.subject can be None, depending on init if subject != label.subject: raise ValueError('Got multiple values of %s: %s and %s' % (name, subject, label.subject)) if subject is None: raise ValueError('if label.subject is None for all labels, ' '%s must be provided' % name) return subject @verbose def morph_labels(labels, subject_to, subject_from=None, subjects_dir=None, surf_name='white', verbose=None): """Morph a set of labels. 
This is useful when morphing a set of non-overlapping labels (such as those obtained with :func:`read_labels_from_annot`) from one subject to another. Parameters ---------- labels : list The labels to morph. subject_to : str The subject to morph labels to. subject_from : str | None The subject to morph labels from. Can be None if the labels have the ``.subject`` property defined. %(subjects_dir)s surf_name : str Surface used to obtain vertex locations, e.g., 'white', 'pial'. %(verbose)s Returns ------- labels : list The morphed labels. See Also -------- read_labels_from_annot mne.Label.morph Notes ----- This does not use the same algorithm as Freesurfer, so the results morphing (e.g., from ``'fsaverage'`` to your subject) might not match what Freesurfer produces during ``recon-all``. .. versionadded:: 0.18 """ subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) subject_from = _check_labels_subject(labels, subject_from, 'subject_from') mmaps = read_morph_map(subject_from, subject_to, subjects_dir) vert_poss = [_load_vert_pos(subject_to, subjects_dir, surf_name, hemi, mmap.shape[0]) for hemi, mmap in zip(('lh', 'rh'), mmaps)] idxs = [mmap.argmax(axis=1) for mmap in mmaps] out_labels = list() values = filename = None for label in labels: li = dict(lh=0, rh=1)[label.hemi] vertices = np.where(np.in1d(idxs[li], label.vertices))[0] pos = vert_poss[li][vertices] out_labels.append( Label(vertices, pos, values, label.hemi, label.comment, label.name, filename, subject_to, label.color, label.verbose)) return out_labels @verbose def labels_to_stc(labels, values, tmin=0, tstep=1, subject=None, src=None, verbose=None): """Convert a set of labels and values to a STC. This function is meant to work like the opposite of `extract_label_time_course`. Parameters ---------- %(eltc_labels)s values : ndarray, shape (n_labels, ...) The values in each label. Can be 1D or 2D. tmin : float The tmin to use for the STC. tstep : float The tstep to use for the STC. subject : str | None The subject for which to create the STC. %(eltc_src)s Can be omitted if using a surface source space, in which case the label vertices will determine the output STC vertices. Required if using a volumetric source space. .. versionadded:: 0.22 %(verbose)s Returns ------- stc : instance of SourceEstimate | instance of VolSourceEstimate The values-in-labels converted to a STC. See Also -------- extract_label_time_course Notes ----- Vertices that appear in more than one label will be averaged. .. 
versionadded:: 0.18 """ values = np.array(values, float) if values.ndim == 1: values = values[:, np.newaxis] if values.ndim != 2: raise ValueError('values must have 1 or 2 dimensions, got %s' % (values.ndim,)) _validate_type(src, (SourceSpaces, None)) if src is None: data, vertices, subject = _labels_to_stc_surf( labels, values, tmin, tstep, subject) klass = SourceEstimate else: kind = src.kind subject = _check_subject( src._subject, subject, first_kind='source space subject', raise_error=False) _check_option('source space kind', kind, ('surface', 'volume')) if kind == 'volume': klass = VolSourceEstimate else: klass = SourceEstimate # Easiest way is to get a dot-able operator and use it vertices = [s['vertno'].copy() for s in src] stc = klass( np.eye(sum(len(v) for v in vertices)), vertices, 0, 1, subject) label_op = extract_label_time_course( stc, labels, src=src, mode='mean', allow_empty=True) _check_values_labels(values, label_op.shape[0]) rev_op = np.zeros(label_op.shape[::-1]) rev_op[np.arange(label_op.shape[1]), np.argmax(label_op, axis=0)] = 1. data = rev_op @ values return klass(data, vertices, tmin, tstep, subject, verbose) def _check_values_labels(values, n_labels): if n_labels != len(values): raise ValueError( f'values.shape[0] ({values.shape[0]}) must match the number of ' f'labels ({n_labels})') def _labels_to_stc_surf(labels, values, tmin, tstep, subject): from scipy import sparse subject = _check_labels_subject(labels, subject, 'subject') _check_values_labels(values, len(labels)) vertices = dict(lh=[], rh=[]) data = dict(lh=[], rh=[]) for li, label in enumerate(labels): data[label.hemi].append( np.repeat(values[li][np.newaxis], len(label.vertices), axis=0)) vertices[label.hemi].append(label.vertices) hemis = ('lh', 'rh') for hemi in hemis: vertices[hemi] = np.concatenate(vertices[hemi], axis=0) data[hemi] = np.concatenate(data[hemi], axis=0).astype(float) cols = np.arange(len(vertices[hemi])) vertices[hemi], rows = np.unique(vertices[hemi], return_inverse=True) mat = sparse.coo_matrix((np.ones(len(rows)), (rows, cols))).tocsr() mat = mat * sparse.diags(1. 
/ np.asarray(mat.sum(axis=-1))[:, 0]) data[hemi] = mat.dot(data[hemi]) vertices = [vertices[hemi] for hemi in hemis] data = np.concatenate([data[hemi] for hemi in hemis], axis=0) return data, vertices, subject _DEFAULT_TABLE_NAME = 'MNE-Python Colortable' def _write_annot(fname, annot, ctab, names, table_name=_DEFAULT_TABLE_NAME): """Write a Freesurfer annotation to a .annot file.""" assert len(names) == len(ctab) with open(fname, 'wb') as fid: n_verts = len(annot) np.array(n_verts, dtype='>i4').tofile(fid) data = np.zeros((n_verts, 2), dtype='>i4') data[:, 0] = np.arange(n_verts) data[:, 1] = annot data.ravel().tofile(fid) # indicate that color table exists np.array(1, dtype='>i4').tofile(fid) # color table version 2 np.array(-2, dtype='>i4').tofile(fid) # write color table n_entries = len(ctab) np.array(n_entries, dtype='>i4').tofile(fid) # write our color table name _write_annot_str(fid, table_name) # number of entries to write np.array(n_entries, dtype='>i4').tofile(fid) # write entries for ii, (name, color) in enumerate(zip(names, ctab)): np.array(ii, dtype='>i4').tofile(fid) _write_annot_str(fid, name) np.array(color[:4], dtype='>i4').tofile(fid) def _write_annot_str(fid, s): s = s.encode('ascii') + b'\x00' np.array(len(s), '>i4').tofile(fid) fid.write(s) @verbose def write_labels_to_annot(labels, subject=None, parc=None, overwrite=False, subjects_dir=None, annot_fname=None, colormap='hsv', hemi='both', sort=True, table_name=_DEFAULT_TABLE_NAME, verbose=None): r"""Create a FreeSurfer annotation from a list of labels. Parameters ---------- labels : list with instances of mne.Label The labels to create a parcellation from. subject : str | None The subject for which to write the parcellation. parc : str | None The parcellation name to use. overwrite : bool Overwrite files if they already exist. %(subjects_dir)s annot_fname : str | None Filename of the .annot file. If not None, only this file is written and 'parc' and 'subject' are ignored. colormap : str Colormap to use to generate label colors for labels that do not have a color specified. hemi : 'both' | 'lh' | 'rh' The hemisphere(s) for which to write \*.annot files (only applies if annot_fname is not specified; default is 'both'). sort : bool If True (default), labels will be sorted by name before writing. .. versionadded:: 0.21.0 table_name : str The table name to use for the colortable. .. versionadded:: 0.21.0 %(verbose)s See Also -------- read_labels_from_annot Notes ----- Vertices that are not covered by any of the labels are assigned to a label named "unknown". """ logger.info('Writing labels to parcellation...') subjects_dir = get_subjects_dir(subjects_dir) # get the .annot filenames and hemispheres annot_fname, hemis = _get_annot_fname(annot_fname, subject, hemi, parc, subjects_dir) if not overwrite: for fname in annot_fname: if op.exists(fname): raise ValueError('File %s exists. 
Use "overwrite=True" to ' 'overwrite it' % fname) # prepare container for data to save: to_save = [] # keep track of issues found in the labels duplicate_colors = [] invalid_colors = [] overlap = [] no_color = (-1, -1, -1, -1) no_color_rgb = (-1, -1, -1) for hemi, fname in zip(hemis, annot_fname): hemi_labels = [label for label in labels if label.hemi == hemi] n_hemi_labels = len(hemi_labels) if n_hemi_labels == 0: ctab = np.empty((0, 4), dtype=np.int32) ctab_rgb = ctab[:, :3] else: if sort: hemi_labels.sort(key=lambda label: label.name) # convert colors to 0-255 RGBA tuples hemi_colors = [no_color if label.color is None else tuple(int(round(255 * i)) for i in label.color) for label in hemi_labels] ctab = np.array(hemi_colors, dtype=np.int32) ctab_rgb = ctab[:, :3] # make color dict (for annot ID, only R, G and B count) labels_by_color = defaultdict(list) for label, color in zip(hemi_labels, ctab_rgb): labels_by_color[tuple(color)].append(label.name) # check label colors for color, names in labels_by_color.items(): if color == no_color_rgb: continue if color == (0, 0, 0): # we cannot have an all-zero color, otherw. e.g. tksurfer # refuses to read the parcellation warn('At least one label contains a color with, "r=0, ' 'g=0, b=0" value. Some FreeSurfer tools may fail ' 'to read the parcellation') if any(i > 255 for i in color): msg = ("%s: %s (%s)" % (color, ', '.join(names), hemi)) invalid_colors.append(msg) if len(names) > 1: msg = "%s: %s (%s)" % (color, ', '.join(names), hemi) duplicate_colors.append(msg) # replace None values (labels with unspecified color) if labels_by_color[no_color_rgb]: default_colors = _n_colors(n_hemi_labels, bytes_=True, cmap=colormap) # keep track of colors known to be in hemi_colors : safe_color_i = 0 for i in range(n_hemi_labels): if ctab[i, 0] == -1: color = default_colors[i] # make sure to add no duplicate color while np.any(np.all(color[:3] == ctab_rgb, 1)): color = default_colors[safe_color_i] safe_color_i += 1 # assign the color ctab[i] = color # find number of vertices in surface if subject is not None and subjects_dir is not None: fpath = op.join(subjects_dir, subject, 'surf', '%s.white' % hemi) points, _ = read_surface(fpath) n_vertices = len(points) else: if len(hemi_labels) > 0: max_vert = max(np.max(label.vertices) for label in hemi_labels) n_vertices = max_vert + 1 else: n_vertices = 1 warn('Number of vertices in the surface could not be ' 'verified because the surface file could not be found; ' 'specify subject and subjects_dir parameters.') # Create annot and color table array to write annot = np.empty(n_vertices, dtype=np.int64) annot[:] = -1 # create the annotation ids from the colors annot_id_coding = np.array((1, 2 ** 8, 2 ** 16)) annot_ids = list(np.sum(ctab_rgb * annot_id_coding, axis=1)) for label, annot_id in zip(hemi_labels, annot_ids): # make sure the label is not overwriting another label if np.any(annot[label.vertices] != -1): other_ids = set(annot[label.vertices]) other_ids.discard(-1) other_indices = (annot_ids.index(i) for i in other_ids) other_names = (hemi_labels[i].name for i in other_indices) other_repr = ', '.join(other_names) msg = "%s: %s overlaps %s" % (hemi, label.name, other_repr) overlap.append(msg) annot[label.vertices] = annot_id hemi_names = [label.name for label in hemi_labels] if None in hemi_names: msg = ("Found %i labels with no name. Writing annotation file" "requires all labels named" % (hemi_names.count(None))) # raise the error immediately rather than crash with an # uninformative error later (e.g. 
cannot join NoneType) raise ValueError(msg) # Assign unlabeled vertices to an "unknown" label unlabeled = (annot == -1) if np.any(unlabeled): msg = ("Assigning %i unlabeled vertices to " "'unknown-%s'" % (unlabeled.sum(), hemi)) logger.info(msg) # find an unused color (try shades of gray first) for i in range(1, 257): if not np.any(np.all((i, i, i) == ctab_rgb, 1)): break if i < 256: color = (i, i, i, 0) else: err = ("Need one free shade of gray for 'unknown' label. " "Please modify your label colors, or assign the " "unlabeled vertices to another label.") raise ValueError(err) # find the id annot_id = np.sum(annot_id_coding * color[:3]) # update data to write annot[unlabeled] = annot_id ctab = np.vstack((ctab, color)) hemi_names.append("unknown") # convert to FreeSurfer alpha values ctab[:, 3] = 255 - ctab[:, 3] # remove hemi ending in names hemi_names = [name[:-3] if name.endswith(hemi) else name for name in hemi_names] to_save.append((fname, annot, ctab, hemi_names)) issues = [] if duplicate_colors: msg = ("Some labels have the same color values (all labels in one " "hemisphere must have a unique color):") duplicate_colors.insert(0, msg) issues.append('\n'.join(duplicate_colors)) if invalid_colors: msg = ("Some labels have invalid color values (all colors should be " "RGBA tuples with values between 0 and 1)") invalid_colors.insert(0, msg) issues.append('\n'.join(invalid_colors)) if overlap: msg = ("Some labels occupy vertices that are also occupied by one or " "more other labels. Each vertex can only be occupied by a " "single label in *.annot files.") overlap.insert(0, msg) issues.append('\n'.join(overlap)) if issues: raise ValueError('\n\n'.join(issues)) # write it for fname, annot, ctab, hemi_names in to_save: logger.info(' writing %d labels to %s' % (len(hemi_names), fname)) _write_annot(fname, annot, ctab, hemi_names, table_name) @fill_doc def select_sources(subject, label, location='center', extent=0., grow_outside=True, subjects_dir=None, name=None, random_state=None, surf='white'): """Select sources from a label. Parameters ---------- %(subject)s label : instance of Label | str Define where the seed will be chosen. If str, can be 'lh' or 'rh', which correspond to left or right hemisphere, respectively. location : 'random' | 'center' | int Location to grow label from. If the location is an int, it represents the vertex number in the corresponding label. If it is a str, it can be either 'random' or 'center'. extent : float Extents (radius in mm) of the labels, i.e. maximum geodesic distance on the white matter surface from the seed. If 0, the resulting label will contain only one vertex. grow_outside : bool Let the region grow outside the original label where location was defined. %(subjects_dir)s name : None | str Assign name to the new label. %(random_state)s surf : str The surface used to simulated the label, defaults to the white surface. Returns ------- label : instance of Label The label that contains the selected sources. Notes ----- This function selects a region of interest on the cortical surface based on a label (or a hemisphere). The sources are selected by growing a region around a seed which is selected randomly, is the center of the label, or is a specific vertex. The selected vertices can extend beyond the initial provided label. This can be prevented by setting grow_outside to False. The selected sources are returned in the form of a new Label object. The values of the label contain the distance from the seed in millimeters. .. 
versionadded:: 0.18 """ # If label is a string, convert it to a label that contains the whole # hemisphere. if isinstance(label, str): _check_option('label', label, ['lh', 'rh']) surf_filename = op.join(subjects_dir, subject, 'surf', label + '.white') vertices, _ = read_surface(surf_filename) indices = np.arange(len(vertices), dtype=int) label = Label(indices, vertices, hemi=label) # Choose the seed according to the selected strategy. if isinstance(location, str): _check_option('location', location, ['center', 'random']) if location == 'center': seed = label.center_of_mass( subject, restrict_vertices=True, subjects_dir=subjects_dir, surf=surf) else: rng = check_random_state(random_state) seed = rng.choice(label.vertices) else: seed = label.vertices[location] hemi = 0 if label.hemi == 'lh' else 1 new_label = grow_labels(subject, seed, extent, hemi, subjects_dir)[0] # We override the name because grow_label automatically adds a -rh or -lh # to the given parameter. new_label.name = name # Restrict the new label to the vertices of the input label if needed. if not grow_outside: to_keep = np.array([v in label.vertices for v in new_label.vertices]) new_label = Label(new_label.vertices[to_keep], new_label.pos[to_keep], hemi=new_label.hemi, name=name, subject=subject) return new_label def find_pos_in_annot(pos, subject='fsaverage', annot='aparc+aseg', subjects_dir=None): """ Find name in atlas for given MRI coordinates. Parameters ---------- pos : ndarray, shape (3,) Vector of x,y,z coordinates in MRI space. subject : str MRI subject name. annot : str MRI volumetric atlas file name. Do not include the ``.mgz`` suffix. subjects_dir : path-like Path to MRI subjects directory. Returns ------- label : str Anatomical region name from atlas. Notes ----- .. versionadded:: 0.24 """ pos = np.asarray(pos, float) if pos.shape != (3,): raise ValueError( 'pos must be an array of shape (3,), ' f'got {pos.shape}') nibabel = _import_nibabel('read MRI parcellations') if subjects_dir is None: subjects_dir = get_subjects_dir(None) atlas_fname = os.path.join(subjects_dir, subject, 'mri', annot + '.mgz') parcellation_img = nibabel.load(atlas_fname) # Load freesurface atlas LUT lut_inv_dict = read_freesurfer_lut()[0] label_lut = {v: k for k, v in lut_inv_dict.items()} # Find voxel for dipole position mri_vox_t = np.linalg.inv(parcellation_img.header.get_vox2ras_tkr()) vox_dip_pos_f = apply_trans(mri_vox_t, pos) vox_dip_pos = np.rint(vox_dip_pos_f).astype(int) # Get voxel value and label from LUT vol_values = parcellation_img.get_fdata()[tuple(vox_dip_pos.T)] label = label_lut.get(vol_values, 'Unknown') return label
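For orientation, a hedged usage sketch of the label API defined above follows. It is not part of the original module; the subject name ('sample'), the 'aparc' parcellation, and the subjects_dir path are illustrative placeholders assuming a standard FreeSurfer subjects directory.

# Usage sketch only -- subject name and paths below are hypothetical.
import mne

subjects_dir = '/path/to/subjects'  # hypothetical FreeSurfer SUBJECTS_DIR
# Read all left-hemisphere labels from an annotation file
labels = mne.read_labels_from_annot('sample', parc='aparc', hemi='lh',
                                    subjects_dir=subjects_dir)
# Split one label into two parts along its principal eigen-axis
halves = labels[0].split(parts=2, subjects_dir=subjects_dir)
# Grow a circular 10 mm label around a seed vertex on the left hemisphere
grown = mne.grow_labels('sample', seeds=[0], extents=10., hemis=0,
                        subjects_dir=subjects_dir)[0]
# Round-trip a label through the FreeSurfer .label format
mne.write_label('/tmp/example-lh.label', halves[0])
reread = mne.read_label('/tmp/example-lh.label', subject='sample')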
# Author: Martin Luessi <mluessi@nmr.mgh.harvard.edu> # # License: Simplified BSD import os.path as op import pytest import numpy as np from numpy.testing import assert_array_almost_equal, assert_allclose import mne from mne.datasets import testing from mne import (read_cov, read_forward_solution, read_evokeds, convert_forward_solution, VectorSourceEstimate) from mne.cov import regularize from mne.inverse_sparse import gamma_map from mne.inverse_sparse.mxne_inverse import make_stc_from_dipoles from mne.minimum_norm.tests.test_inverse import (assert_stc_res, assert_var_exp_log) from mne import pick_types_forward from mne.utils import assert_stcs_equal, catch_logging from mne.dipole import Dipole data_path = testing.data_path(download=False) fname_evoked = op.join(data_path, 'MEG', 'sample', 'sample_audvis-ave.fif') fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis-cov.fif') fname_fwd = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif') subjects_dir = op.join(data_path, 'subjects') def _check_stc(stc, evoked, idx, hemi, fwd, dist_limit=0., ratio=50., res=None, atol=1e-20): """Check correctness.""" assert_array_almost_equal(stc.times, evoked.times, 5) stc_orig = stc if isinstance(stc, VectorSourceEstimate): assert stc.data.any(1).any(1).all() # all dipoles should have some stc = stc.magnitude() amps = np.sum(stc.data ** 2, axis=1) order = np.argsort(amps)[::-1] amps = amps[order] verts = np.concatenate(stc.vertices)[order] hemi_idx = int(order[0] >= len(stc.vertices[1])) hemis = ['lh', 'rh'] assert hemis[hemi_idx] == hemi dist = np.linalg.norm(np.diff(fwd['src'][hemi_idx]['rr'][[idx, verts[0]]], axis=0)[0]) * 1000. assert dist <= dist_limit assert amps[0] > ratio * amps[1] if res is not None: assert_stc_res(evoked, stc_orig, fwd, res, atol=atol) @pytest.mark.slowtest @testing.requires_testing_data def test_gamma_map_standard(): """Test Gamma MAP inverse.""" forward = read_forward_solution(fname_fwd) forward = convert_forward_solution(forward, surf_ori=True) forward = pick_types_forward(forward, meg=False, eeg=True) evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0), proj=False) evoked.resample(50, npad=100) evoked.crop(tmin=0.1, tmax=0.14) # crop to window around peak cov = read_cov(fname_cov) cov = regularize(cov, evoked.info, rank=None) alpha = 0.5 with catch_logging() as log: stc = gamma_map(evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=True, update_mode=1, verbose=True) _check_stc(stc, evoked, 68477, 'lh', fwd=forward) assert_var_exp_log(log.getvalue(), 20, 22) with catch_logging() as log: stc_vec, res = gamma_map( evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=True, update_mode=1, pick_ori='vector', return_residual=True, verbose=True) assert_var_exp_log(log.getvalue(), 20, 22) assert_stcs_equal(stc_vec.magnitude(), stc) _check_stc(stc_vec, evoked, 68477, 'lh', fwd=forward, res=res) stc, res = gamma_map( evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=False, update_mode=1, pick_ori='vector', return_residual=True) _check_stc(stc, evoked, 82010, 'lh', fwd=forward, dist_limit=6., ratio=2., res=res) with catch_logging() as log: dips = gamma_map(evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=False, update_mode=1, return_as_dipoles=True, verbose=True) exp_var = assert_var_exp_log(log.getvalue(), 58, 60) dip_exp_var = np.mean(sum(dip.gof for dip in dips)) assert_allclose(exp_var, dip_exp_var, atol=10) # not really equiv, close assert (isinstance(dips[0], Dipole)) stc_dip = make_stc_from_dipoles(dips, 
forward['src']) assert_stcs_equal(stc.magnitude(), stc_dip) # force fixed orientation stc, res = gamma_map(evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=False, update_mode=2, loose=0, return_residual=True) _check_stc(stc, evoked, 85739, 'lh', fwd=forward, ratio=20., res=res) @pytest.mark.slowtest @testing.requires_testing_data def test_gamma_map_vol_sphere(): """Gamma MAP with a sphere forward and volumic source space.""" evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0), proj=False) evoked.resample(50, npad=100) evoked.crop(tmin=0.1, tmax=0.16) # crop to window around peak cov = read_cov(fname_cov) cov = regularize(cov, evoked.info, rank=None) info = evoked.info sphere = mne.make_sphere_model(r0=(0., 0., 0.), head_radius=0.080) src = mne.setup_volume_source_space(subject=None, pos=30., mri=None, sphere=(0.0, 0.0, 0.0, 0.08), bem=None, mindist=5.0, exclude=2.0, sphere_units='m') fwd = mne.make_forward_solution(info, trans=None, src=src, bem=sphere, eeg=False, meg=True) alpha = 0.5 pytest.raises(ValueError, gamma_map, evoked, fwd, cov, alpha, loose=0, return_residual=False) pytest.raises(ValueError, gamma_map, evoked, fwd, cov, alpha, loose=0.2, return_residual=False) stc = gamma_map(evoked, fwd, cov, alpha, tol=1e-4, xyz_same_gamma=False, update_mode=2, return_residual=False) assert_array_almost_equal(stc.times, evoked.times, 5) # Compare orientation obtained using fit_dipole and gamma_map # for a simulated evoked containing a single dipole stc = mne.VolSourceEstimate(50e-9 * np.random.RandomState(42).randn(1, 4), vertices=[stc.vertices[0][:1]], tmin=stc.tmin, tstep=stc.tstep) evoked_dip = mne.simulation.simulate_evoked(fwd, stc, info, cov, nave=1e9, use_cps=True) dip_gmap = gamma_map(evoked_dip, fwd, cov, 0.1, return_as_dipoles=True) amp_max = [np.max(d.amplitude) for d in dip_gmap] dip_gmap = dip_gmap[np.argmax(amp_max)] assert (dip_gmap[0].pos[0] in src[0]['rr'][stc.vertices[0]]) dip_fit = mne.fit_dipole(evoked_dip, cov, sphere)[0] assert (np.abs(np.dot(dip_fit.ori[0], dip_gmap.ori[0])) > 0.99)
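A condensed sketch of the gamma_map call pattern these tests exercise, assuming the MNE testing dataset referenced by fname_evoked, fname_fwd and fname_cov above is available on disk:

from mne import read_cov, read_evokeds, read_forward_solution
from mne.inverse_sparse import gamma_map

evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0))
forward = read_forward_solution(fname_fwd)
cov = read_cov(fname_cov)
# Same hyperparameters as the standard test above.
stc = gamma_map(evoked, forward, cov, alpha=0.5, tol=1e-4,
                xyz_same_gamma=True, update_mode=1)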
rkmaddox/mne-python
mne/inverse_sparse/tests/test_gamma_map.py
mne/label.py
from typing import List, cast import numpy as np from pandas._typing import FilePathOrBuffer, Scalar, StorageOptions from pandas.compat._optional import import_optional_dependency import pandas as pd from pandas.io.excel._base import BaseExcelReader class ODFReader(BaseExcelReader): """ Read tables out of OpenDocument formatted files. Parameters ---------- filepath_or_buffer : string, path to be parsed or an open readable stream. storage_options : dict, optional passed to fsspec for appropriate URLs (see ``_get_filepath_or_buffer``) """ def __init__( self, filepath_or_buffer: FilePathOrBuffer, storage_options: StorageOptions = None, ): import_optional_dependency("odf") super().__init__(filepath_or_buffer, storage_options=storage_options) @property def _workbook_class(self): from odf.opendocument import OpenDocument return OpenDocument def load_workbook(self, filepath_or_buffer: FilePathOrBuffer): from odf.opendocument import load return load(filepath_or_buffer) @property def empty_value(self) -> str: """Property for compat with other readers.""" return "" @property def sheet_names(self) -> List[str]: """Return a list of sheet names present in the document""" from odf.table import Table tables = self.book.getElementsByType(Table) return [t.getAttribute("name") for t in tables] def get_sheet_by_index(self, index: int): from odf.table import Table tables = self.book.getElementsByType(Table) return tables[index] def get_sheet_by_name(self, name: str): from odf.table import Table tables = self.book.getElementsByType(Table) for table in tables: if table.getAttribute("name") == name: return table self.close() raise ValueError(f"sheet {name} not found") def get_sheet_data(self, sheet, convert_float: bool) -> List[List[Scalar]]: """ Parse an ODF Table into a list of lists """ from odf.table import CoveredTableCell, TableCell, TableRow covered_cell_name = CoveredTableCell().qname table_cell_name = TableCell().qname cell_names = {covered_cell_name, table_cell_name} sheet_rows = sheet.getElementsByType(TableRow) empty_rows = 0 max_row_len = 0 table: List[List[Scalar]] = [] for i, sheet_row in enumerate(sheet_rows): sheet_cells = [x for x in sheet_row.childNodes if x.qname in cell_names] empty_cells = 0 table_row: List[Scalar] = [] for j, sheet_cell in enumerate(sheet_cells): if sheet_cell.qname == table_cell_name: value = self._get_cell_value(sheet_cell, convert_float) else: value = self.empty_value column_repeat = self._get_column_repeat(sheet_cell) # Queue up empty values, writing only if content succeeds them if value == self.empty_value: empty_cells += column_repeat else: table_row.extend([self.empty_value] * empty_cells) empty_cells = 0 table_row.extend([value] * column_repeat) if max_row_len < len(table_row): max_row_len = len(table_row) row_repeat = self._get_row_repeat(sheet_row) if self._is_empty_row(sheet_row): empty_rows += row_repeat else: # add blank rows to our table table.extend([[self.empty_value]] * empty_rows) empty_rows = 0 for _ in range(row_repeat): table.append(table_row) # Make our table square for row in table: if len(row) < max_row_len: row.extend([self.empty_value] * (max_row_len - len(row))) return table def _get_row_repeat(self, row) -> int: """ Return number of times this row was repeated Repeating an empty row appeared to be a common way of representing sparse rows in the table. 
""" from odf.namespaces import TABLENS return int(row.attributes.get((TABLENS, "number-rows-repeated"), 1)) def _get_column_repeat(self, cell) -> int: from odf.namespaces import TABLENS return int(cell.attributes.get((TABLENS, "number-columns-repeated"), 1)) def _is_empty_row(self, row) -> bool: """ Helper function to find empty rows """ for column in row.childNodes: if len(column.childNodes) > 0: return False return True def _get_cell_value(self, cell, convert_float: bool) -> Scalar: from odf.namespaces import OFFICENS if str(cell) == "#N/A": return np.nan cell_type = cell.attributes.get((OFFICENS, "value-type")) if cell_type == "boolean": if str(cell) == "TRUE": return True return False if cell_type is None: return self.empty_value elif cell_type == "float": # GH5394 cell_value = float(cell.attributes.get((OFFICENS, "value"))) if convert_float: val = int(cell_value) if val == cell_value: return val return cell_value elif cell_type == "percentage": cell_value = cell.attributes.get((OFFICENS, "value")) return float(cell_value) elif cell_type == "string": return self._get_cell_string_value(cell) elif cell_type == "currency": cell_value = cell.attributes.get((OFFICENS, "value")) return float(cell_value) elif cell_type == "date": cell_value = cell.attributes.get((OFFICENS, "date-value")) return pd.to_datetime(cell_value) elif cell_type == "time": result = pd.to_datetime(str(cell)) result = cast(pd.Timestamp, result) return result.time() else: self.close() raise ValueError(f"Unrecognized type {cell_type}") def _get_cell_string_value(self, cell) -> str: """ Find and decode OpenDocument text:s tags that represent a run length encoded sequence of space characters. """ from odf.element import Element from odf.namespaces import TEXTNS from odf.text import S text_s = S().qname value = [] for fragment in cell.childNodes: if isinstance(fragment, Element): if fragment.qname == text_s: spaces = int(fragment.attributes.get((TEXTNS, "c"), 1)) value.append(" " * spaces) else: # recursive impl needed in case of nested fragments # with multiple spaces # https://github.com/pandas-dev/pandas/pull/36175#discussion_r484639704 value.append(self._get_cell_string_value(fragment)) else: value.append(str(fragment)) return "".join(value)
import random import numpy as np import pytest from pandas.errors import PerformanceWarning import pandas as pd from pandas import Categorical, DataFrame, NaT, Timestamp, date_range import pandas._testing as tm class TestDataFrameSortValues: def test_sort_values(self): frame = DataFrame( [[1, 1, 2], [3, 1, 0], [4, 5, 6]], index=[1, 2, 3], columns=list("ABC") ) # by column (axis=0) sorted_df = frame.sort_values(by="A") indexer = frame["A"].argsort().values expected = frame.loc[frame.index[indexer]] tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by="A", ascending=False) indexer = indexer[::-1] expected = frame.loc[frame.index[indexer]] tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by="A", ascending=False) tm.assert_frame_equal(sorted_df, expected) # GH4839 sorted_df = frame.sort_values(by=["A"], ascending=[False]) tm.assert_frame_equal(sorted_df, expected) # multiple bys sorted_df = frame.sort_values(by=["B", "C"]) expected = frame.loc[[2, 1, 3]] tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=["B", "C"], ascending=False) tm.assert_frame_equal(sorted_df, expected[::-1]) sorted_df = frame.sort_values(by=["B", "A"], ascending=[True, False]) tm.assert_frame_equal(sorted_df, expected) msg = "No axis named 2 for object type DataFrame" with pytest.raises(ValueError, match=msg): frame.sort_values(by=["A", "B"], axis=2, inplace=True) # by row (axis=1): GH#10806 sorted_df = frame.sort_values(by=3, axis=1) expected = frame tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=3, axis=1, ascending=False) expected = frame.reindex(columns=["C", "B", "A"]) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=[1, 2], axis="columns") expected = frame.reindex(columns=["B", "A", "C"]) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=[1, 3], axis=1, ascending=[True, False]) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=[1, 3], axis=1, ascending=False) expected = frame.reindex(columns=["C", "B", "A"]) tm.assert_frame_equal(sorted_df, expected) msg = r"Length of ascending \(5\) != length of by \(2\)" with pytest.raises(ValueError, match=msg): frame.sort_values(by=["A", "B"], axis=0, ascending=[True] * 5) def test_sort_values_inplace(self): frame = DataFrame( np.random.randn(4, 4), index=[1, 2, 3, 4], columns=["A", "B", "C", "D"] ) sorted_df = frame.copy() return_value = sorted_df.sort_values(by="A", inplace=True) assert return_value is None expected = frame.sort_values(by="A") tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values(by=1, axis=1, inplace=True) assert return_value is None expected = frame.sort_values(by=1, axis=1) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values(by="A", ascending=False, inplace=True) assert return_value is None expected = frame.sort_values(by="A", ascending=False) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values( by=["A", "B"], ascending=False, inplace=True ) assert return_value is None expected = frame.sort_values(by=["A", "B"], ascending=False) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_multicolumn(self): A = np.arange(5).repeat(20) B = np.tile(np.arange(5), 20) random.shuffle(A) random.shuffle(B) frame = DataFrame({"A": A, "B": B, "C": np.random.randn(100)}) result = frame.sort_values(by=["A", "B"]) indexer = 
np.lexsort((frame["B"], frame["A"])) expected = frame.take(indexer) tm.assert_frame_equal(result, expected) result = frame.sort_values(by=["A", "B"], ascending=False) indexer = np.lexsort( (frame["B"].rank(ascending=False), frame["A"].rank(ascending=False)) ) expected = frame.take(indexer) tm.assert_frame_equal(result, expected) result = frame.sort_values(by=["B", "A"]) indexer = np.lexsort((frame["A"], frame["B"])) expected = frame.take(indexer) tm.assert_frame_equal(result, expected) def test_sort_values_multicolumn_uint64(self): # GH#9918 # uint64 multicolumn sort df = DataFrame( { "a": pd.Series([18446637057563306014, 1162265347240853609]), "b": pd.Series([1, 2]), } ) df["a"] = df["a"].astype(np.uint64) result = df.sort_values(["a", "b"]) expected = DataFrame( { "a": pd.Series([18446637057563306014, 1162265347240853609]), "b": pd.Series([1, 2]), }, index=pd.Index([1, 0]), ) tm.assert_frame_equal(result, expected) def test_sort_values_nan(self): # GH#3917 df = DataFrame( {"A": [1, 2, np.nan, 1, 6, 8, 4], "B": [9, np.nan, 5, 2, 5, 4, 5]} ) # sort one column only expected = DataFrame( {"A": [np.nan, 1, 1, 2, 4, 6, 8], "B": [5, 9, 2, np.nan, 5, 5, 4]}, index=[2, 0, 3, 1, 6, 4, 5], ) sorted_df = df.sort_values(["A"], na_position="first") tm.assert_frame_equal(sorted_df, expected) expected = DataFrame( {"A": [np.nan, 8, 6, 4, 2, 1, 1], "B": [5, 4, 5, 5, np.nan, 9, 2]}, index=[2, 5, 4, 6, 1, 0, 3], ) sorted_df = df.sort_values(["A"], na_position="first", ascending=False) tm.assert_frame_equal(sorted_df, expected) expected = df.reindex(columns=["B", "A"]) sorted_df = df.sort_values(by=1, axis=1, na_position="first") tm.assert_frame_equal(sorted_df, expected) # na_position='last', order expected = DataFrame( {"A": [1, 1, 2, 4, 6, 8, np.nan], "B": [2, 9, np.nan, 5, 5, 4, 5]}, index=[3, 0, 1, 6, 4, 5, 2], ) sorted_df = df.sort_values(["A", "B"]) tm.assert_frame_equal(sorted_df, expected) # na_position='first', order expected = DataFrame( {"A": [np.nan, 1, 1, 2, 4, 6, 8], "B": [5, 2, 9, np.nan, 5, 5, 4]}, index=[2, 3, 0, 1, 6, 4, 5], ) sorted_df = df.sort_values(["A", "B"], na_position="first") tm.assert_frame_equal(sorted_df, expected) # na_position='first', not order expected = DataFrame( {"A": [np.nan, 1, 1, 2, 4, 6, 8], "B": [5, 9, 2, np.nan, 5, 5, 4]}, index=[2, 0, 3, 1, 6, 4, 5], ) sorted_df = df.sort_values(["A", "B"], ascending=[1, 0], na_position="first") tm.assert_frame_equal(sorted_df, expected) # na_position='last', not order expected = DataFrame( {"A": [8, 6, 4, 2, 1, 1, np.nan], "B": [4, 5, 5, np.nan, 2, 9, 5]}, index=[5, 4, 6, 1, 3, 0, 2], ) sorted_df = df.sort_values(["A", "B"], ascending=[0, 1], na_position="last") tm.assert_frame_equal(sorted_df, expected) def test_sort_values_stable_descending_sort(self): # GH#6399 df = DataFrame( [[2, "first"], [2, "second"], [1, "a"], [1, "b"]], columns=["sort_col", "order"], ) sorted_df = df.sort_values(by="sort_col", kind="mergesort", ascending=False) tm.assert_frame_equal(df, sorted_df) @pytest.mark.parametrize( "expected_idx_non_na, ascending", [ [ [3, 4, 5, 0, 1, 8, 6, 9, 7, 10, 13, 14], [True, True], ], [ [0, 3, 4, 5, 1, 8, 6, 7, 10, 13, 14, 9], [True, False], ], [ [9, 7, 10, 13, 14, 6, 8, 1, 3, 4, 5, 0], [False, True], ], [ [7, 10, 13, 14, 9, 6, 8, 1, 0, 3, 4, 5], [False, False], ], ], ) @pytest.mark.parametrize("na_position", ["first", "last"]) def test_sort_values_stable_multicolumn_sort( self, expected_idx_non_na, ascending, na_position ): # GH#38426 Clarify sort_values with mult. 
columns / labels is stable df = DataFrame( { "A": [1, 2, np.nan, 1, 1, 1, 6, 8, 4, 8, 8, np.nan, np.nan, 8, 8], "B": [9, np.nan, 5, 2, 2, 2, 5, 4, 5, 3, 4, np.nan, np.nan, 4, 4], } ) # All rows with NaN in col "B" only have unique values in "A", therefore, # only the rows with NaNs in "A" have to be treated individually: expected_idx = ( [11, 12, 2] + expected_idx_non_na if na_position == "first" else expected_idx_non_na + [2, 11, 12] ) expected = df.take(expected_idx) sorted_df = df.sort_values( ["A", "B"], ascending=ascending, na_position=na_position ) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_stable_categorial(self): # GH#16793 df = DataFrame({"x": Categorical(np.repeat([1, 2, 3, 4], 5), ordered=True)}) expected = df.copy() sorted_df = df.sort_values("x", kind="mergesort") tm.assert_frame_equal(sorted_df, expected) def test_sort_values_datetimes(self): # GH#3461, argsort / lexsort differences for a datetime column df = DataFrame( ["a", "a", "a", "b", "c", "d", "e", "f", "g"], columns=["A"], index=date_range("20130101", periods=9), ) dts = [ Timestamp(x) for x in [ "2004-02-11", "2004-01-21", "2004-01-26", "2005-09-20", "2010-10-04", "2009-05-12", "2008-11-12", "2010-09-28", "2010-09-28", ] ] df["B"] = dts[::2] + dts[1::2] df["C"] = 2.0 df["A1"] = 3.0 df1 = df.sort_values(by="A") df2 = df.sort_values(by=["A"]) tm.assert_frame_equal(df1, df2) df1 = df.sort_values(by="B") df2 = df.sort_values(by=["B"]) tm.assert_frame_equal(df1, df2) df1 = df.sort_values(by="B") df2 = df.sort_values(by=["C", "B"]) tm.assert_frame_equal(df1, df2) def test_sort_values_frame_column_inplace_sort_exception(self, float_frame): s = float_frame["A"] with pytest.raises(ValueError, match="This Series is a view"): s.sort_values(inplace=True) cp = s.copy() cp.sort_values() # it works! def test_sort_values_nat_values_in_int_column(self): # GH#14922: "sorting with large float and multiple columns incorrect" # cause was that the int64 value NaT was considered as "na". Which is # only correct for datetime64 columns. int_values = (2, int(NaT)) float_values = (2.0, -1.797693e308) df = DataFrame( {"int": int_values, "float": float_values}, columns=["int", "float"] ) df_reversed = DataFrame( {"int": int_values[::-1], "float": float_values[::-1]}, columns=["int", "float"], index=[1, 0], ) # NaT is not a "na" for int64 columns, so na_position must not # influence the result: df_sorted = df.sort_values(["int", "float"], na_position="last") tm.assert_frame_equal(df_sorted, df_reversed) df_sorted = df.sort_values(["int", "float"], na_position="first") tm.assert_frame_equal(df_sorted, df_reversed) # reverse sorting order df_sorted = df.sort_values(["int", "float"], ascending=False) tm.assert_frame_equal(df_sorted, df) # and now check if NaT is still considered as "na" for datetime64 # columns: df = DataFrame( {"datetime": [Timestamp("2016-01-01"), NaT], "float": float_values}, columns=["datetime", "float"], ) df_reversed = DataFrame( {"datetime": [NaT, Timestamp("2016-01-01")], "float": float_values[::-1]}, columns=["datetime", "float"], index=[1, 0], ) df_sorted = df.sort_values(["datetime", "float"], na_position="first") tm.assert_frame_equal(df_sorted, df_reversed) df_sorted = df.sort_values(["datetime", "float"], na_position="last") tm.assert_frame_equal(df_sorted, df) # Ascending should not affect the results. 
df_sorted = df.sort_values(["datetime", "float"], ascending=False) tm.assert_frame_equal(df_sorted, df) def test_sort_nat(self): # GH 16836 d1 = [Timestamp(x) for x in ["2016-01-01", "2015-01-01", np.nan, "2016-01-01"]] d2 = [ Timestamp(x) for x in ["2017-01-01", "2014-01-01", "2016-01-01", "2015-01-01"] ] df = DataFrame({"a": d1, "b": d2}, index=[0, 1, 2, 3]) d3 = [Timestamp(x) for x in ["2015-01-01", "2016-01-01", "2016-01-01", np.nan]] d4 = [ Timestamp(x) for x in ["2014-01-01", "2015-01-01", "2017-01-01", "2016-01-01"] ] expected = DataFrame({"a": d3, "b": d4}, index=[1, 3, 0, 2]) sorted_df = df.sort_values(by=["a", "b"]) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_na_position_with_categories(self): # GH#22556 # Positioning missing value properly when column is Categorical. categories = ["A", "B", "C"] category_indices = [0, 2, 4] list_of_nans = [np.nan, np.nan] na_indices = [1, 3] na_position_first = "first" na_position_last = "last" column_name = "c" reversed_categories = sorted(categories, reverse=True) reversed_category_indices = sorted(category_indices, reverse=True) reversed_na_indices = sorted(na_indices) df = DataFrame( { column_name: Categorical( ["A", np.nan, "B", np.nan, "C"], categories=categories, ordered=True ) } ) # sort ascending with na first result = df.sort_values( by=column_name, ascending=True, na_position=na_position_first ) expected = DataFrame( { column_name: Categorical( list_of_nans + categories, categories=categories, ordered=True ) }, index=na_indices + category_indices, ) tm.assert_frame_equal(result, expected) # sort ascending with na last result = df.sort_values( by=column_name, ascending=True, na_position=na_position_last ) expected = DataFrame( { column_name: Categorical( categories + list_of_nans, categories=categories, ordered=True ) }, index=category_indices + na_indices, ) tm.assert_frame_equal(result, expected) # sort descending with na first result = df.sort_values( by=column_name, ascending=False, na_position=na_position_first ) expected = DataFrame( { column_name: Categorical( list_of_nans + reversed_categories, categories=categories, ordered=True, ) }, index=reversed_na_indices + reversed_category_indices, ) tm.assert_frame_equal(result, expected) # sort descending with na last result = df.sort_values( by=column_name, ascending=False, na_position=na_position_last ) expected = DataFrame( { column_name: Categorical( reversed_categories + list_of_nans, categories=categories, ordered=True, ) }, index=reversed_category_indices + reversed_na_indices, ) tm.assert_frame_equal(result, expected) def test_sort_values_nat(self): # GH#16836 d1 = [Timestamp(x) for x in ["2016-01-01", "2015-01-01", np.nan, "2016-01-01"]] d2 = [ Timestamp(x) for x in ["2017-01-01", "2014-01-01", "2016-01-01", "2015-01-01"] ] df = DataFrame({"a": d1, "b": d2}, index=[0, 1, 2, 3]) d3 = [Timestamp(x) for x in ["2015-01-01", "2016-01-01", "2016-01-01", np.nan]] d4 = [ Timestamp(x) for x in ["2014-01-01", "2015-01-01", "2017-01-01", "2016-01-01"] ] expected = DataFrame({"a": d3, "b": d4}, index=[1, 3, 0, 2]) sorted_df = df.sort_values(by=["a", "b"]) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_na_position_with_categories_raises(self): df = DataFrame( { "c": Categorical( ["A", np.nan, "B", np.nan, "C"], categories=["A", "B", "C"], ordered=True, ) } ) with pytest.raises(ValueError, match="invalid na_position: bad_position"): df.sort_values(by="c", ascending=False, na_position="bad_position") @pytest.mark.parametrize("inplace", [True, False]) 
@pytest.mark.parametrize( "original_dict, sorted_dict, ignore_index, output_index", [ ({"A": [1, 2, 3]}, {"A": [3, 2, 1]}, True, [0, 1, 2]), ({"A": [1, 2, 3]}, {"A": [3, 2, 1]}, False, [2, 1, 0]), ( {"A": [1, 2, 3], "B": [2, 3, 4]}, {"A": [3, 2, 1], "B": [4, 3, 2]}, True, [0, 1, 2], ), ( {"A": [1, 2, 3], "B": [2, 3, 4]}, {"A": [3, 2, 1], "B": [4, 3, 2]}, False, [2, 1, 0], ), ], ) def test_sort_values_ignore_index( self, inplace, original_dict, sorted_dict, ignore_index, output_index ): # GH 30114 df = DataFrame(original_dict) expected = DataFrame(sorted_dict, index=output_index) kwargs = {"ignore_index": ignore_index, "inplace": inplace} if inplace: result_df = df.copy() result_df.sort_values("A", ascending=False, **kwargs) else: result_df = df.sort_values("A", ascending=False, **kwargs) tm.assert_frame_equal(result_df, expected) tm.assert_frame_equal(df, DataFrame(original_dict)) def test_sort_values_nat_na_position_default(self): # GH 13230 expected = DataFrame( { "A": [1, 2, 3, 4, 4], "date": pd.DatetimeIndex( [ "2010-01-01 09:00:00", "2010-01-01 09:00:01", "2010-01-01 09:00:02", "2010-01-01 09:00:03", "NaT", ] ), } ) result = expected.sort_values(["A", "date"]) tm.assert_frame_equal(result, expected) def test_sort_values_item_cache(self): # previous behavior incorrect retained an invalid _item_cache entry df = DataFrame(np.random.randn(4, 3), columns=["A", "B", "C"]) df["D"] = df["A"] * 2 ser = df["A"] assert len(df._mgr.blocks) == 2 df.sort_values(by="A") ser.values[0] = 99 assert df.iloc[0, 0] == df["A"][0] class TestDataFrameSortKey: # test key sorting (issue 27237) def test_sort_values_inplace_key(self, sort_by_key): frame = DataFrame( np.random.randn(4, 4), index=[1, 2, 3, 4], columns=["A", "B", "C", "D"] ) sorted_df = frame.copy() return_value = sorted_df.sort_values(by="A", inplace=True, key=sort_by_key) assert return_value is None expected = frame.sort_values(by="A", key=sort_by_key) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values( by=1, axis=1, inplace=True, key=sort_by_key ) assert return_value is None expected = frame.sort_values(by=1, axis=1, key=sort_by_key) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values( by="A", ascending=False, inplace=True, key=sort_by_key ) assert return_value is None expected = frame.sort_values(by="A", ascending=False, key=sort_by_key) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() sorted_df.sort_values( by=["A", "B"], ascending=False, inplace=True, key=sort_by_key ) expected = frame.sort_values(by=["A", "B"], ascending=False, key=sort_by_key) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_key(self): df = DataFrame(np.array([0, 5, np.nan, 3, 2, np.nan])) result = df.sort_values(0) expected = df.iloc[[0, 4, 3, 1, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(0, key=lambda x: x + 5) expected = df.iloc[[0, 4, 3, 1, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(0, key=lambda x: -x, ascending=False) expected = df.iloc[[0, 4, 3, 1, 2, 5]] tm.assert_frame_equal(result, expected) def test_sort_values_by_key(self): df = DataFrame( { "a": np.array([0, 3, np.nan, 3, 2, np.nan]), "b": np.array([0, 2, np.nan, 5, 2, np.nan]), } ) result = df.sort_values("a", key=lambda x: -x) expected = df.iloc[[1, 3, 4, 0, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(by=["a", "b"], key=lambda x: -x) expected = df.iloc[[3, 1, 4, 0, 2, 5]] 
tm.assert_frame_equal(result, expected) result = df.sort_values(by=["a", "b"], key=lambda x: -x, ascending=False) expected = df.iloc[[0, 4, 1, 3, 2, 5]] tm.assert_frame_equal(result, expected) def test_sort_values_by_key_by_name(self): df = DataFrame( { "a": np.array([0, 3, np.nan, 3, 2, np.nan]), "b": np.array([0, 2, np.nan, 5, 2, np.nan]), } ) def key(col): if col.name == "a": return -col else: return col result = df.sort_values(by="a", key=key) expected = df.iloc[[1, 3, 4, 0, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(by=["a"], key=key) expected = df.iloc[[1, 3, 4, 0, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(by="b", key=key) expected = df.iloc[[0, 1, 4, 3, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(by=["a", "b"], key=key) expected = df.iloc[[1, 3, 4, 0, 2, 5]] tm.assert_frame_equal(result, expected) def test_sort_values_key_string(self): df = DataFrame(np.array([["hello", "goodbye"], ["hello", "Hello"]])) result = df.sort_values(1) expected = df[::-1] tm.assert_frame_equal(result, expected) result = df.sort_values([0, 1], key=lambda col: col.str.lower()) tm.assert_frame_equal(result, df) result = df.sort_values( [0, 1], key=lambda col: col.str.lower(), ascending=False ) expected = df.sort_values(1, key=lambda col: col.str.lower(), ascending=False) tm.assert_frame_equal(result, expected) def test_sort_values_key_empty(self, sort_by_key): df = DataFrame(np.array([])) df.sort_values(0, key=sort_by_key) df.sort_index(key=sort_by_key) def test_changes_length_raises(self): df = DataFrame({"A": [1, 2, 3]}) with pytest.raises(ValueError, match="change the shape"): df.sort_values("A", key=lambda x: x[:1]) def test_sort_values_key_axes(self): df = DataFrame({0: ["Hello", "goodbye"], 1: [0, 1]}) result = df.sort_values(0, key=lambda col: col.str.lower()) expected = df[::-1] tm.assert_frame_equal(result, expected) result = df.sort_values(1, key=lambda col: -col) expected = df[::-1] tm.assert_frame_equal(result, expected) def test_sort_values_key_dict_axis(self): df = DataFrame({0: ["Hello", 0], 1: ["goodbye", 1]}) result = df.sort_values(0, key=lambda col: col.str.lower(), axis=1) expected = df.loc[:, ::-1] tm.assert_frame_equal(result, expected) result = df.sort_values(1, key=lambda col: -col, axis=1) expected = df.loc[:, ::-1] tm.assert_frame_equal(result, expected) @pytest.mark.parametrize("ordered", [True, False]) def test_sort_values_key_casts_to_categorical(self, ordered): # https://github.com/pandas-dev/pandas/issues/36383 categories = ["c", "b", "a"] df = DataFrame({"x": [1, 1, 1], "y": ["a", "b", "c"]}) def sorter(key): if key.name == "y": return pd.Series( Categorical(key, categories=categories, ordered=ordered) ) return key result = df.sort_values(by=["x", "y"], key=sorter) expected = DataFrame( {"x": [1, 1, 1], "y": ["c", "b", "a"]}, index=pd.Index([2, 1, 0]) ) tm.assert_frame_equal(result, expected) @pytest.fixture def df_none(): return DataFrame( { "outer": ["a", "a", "a", "b", "b", "b"], "inner": [1, 2, 2, 2, 1, 1], "A": np.arange(6, 0, -1), ("B", 5): ["one", "one", "two", "two", "one", "one"], } ) @pytest.fixture(params=[["outer"], ["outer", "inner"]]) def df_idx(request, df_none): levels = request.param return df_none.set_index(levels) @pytest.fixture( params=[ "inner", # index level ["outer"], # list of index level "A", # column [("B", 5)], # list of column ["inner", "outer"], # two index levels [("B", 5), "outer"], # index level and column ["A", ("B", 5)], # Two columns ["inner", "outer"], # two 
index levels and column ] ) def sort_names(request): return request.param @pytest.fixture(params=[True, False]) def ascending(request): return request.param class TestSortValuesLevelAsStr: def test_sort_index_level_and_column_label( self, df_none, df_idx, sort_names, ascending ): # GH#14353 # Get index levels from df_idx levels = df_idx.index.names # Compute expected by sorting on columns and the setting index expected = df_none.sort_values( by=sort_names, ascending=ascending, axis=0 ).set_index(levels) # Compute result sorting on mix on columns and index levels result = df_idx.sort_values(by=sort_names, ascending=ascending, axis=0) tm.assert_frame_equal(result, expected) def test_sort_column_level_and_index_label( self, df_none, df_idx, sort_names, ascending ): # GH#14353 # Get levels from df_idx levels = df_idx.index.names # Compute expected by sorting on axis=0, setting index levels, and then # transposing. For some cases this will result in a frame with # multiple column levels expected = ( df_none.sort_values(by=sort_names, ascending=ascending, axis=0) .set_index(levels) .T ) # Compute result by transposing and sorting on axis=1. result = df_idx.T.sort_values(by=sort_names, ascending=ascending, axis=1) if len(levels) > 1: # Accessing multi-level columns that are not lexsorted raises a # performance warning with tm.assert_produces_warning(PerformanceWarning, check_stacklevel=False): tm.assert_frame_equal(result, expected) else: tm.assert_frame_equal(result, expected)
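The behaviors these tests pin down can be reproduced directly; a minimal sketch of the multi-column and na_position cases, with made-up data:

import numpy as np
import pandas as pd

df = pd.DataFrame({"A": [1, 2, np.nan, 1], "B": [9, np.nan, 5, 2]})

# NaNs placed first, then a stable ascending sort on the remaining rows.
print(df.sort_values(["A", "B"], na_position="first"))

# Per-column directions: ascending on "A", descending on "B".
print(df.sort_values(["A", "B"], ascending=[True, False]))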
jreback/pandas
pandas/tests/frame/methods/test_sort_values.py
pandas/io/excel/_odfreader.py
""" Helpers for configuring locale settings. Name `localization` is chosen to avoid overlap with builtin `locale` module. """ from contextlib import contextmanager import locale import re import subprocess from pandas._config.config import options @contextmanager def set_locale(new_locale, lc_var: int = locale.LC_ALL): """ Context manager for temporarily setting a locale. Parameters ---------- new_locale : str or tuple A string of the form <language_country>.<encoding>. For example to set the current locale to US English with a UTF8 encoding, you would pass "en_US.UTF-8". lc_var : int, default `locale.LC_ALL` The category of the locale being set. Notes ----- This is useful when you want to run a particular block of code under a particular locale, without globally setting the locale. This probably isn't thread-safe. """ current_locale = locale.getlocale() try: locale.setlocale(lc_var, new_locale) normalized_locale = locale.getlocale() if all(x is not None for x in normalized_locale): yield ".".join(normalized_locale) else: yield new_locale finally: locale.setlocale(lc_var, current_locale) def can_set_locale(lc: str, lc_var: int = locale.LC_ALL) -> bool: """ Check to see if we can set a locale, and subsequently get the locale, without raising an Exception. Parameters ---------- lc : str The locale to attempt to set. lc_var : int, default `locale.LC_ALL` The category of the locale being set. Returns ------- bool Whether the passed locale can be set """ try: with set_locale(lc, lc_var=lc_var): pass except (ValueError, locale.Error): # horrible name for a Exception subclass return False else: return True def _valid_locales(locales, normalize): """ Return a list of normalized locales that do not throw an ``Exception`` when set. Parameters ---------- locales : str A string where each locale is separated by a newline. normalize : bool Whether to call ``locale.normalize`` on each locale. Returns ------- valid_locales : list A list of valid locales. """ return [ loc for loc in ( locale.normalize(loc.strip()) if normalize else loc.strip() for loc in locales ) if can_set_locale(loc) ] def _default_locale_getter(): return subprocess.check_output(["locale -a"], shell=True) def get_locales(prefix=None, normalize=True, locale_getter=_default_locale_getter): """ Get all the locales that are available on the system. Parameters ---------- prefix : str If not ``None`` then return only those locales with the prefix provided. For example to get all English language locales (those that start with ``"en"``), pass ``prefix="en"``. normalize : bool Call ``locale.normalize`` on the resulting list of available locales. If ``True``, only locales that can be set without throwing an ``Exception`` are returned. locale_getter : callable The function to use to retrieve the current locales. This should return a string with each locale separated by a newline character. Returns ------- locales : list of strings A list of locale strings that can be set with ``locale.setlocale()``. For example:: locale.setlocale(locale.LC_ALL, locale_string) On error will return None (no locale available, e.g. Windows) """ try: raw_locales = locale_getter() except subprocess.CalledProcessError: # Raised on (some? all?) Windows platforms because Note: "locale -a" # is not defined return None try: # raw_locales is "\n" separated list of locales # it may contain non-decodable parts, so split # extract what we can and then rejoin. 
raw_locales = raw_locales.split(b"\n") out_locales = [] for x in raw_locales: try: out_locales.append(str(x, encoding=options.display.encoding)) except UnicodeError: # 'locale -a' is used to populated 'raw_locales' and on # Redhat 7 Linux (and maybe others) prints locale names # using windows-1252 encoding. Bug only triggered by # a few special characters and when there is an # extensive list of installed locales. out_locales.append(str(x, encoding="windows-1252")) except TypeError: pass if prefix is None: return _valid_locales(out_locales, normalize) pattern = re.compile(f"{prefix}.*") found = pattern.findall("\n".join(out_locales)) return _valid_locales(found, normalize)
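A minimal usage sketch for the helpers above; whether any English locale exists depends on the host system, so the result is probed rather than assumed:

import locale

english = get_locales(prefix="en") or []
if english and can_set_locale(english[0]):
    # Run a block under the first available English locale, then restore.
    with set_locale(english[0], lc_var=locale.LC_ALL):
        print(locale.getlocale())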
jreback/pandas
pandas/tests/frame/methods/test_sort_values.py
pandas/_config/localization.py
from typing import Optional, Type import pytest import pandas as pd import pandas._testing as tm from pandas.core import ops from .base import BaseExtensionTests class BaseOpsUtil(BaseExtensionTests): def get_op_from_name(self, op_name): return tm.get_op_from_name(op_name) def check_opname(self, s, op_name, other, exc=Exception): op = self.get_op_from_name(op_name) self._check_op(s, op, other, op_name, exc) def _check_op(self, s, op, other, op_name, exc=NotImplementedError): if exc is None: result = op(s, other) if isinstance(s, pd.DataFrame): if len(s.columns) != 1: raise NotImplementedError expected = s.iloc[:, 0].combine(other, op).to_frame() self.assert_frame_equal(result, expected) else: expected = s.combine(other, op) self.assert_series_equal(result, expected) else: with pytest.raises(exc): op(s, other) def _check_divmod_op(self, s, op, other, exc=Exception): # divmod has multiple return values, so check separately if exc is None: result_div, result_mod = op(s, other) if op is divmod: expected_div, expected_mod = s // other, s % other else: expected_div, expected_mod = other // s, other % s self.assert_series_equal(result_div, expected_div) self.assert_series_equal(result_mod, expected_mod) else: with pytest.raises(exc): divmod(s, other) class BaseArithmeticOpsTests(BaseOpsUtil): """ Various Series and DataFrame arithmetic ops methods. Subclasses supporting various ops should set the class variables to indicate that they support ops of that kind * series_scalar_exc = TypeError * frame_scalar_exc = TypeError * series_array_exc = TypeError * divmod_exc = TypeError """ series_scalar_exc: Optional[Type[TypeError]] = TypeError frame_scalar_exc: Optional[Type[TypeError]] = TypeError series_array_exc: Optional[Type[TypeError]] = TypeError divmod_exc: Optional[Type[TypeError]] = TypeError def test_arith_series_with_scalar(self, data, all_arithmetic_operators): # series & scalar op_name = all_arithmetic_operators s = pd.Series(data) self.check_opname(s, op_name, s.iloc[0], exc=self.series_scalar_exc) @pytest.mark.xfail(run=False, reason="_reduce needs implementation") def test_arith_frame_with_scalar(self, data, all_arithmetic_operators): # frame & scalar op_name = all_arithmetic_operators df = pd.DataFrame({"A": data}) self.check_opname(df, op_name, data[0], exc=self.frame_scalar_exc) def test_arith_series_with_array(self, data, all_arithmetic_operators): # ndarray & other series op_name = all_arithmetic_operators s = pd.Series(data) self.check_opname( s, op_name, pd.Series([s.iloc[0]] * len(s)), exc=self.series_array_exc ) def test_divmod(self, data): s = pd.Series(data) self._check_divmod_op(s, divmod, 1, exc=self.divmod_exc) self._check_divmod_op(1, ops.rdivmod, s, exc=self.divmod_exc) def test_divmod_series_array(self, data, data_for_twos): s = pd.Series(data) self._check_divmod_op(s, divmod, data) other = data_for_twos self._check_divmod_op(other, ops.rdivmod, s) other = pd.Series(other) self._check_divmod_op(other, ops.rdivmod, s) def test_add_series_with_extension_array(self, data): s = pd.Series(data) result = s + data expected = pd.Series(data + data) self.assert_series_equal(result, expected) def test_error(self, data, all_arithmetic_operators): # invalid ops op_name = all_arithmetic_operators with pytest.raises(AttributeError): getattr(data, op_name) @pytest.mark.parametrize("box", [pd.Series, pd.DataFrame]) def test_direct_arith_with_ndframe_returns_not_implemented(self, data, box): # EAs should return NotImplemented for ops with Series/DataFrame # Pandas takes care of unboxing 
the series and calling the EA's op. other = pd.Series(data) if box is pd.DataFrame: other = other.to_frame() if hasattr(data, "__add__"): result = data.__add__(other) assert result is NotImplemented else: raise pytest.skip(f"{type(data).__name__} does not implement add") class BaseComparisonOpsTests(BaseOpsUtil): """Various Series and DataFrame comparison ops methods.""" def _compare_other(self, s, data, op_name, other): op = self.get_op_from_name(op_name) if op_name == "__eq__": assert not op(s, other).all() elif op_name == "__ne__": assert op(s, other).all() else: # array assert getattr(data, op_name)(other) is NotImplemented # series s = pd.Series(data) with pytest.raises(TypeError): op(s, other) def test_compare_scalar(self, data, all_compare_operators): op_name = all_compare_operators s = pd.Series(data) self._compare_other(s, data, op_name, 0) def test_compare_array(self, data, all_compare_operators): op_name = all_compare_operators s = pd.Series(data) other = pd.Series([data[0]] * len(data)) self._compare_other(s, data, op_name, other) @pytest.mark.parametrize("box", [pd.Series, pd.DataFrame]) def test_direct_arith_with_ndframe_returns_not_implemented(self, data, box): # EAs should return NotImplemented for ops with Series/DataFrame # Pandas takes care of unboxing the series and calling the EA's op. other = pd.Series(data) if box is pd.DataFrame: other = other.to_frame() if hasattr(data, "__eq__"): result = data.__eq__(other) assert result is NotImplemented else: raise pytest.skip(f"{type(data).__name__} does not implement __eq__") if hasattr(data, "__ne__"): result = data.__ne__(other) assert result is NotImplemented else: raise pytest.skip(f"{type(data).__name__} does not implement __ne__") class BaseUnaryOpsTests(BaseOpsUtil): def test_invert(self, data): s = pd.Series(data, name="name") result = ~s expected = pd.Series(~data, name="name") self.assert_series_equal(result, expected)
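As a quick illustration of how the class variables documented in BaseArithmeticOpsTests are meant to be used (a hypothetical sketch, not part of the suite above; "MyDtype" is a stand-in name): a subclass for an extension dtype whose arithmetic ops all succeed sets the exception attributes to None, while one without divmod support leaves divmod_exc as TypeError.

class TestMyDtypeArithmeticOps(BaseArithmeticOpsTests):
    # With an exc attribute set to None the base checks assert on results;
    # left as TypeError they assert that the op raises.
    series_scalar_exc = None  # Series <op> scalar is expected to work
    frame_scalar_exc = None  # DataFrame <op> scalar is expected to work
    series_array_exc = None  # Series <op> array is expected to work
    divmod_exc = TypeError  # divmod is expected to raise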
import random import numpy as np import pytest from pandas.errors import PerformanceWarning import pandas as pd from pandas import Categorical, DataFrame, NaT, Timestamp, date_range import pandas._testing as tm class TestDataFrameSortValues: def test_sort_values(self): frame = DataFrame( [[1, 1, 2], [3, 1, 0], [4, 5, 6]], index=[1, 2, 3], columns=list("ABC") ) # by column (axis=0) sorted_df = frame.sort_values(by="A") indexer = frame["A"].argsort().values expected = frame.loc[frame.index[indexer]] tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by="A", ascending=False) indexer = indexer[::-1] expected = frame.loc[frame.index[indexer]] tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by="A", ascending=False) tm.assert_frame_equal(sorted_df, expected) # GH4839 sorted_df = frame.sort_values(by=["A"], ascending=[False]) tm.assert_frame_equal(sorted_df, expected) # multiple bys sorted_df = frame.sort_values(by=["B", "C"]) expected = frame.loc[[2, 1, 3]] tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=["B", "C"], ascending=False) tm.assert_frame_equal(sorted_df, expected[::-1]) sorted_df = frame.sort_values(by=["B", "A"], ascending=[True, False]) tm.assert_frame_equal(sorted_df, expected) msg = "No axis named 2 for object type DataFrame" with pytest.raises(ValueError, match=msg): frame.sort_values(by=["A", "B"], axis=2, inplace=True) # by row (axis=1): GH#10806 sorted_df = frame.sort_values(by=3, axis=1) expected = frame tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=3, axis=1, ascending=False) expected = frame.reindex(columns=["C", "B", "A"]) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=[1, 2], axis="columns") expected = frame.reindex(columns=["B", "A", "C"]) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=[1, 3], axis=1, ascending=[True, False]) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.sort_values(by=[1, 3], axis=1, ascending=False) expected = frame.reindex(columns=["C", "B", "A"]) tm.assert_frame_equal(sorted_df, expected) msg = r"Length of ascending \(5\) != length of by \(2\)" with pytest.raises(ValueError, match=msg): frame.sort_values(by=["A", "B"], axis=0, ascending=[True] * 5) def test_sort_values_inplace(self): frame = DataFrame( np.random.randn(4, 4), index=[1, 2, 3, 4], columns=["A", "B", "C", "D"] ) sorted_df = frame.copy() return_value = sorted_df.sort_values(by="A", inplace=True) assert return_value is None expected = frame.sort_values(by="A") tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values(by=1, axis=1, inplace=True) assert return_value is None expected = frame.sort_values(by=1, axis=1) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values(by="A", ascending=False, inplace=True) assert return_value is None expected = frame.sort_values(by="A", ascending=False) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values( by=["A", "B"], ascending=False, inplace=True ) assert return_value is None expected = frame.sort_values(by=["A", "B"], ascending=False) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_multicolumn(self): A = np.arange(5).repeat(20) B = np.tile(np.arange(5), 20) random.shuffle(A) random.shuffle(B) frame = DataFrame({"A": A, "B": B, "C": np.random.randn(100)}) result = frame.sort_values(by=["A", "B"]) indexer = 
np.lexsort((frame["B"], frame["A"])) expected = frame.take(indexer) tm.assert_frame_equal(result, expected) result = frame.sort_values(by=["A", "B"], ascending=False) indexer = np.lexsort( (frame["B"].rank(ascending=False), frame["A"].rank(ascending=False)) ) expected = frame.take(indexer) tm.assert_frame_equal(result, expected) result = frame.sort_values(by=["B", "A"]) indexer = np.lexsort((frame["A"], frame["B"])) expected = frame.take(indexer) tm.assert_frame_equal(result, expected) def test_sort_values_multicolumn_uint64(self): # GH#9918 # uint64 multicolumn sort df = DataFrame( { "a": pd.Series([18446637057563306014, 1162265347240853609]), "b": pd.Series([1, 2]), } ) df["a"] = df["a"].astype(np.uint64) result = df.sort_values(["a", "b"]) expected = DataFrame( { "a": pd.Series([18446637057563306014, 1162265347240853609]), "b": pd.Series([1, 2]), }, index=pd.Index([1, 0]), ) tm.assert_frame_equal(result, expected) def test_sort_values_nan(self): # GH#3917 df = DataFrame( {"A": [1, 2, np.nan, 1, 6, 8, 4], "B": [9, np.nan, 5, 2, 5, 4, 5]} ) # sort one column only expected = DataFrame( {"A": [np.nan, 1, 1, 2, 4, 6, 8], "B": [5, 9, 2, np.nan, 5, 5, 4]}, index=[2, 0, 3, 1, 6, 4, 5], ) sorted_df = df.sort_values(["A"], na_position="first") tm.assert_frame_equal(sorted_df, expected) expected = DataFrame( {"A": [np.nan, 8, 6, 4, 2, 1, 1], "B": [5, 4, 5, 5, np.nan, 9, 2]}, index=[2, 5, 4, 6, 1, 0, 3], ) sorted_df = df.sort_values(["A"], na_position="first", ascending=False) tm.assert_frame_equal(sorted_df, expected) expected = df.reindex(columns=["B", "A"]) sorted_df = df.sort_values(by=1, axis=1, na_position="first") tm.assert_frame_equal(sorted_df, expected) # na_position='last', order expected = DataFrame( {"A": [1, 1, 2, 4, 6, 8, np.nan], "B": [2, 9, np.nan, 5, 5, 4, 5]}, index=[3, 0, 1, 6, 4, 5, 2], ) sorted_df = df.sort_values(["A", "B"]) tm.assert_frame_equal(sorted_df, expected) # na_position='first', order expected = DataFrame( {"A": [np.nan, 1, 1, 2, 4, 6, 8], "B": [5, 2, 9, np.nan, 5, 5, 4]}, index=[2, 3, 0, 1, 6, 4, 5], ) sorted_df = df.sort_values(["A", "B"], na_position="first") tm.assert_frame_equal(sorted_df, expected) # na_position='first', not order expected = DataFrame( {"A": [np.nan, 1, 1, 2, 4, 6, 8], "B": [5, 9, 2, np.nan, 5, 5, 4]}, index=[2, 0, 3, 1, 6, 4, 5], ) sorted_df = df.sort_values(["A", "B"], ascending=[1, 0], na_position="first") tm.assert_frame_equal(sorted_df, expected) # na_position='last', not order expected = DataFrame( {"A": [8, 6, 4, 2, 1, 1, np.nan], "B": [4, 5, 5, np.nan, 2, 9, 5]}, index=[5, 4, 6, 1, 3, 0, 2], ) sorted_df = df.sort_values(["A", "B"], ascending=[0, 1], na_position="last") tm.assert_frame_equal(sorted_df, expected) def test_sort_values_stable_descending_sort(self): # GH#6399 df = DataFrame( [[2, "first"], [2, "second"], [1, "a"], [1, "b"]], columns=["sort_col", "order"], ) sorted_df = df.sort_values(by="sort_col", kind="mergesort", ascending=False) tm.assert_frame_equal(df, sorted_df) @pytest.mark.parametrize( "expected_idx_non_na, ascending", [ [ [3, 4, 5, 0, 1, 8, 6, 9, 7, 10, 13, 14], [True, True], ], [ [0, 3, 4, 5, 1, 8, 6, 7, 10, 13, 14, 9], [True, False], ], [ [9, 7, 10, 13, 14, 6, 8, 1, 3, 4, 5, 0], [False, True], ], [ [7, 10, 13, 14, 9, 6, 8, 1, 0, 3, 4, 5], [False, False], ], ], ) @pytest.mark.parametrize("na_position", ["first", "last"]) def test_sort_values_stable_multicolumn_sort( self, expected_idx_non_na, ascending, na_position ): # GH#38426 Clarify sort_values with mult. 
columns / labels is stable df = DataFrame( { "A": [1, 2, np.nan, 1, 1, 1, 6, 8, 4, 8, 8, np.nan, np.nan, 8, 8], "B": [9, np.nan, 5, 2, 2, 2, 5, 4, 5, 3, 4, np.nan, np.nan, 4, 4], } ) # All rows with NaN in col "B" only have unique values in "A", therefore, # only the rows with NaNs in "A" have to be treated individually: expected_idx = ( [11, 12, 2] + expected_idx_non_na if na_position == "first" else expected_idx_non_na + [2, 11, 12] ) expected = df.take(expected_idx) sorted_df = df.sort_values( ["A", "B"], ascending=ascending, na_position=na_position ) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_stable_categorial(self): # GH#16793 df = DataFrame({"x": Categorical(np.repeat([1, 2, 3, 4], 5), ordered=True)}) expected = df.copy() sorted_df = df.sort_values("x", kind="mergesort") tm.assert_frame_equal(sorted_df, expected) def test_sort_values_datetimes(self): # GH#3461, argsort / lexsort differences for a datetime column df = DataFrame( ["a", "a", "a", "b", "c", "d", "e", "f", "g"], columns=["A"], index=date_range("20130101", periods=9), ) dts = [ Timestamp(x) for x in [ "2004-02-11", "2004-01-21", "2004-01-26", "2005-09-20", "2010-10-04", "2009-05-12", "2008-11-12", "2010-09-28", "2010-09-28", ] ] df["B"] = dts[::2] + dts[1::2] df["C"] = 2.0 df["A1"] = 3.0 df1 = df.sort_values(by="A") df2 = df.sort_values(by=["A"]) tm.assert_frame_equal(df1, df2) df1 = df.sort_values(by="B") df2 = df.sort_values(by=["B"]) tm.assert_frame_equal(df1, df2) df1 = df.sort_values(by="B") df2 = df.sort_values(by=["C", "B"]) tm.assert_frame_equal(df1, df2) def test_sort_values_frame_column_inplace_sort_exception(self, float_frame): s = float_frame["A"] with pytest.raises(ValueError, match="This Series is a view"): s.sort_values(inplace=True) cp = s.copy() cp.sort_values() # it works! def test_sort_values_nat_values_in_int_column(self): # GH#14922: "sorting with large float and multiple columns incorrect" # cause was that the int64 value NaT was considered as "na". Which is # only correct for datetime64 columns. int_values = (2, int(NaT)) float_values = (2.0, -1.797693e308) df = DataFrame( {"int": int_values, "float": float_values}, columns=["int", "float"] ) df_reversed = DataFrame( {"int": int_values[::-1], "float": float_values[::-1]}, columns=["int", "float"], index=[1, 0], ) # NaT is not a "na" for int64 columns, so na_position must not # influence the result: df_sorted = df.sort_values(["int", "float"], na_position="last") tm.assert_frame_equal(df_sorted, df_reversed) df_sorted = df.sort_values(["int", "float"], na_position="first") tm.assert_frame_equal(df_sorted, df_reversed) # reverse sorting order df_sorted = df.sort_values(["int", "float"], ascending=False) tm.assert_frame_equal(df_sorted, df) # and now check if NaT is still considered as "na" for datetime64 # columns: df = DataFrame( {"datetime": [Timestamp("2016-01-01"), NaT], "float": float_values}, columns=["datetime", "float"], ) df_reversed = DataFrame( {"datetime": [NaT, Timestamp("2016-01-01")], "float": float_values[::-1]}, columns=["datetime", "float"], index=[1, 0], ) df_sorted = df.sort_values(["datetime", "float"], na_position="first") tm.assert_frame_equal(df_sorted, df_reversed) df_sorted = df.sort_values(["datetime", "float"], na_position="last") tm.assert_frame_equal(df_sorted, df) # Ascending should not affect the results. 
df_sorted = df.sort_values(["datetime", "float"], ascending=False) tm.assert_frame_equal(df_sorted, df) def test_sort_nat(self): # GH 16836 d1 = [Timestamp(x) for x in ["2016-01-01", "2015-01-01", np.nan, "2016-01-01"]] d2 = [ Timestamp(x) for x in ["2017-01-01", "2014-01-01", "2016-01-01", "2015-01-01"] ] df = DataFrame({"a": d1, "b": d2}, index=[0, 1, 2, 3]) d3 = [Timestamp(x) for x in ["2015-01-01", "2016-01-01", "2016-01-01", np.nan]] d4 = [ Timestamp(x) for x in ["2014-01-01", "2015-01-01", "2017-01-01", "2016-01-01"] ] expected = DataFrame({"a": d3, "b": d4}, index=[1, 3, 0, 2]) sorted_df = df.sort_values(by=["a", "b"]) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_na_position_with_categories(self): # GH#22556 # Positioning missing value properly when column is Categorical. categories = ["A", "B", "C"] category_indices = [0, 2, 4] list_of_nans = [np.nan, np.nan] na_indices = [1, 3] na_position_first = "first" na_position_last = "last" column_name = "c" reversed_categories = sorted(categories, reverse=True) reversed_category_indices = sorted(category_indices, reverse=True) reversed_na_indices = sorted(na_indices) df = DataFrame( { column_name: Categorical( ["A", np.nan, "B", np.nan, "C"], categories=categories, ordered=True ) } ) # sort ascending with na first result = df.sort_values( by=column_name, ascending=True, na_position=na_position_first ) expected = DataFrame( { column_name: Categorical( list_of_nans + categories, categories=categories, ordered=True ) }, index=na_indices + category_indices, ) tm.assert_frame_equal(result, expected) # sort ascending with na last result = df.sort_values( by=column_name, ascending=True, na_position=na_position_last ) expected = DataFrame( { column_name: Categorical( categories + list_of_nans, categories=categories, ordered=True ) }, index=category_indices + na_indices, ) tm.assert_frame_equal(result, expected) # sort descending with na first result = df.sort_values( by=column_name, ascending=False, na_position=na_position_first ) expected = DataFrame( { column_name: Categorical( list_of_nans + reversed_categories, categories=categories, ordered=True, ) }, index=reversed_na_indices + reversed_category_indices, ) tm.assert_frame_equal(result, expected) # sort descending with na last result = df.sort_values( by=column_name, ascending=False, na_position=na_position_last ) expected = DataFrame( { column_name: Categorical( reversed_categories + list_of_nans, categories=categories, ordered=True, ) }, index=reversed_category_indices + reversed_na_indices, ) tm.assert_frame_equal(result, expected) def test_sort_values_nat(self): # GH#16836 d1 = [Timestamp(x) for x in ["2016-01-01", "2015-01-01", np.nan, "2016-01-01"]] d2 = [ Timestamp(x) for x in ["2017-01-01", "2014-01-01", "2016-01-01", "2015-01-01"] ] df = DataFrame({"a": d1, "b": d2}, index=[0, 1, 2, 3]) d3 = [Timestamp(x) for x in ["2015-01-01", "2016-01-01", "2016-01-01", np.nan]] d4 = [ Timestamp(x) for x in ["2014-01-01", "2015-01-01", "2017-01-01", "2016-01-01"] ] expected = DataFrame({"a": d3, "b": d4}, index=[1, 3, 0, 2]) sorted_df = df.sort_values(by=["a", "b"]) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_na_position_with_categories_raises(self): df = DataFrame( { "c": Categorical( ["A", np.nan, "B", np.nan, "C"], categories=["A", "B", "C"], ordered=True, ) } ) with pytest.raises(ValueError, match="invalid na_position: bad_position"): df.sort_values(by="c", ascending=False, na_position="bad_position") @pytest.mark.parametrize("inplace", [True, False]) 
@pytest.mark.parametrize( "original_dict, sorted_dict, ignore_index, output_index", [ ({"A": [1, 2, 3]}, {"A": [3, 2, 1]}, True, [0, 1, 2]), ({"A": [1, 2, 3]}, {"A": [3, 2, 1]}, False, [2, 1, 0]), ( {"A": [1, 2, 3], "B": [2, 3, 4]}, {"A": [3, 2, 1], "B": [4, 3, 2]}, True, [0, 1, 2], ), ( {"A": [1, 2, 3], "B": [2, 3, 4]}, {"A": [3, 2, 1], "B": [4, 3, 2]}, False, [2, 1, 0], ), ], ) def test_sort_values_ignore_index( self, inplace, original_dict, sorted_dict, ignore_index, output_index ): # GH 30114 df = DataFrame(original_dict) expected = DataFrame(sorted_dict, index=output_index) kwargs = {"ignore_index": ignore_index, "inplace": inplace} if inplace: result_df = df.copy() result_df.sort_values("A", ascending=False, **kwargs) else: result_df = df.sort_values("A", ascending=False, **kwargs) tm.assert_frame_equal(result_df, expected) tm.assert_frame_equal(df, DataFrame(original_dict)) def test_sort_values_nat_na_position_default(self): # GH 13230 expected = DataFrame( { "A": [1, 2, 3, 4, 4], "date": pd.DatetimeIndex( [ "2010-01-01 09:00:00", "2010-01-01 09:00:01", "2010-01-01 09:00:02", "2010-01-01 09:00:03", "NaT", ] ), } ) result = expected.sort_values(["A", "date"]) tm.assert_frame_equal(result, expected) def test_sort_values_item_cache(self): # previous behavior incorrectly retained an invalid _item_cache entry df = DataFrame(np.random.randn(4, 3), columns=["A", "B", "C"]) df["D"] = df["A"] * 2 ser = df["A"] assert len(df._mgr.blocks) == 2 df.sort_values(by="A") ser.values[0] = 99 assert df.iloc[0, 0] == df["A"][0] class TestDataFrameSortKey: # test key sorting (issue 27237) def test_sort_values_inplace_key(self, sort_by_key): frame = DataFrame( np.random.randn(4, 4), index=[1, 2, 3, 4], columns=["A", "B", "C", "D"] ) sorted_df = frame.copy() return_value = sorted_df.sort_values(by="A", inplace=True, key=sort_by_key) assert return_value is None expected = frame.sort_values(by="A", key=sort_by_key) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values( by=1, axis=1, inplace=True, key=sort_by_key ) assert return_value is None expected = frame.sort_values(by=1, axis=1, key=sort_by_key) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() return_value = sorted_df.sort_values( by="A", ascending=False, inplace=True, key=sort_by_key ) assert return_value is None expected = frame.sort_values(by="A", ascending=False, key=sort_by_key) tm.assert_frame_equal(sorted_df, expected) sorted_df = frame.copy() sorted_df.sort_values( by=["A", "B"], ascending=False, inplace=True, key=sort_by_key ) expected = frame.sort_values(by=["A", "B"], ascending=False, key=sort_by_key) tm.assert_frame_equal(sorted_df, expected) def test_sort_values_key(self): df = DataFrame(np.array([0, 5, np.nan, 3, 2, np.nan])) result = df.sort_values(0) expected = df.iloc[[0, 4, 3, 1, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(0, key=lambda x: x + 5) expected = df.iloc[[0, 4, 3, 1, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(0, key=lambda x: -x, ascending=False) expected = df.iloc[[0, 4, 3, 1, 2, 5]] tm.assert_frame_equal(result, expected) def test_sort_values_by_key(self): df = DataFrame( { "a": np.array([0, 3, np.nan, 3, 2, np.nan]), "b": np.array([0, 2, np.nan, 5, 2, np.nan]), } ) result = df.sort_values("a", key=lambda x: -x) expected = df.iloc[[1, 3, 4, 0, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(by=["a", "b"], key=lambda x: -x) expected = df.iloc[[3, 1, 4, 0, 2, 5]]
tm.assert_frame_equal(result, expected) result = df.sort_values(by=["a", "b"], key=lambda x: -x, ascending=False) expected = df.iloc[[0, 4, 1, 3, 2, 5]] tm.assert_frame_equal(result, expected) def test_sort_values_by_key_by_name(self): df = DataFrame( { "a": np.array([0, 3, np.nan, 3, 2, np.nan]), "b": np.array([0, 2, np.nan, 5, 2, np.nan]), } ) def key(col): if col.name == "a": return -col else: return col result = df.sort_values(by="a", key=key) expected = df.iloc[[1, 3, 4, 0, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(by=["a"], key=key) expected = df.iloc[[1, 3, 4, 0, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(by="b", key=key) expected = df.iloc[[0, 1, 4, 3, 2, 5]] tm.assert_frame_equal(result, expected) result = df.sort_values(by=["a", "b"], key=key) expected = df.iloc[[1, 3, 4, 0, 2, 5]] tm.assert_frame_equal(result, expected) def test_sort_values_key_string(self): df = DataFrame(np.array([["hello", "goodbye"], ["hello", "Hello"]])) result = df.sort_values(1) expected = df[::-1] tm.assert_frame_equal(result, expected) result = df.sort_values([0, 1], key=lambda col: col.str.lower()) tm.assert_frame_equal(result, df) result = df.sort_values( [0, 1], key=lambda col: col.str.lower(), ascending=False ) expected = df.sort_values(1, key=lambda col: col.str.lower(), ascending=False) tm.assert_frame_equal(result, expected) def test_sort_values_key_empty(self, sort_by_key): df = DataFrame(np.array([])) df.sort_values(0, key=sort_by_key) df.sort_index(key=sort_by_key) def test_changes_length_raises(self): df = DataFrame({"A": [1, 2, 3]}) with pytest.raises(ValueError, match="change the shape"): df.sort_values("A", key=lambda x: x[:1]) def test_sort_values_key_axes(self): df = DataFrame({0: ["Hello", "goodbye"], 1: [0, 1]}) result = df.sort_values(0, key=lambda col: col.str.lower()) expected = df[::-1] tm.assert_frame_equal(result, expected) result = df.sort_values(1, key=lambda col: -col) expected = df[::-1] tm.assert_frame_equal(result, expected) def test_sort_values_key_dict_axis(self): df = DataFrame({0: ["Hello", 0], 1: ["goodbye", 1]}) result = df.sort_values(0, key=lambda col: col.str.lower(), axis=1) expected = df.loc[:, ::-1] tm.assert_frame_equal(result, expected) result = df.sort_values(1, key=lambda col: -col, axis=1) expected = df.loc[:, ::-1] tm.assert_frame_equal(result, expected) @pytest.mark.parametrize("ordered", [True, False]) def test_sort_values_key_casts_to_categorical(self, ordered): # https://github.com/pandas-dev/pandas/issues/36383 categories = ["c", "b", "a"] df = DataFrame({"x": [1, 1, 1], "y": ["a", "b", "c"]}) def sorter(key): if key.name == "y": return pd.Series( Categorical(key, categories=categories, ordered=ordered) ) return key result = df.sort_values(by=["x", "y"], key=sorter) expected = DataFrame( {"x": [1, 1, 1], "y": ["c", "b", "a"]}, index=pd.Index([2, 1, 0]) ) tm.assert_frame_equal(result, expected) @pytest.fixture def df_none(): return DataFrame( { "outer": ["a", "a", "a", "b", "b", "b"], "inner": [1, 2, 2, 2, 1, 1], "A": np.arange(6, 0, -1), ("B", 5): ["one", "one", "two", "two", "one", "one"], } ) @pytest.fixture(params=[["outer"], ["outer", "inner"]]) def df_idx(request, df_none): levels = request.param return df_none.set_index(levels) @pytest.fixture( params=[ "inner", # index level ["outer"], # list of index level "A", # column [("B", 5)], # list of column ["inner", "outer"], # two index levels [("B", 5), "outer"], # index level and column ["A", ("B", 5)], # two columns ["inner", "outer", "A"], # two
index levels and column ] ) def sort_names(request): return request.param @pytest.fixture(params=[True, False]) def ascending(request): return request.param class TestSortValuesLevelAsStr: def test_sort_index_level_and_column_label( self, df_none, df_idx, sort_names, ascending ): # GH#14353 # Get index levels from df_idx levels = df_idx.index.names # Compute expected by sorting on columns and then setting the index expected = df_none.sort_values( by=sort_names, ascending=ascending, axis=0 ).set_index(levels) # Compute result by sorting on a mix of columns and index levels result = df_idx.sort_values(by=sort_names, ascending=ascending, axis=0) tm.assert_frame_equal(result, expected) def test_sort_column_level_and_index_label( self, df_none, df_idx, sort_names, ascending ): # GH#14353 # Get levels from df_idx levels = df_idx.index.names # Compute expected by sorting on axis=0, setting index levels, and then # transposing. For some cases this will result in a frame with # multiple column levels expected = ( df_none.sort_values(by=sort_names, ascending=ascending, axis=0) .set_index(levels) .T ) # Compute result by transposing and sorting on axis=1. result = df_idx.T.sort_values(by=sort_names, ascending=ascending, axis=1) if len(levels) > 1: # Accessing multi-level columns that are not lexsorted raises a # performance warning with tm.assert_produces_warning(PerformanceWarning, check_stacklevel=False): tm.assert_frame_equal(result, expected) else: tm.assert_frame_equal(result, expected)
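To make the key= semantics exercised by TestDataFrameSortKey above concrete, here is a minimal standalone sketch (illustrative data; requires pandas >= 1.1, where the key argument was added): the callable receives each sort column as a Series, must return a same-length Series, and missing values are still placed according to na_position after the key is applied.

import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [0, 3, np.nan, 3, 2, np.nan]})

# Negating the column sorts descending even though ascending=True;
# the two NaNs still go last because na_position defaults to "last".
result = df.sort_values("a", key=lambda col: -col)
print(result["a"].tolist())  # [3.0, 3.0, 2.0, 0.0, nan, nan]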
jreback/pandas
pandas/tests/frame/methods/test_sort_values.py
pandas/tests/extension/base/ops.py
"""Provide configuration end points for Automations.""" from collections import OrderedDict import uuid from homeassistant.components.automation import DOMAIN, PLATFORM_SCHEMA from homeassistant.components.automation.config import async_validate_config_item from homeassistant.config import AUTOMATION_CONFIG_PATH from homeassistant.const import CONF_ID, SERVICE_RELOAD from homeassistant.helpers import config_validation as cv, entity_registry from . import ACTION_DELETE, EditIdBasedConfigView async def async_setup(hass): """Set up the Automation config API.""" async def hook(action, config_key): """post_write_hook for Config View that reloads automations.""" await hass.services.async_call(DOMAIN, SERVICE_RELOAD) if action != ACTION_DELETE: return ent_reg = await entity_registry.async_get_registry(hass) entity_id = ent_reg.async_get_entity_id(DOMAIN, DOMAIN, config_key) if entity_id is None: return ent_reg.async_remove(entity_id) hass.http.register_view( EditAutomationConfigView( DOMAIN, "config", AUTOMATION_CONFIG_PATH, cv.string, PLATFORM_SCHEMA, post_write_hook=hook, data_validator=async_validate_config_item, ) ) return True class EditAutomationConfigView(EditIdBasedConfigView): """Edit automation config.""" def _write_value(self, hass, data, config_key, new_value): """Set value.""" index = None for index, cur_value in enumerate(data): # When people copy paste their automations to the config file, # they sometimes forget to add IDs. Fix it here. if CONF_ID not in cur_value: cur_value[CONF_ID] = uuid.uuid4().hex elif cur_value[CONF_ID] == config_key: break else: cur_value = OrderedDict() cur_value[CONF_ID] = config_key index = len(data) data.append(cur_value) # Iterate through some keys that we want to have ordered in the output updated_value = OrderedDict() for key in ("id", "alias", "description", "trigger", "condition", "action"): if key in cur_value: updated_value[key] = cur_value[key] if key in new_value: updated_value[key] = new_value[key] # We cover all current fields above, but just in case we start # supporting more fields in the future. updated_value.update(cur_value) updated_value.update(new_value) data[index] = updated_value
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/config/automation.py
"""Support for Genius Hub switch/outlet devices.""" from homeassistant.components.switch import DEVICE_CLASS_OUTLET, SwitchEntity from homeassistant.helpers.typing import ConfigType, HomeAssistantType from . import DOMAIN, GeniusZone ATTR_DURATION = "duration" GH_ON_OFF_ZONE = "on / off" async def async_setup_platform( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None ) -> None: """Set up the Genius Hub switch entities.""" if discovery_info is None: return broker = hass.data[DOMAIN]["broker"] async_add_entities( [ GeniusSwitch(broker, z) for z in broker.client.zone_objs if z.data["type"] == GH_ON_OFF_ZONE ] ) class GeniusSwitch(GeniusZone, SwitchEntity): """Representation of a Genius Hub switch.""" @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_OUTLET @property def is_on(self) -> bool: """Return the current state of the on/off zone. The zone is considered 'on' if & only if it is override/on (e.g. timer/on is 'off'). """ return self._zone.data["mode"] == "override" and self._zone.data["setpoint"] async def async_turn_off(self, **kwargs) -> None: """Send the zone to Timer mode. The zone is deemed 'off' in this mode, although the plugs may actually be on. """ await self._zone.set_mode("timer") async def async_turn_on(self, **kwargs) -> None: """Set the zone to override/on ({'setpoint': true}) for x seconds.""" await self._zone.set_override(1, kwargs.get(ATTR_DURATION, 3600))
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/geniushub/switch.py
"""STT constante.""" from enum import Enum DOMAIN = "stt" class AudioCodecs(str, Enum): """Supported Audio codecs.""" PCM = "pcm" OPUS = "opus" class AudioFormats(str, Enum): """Supported Audio formats.""" WAV = "wav" OGG = "ogg" class AudioBitRates(int, Enum): """Supported Audio bit rates.""" BITRATE_8 = 8 BITRATE_16 = 16 BITRATE_24 = 24 BITRATE_32 = 32 class AudioSampleRates(int, Enum): """Supported Audio sample rates.""" SAMPLERATE_8000 = 8000 SAMPLERATE_11000 = 11000 SAMPLERATE_16000 = 16000 SAMPLERATE_18900 = 18900 SAMPLERATE_22000 = 22000 SAMPLERATE_32000 = 32000 SAMPLERATE_37800 = 37800 SAMPLERATE_44100 = 44100 SAMPLERATE_48000 = 48000 class AudioChannels(int, Enum): """Supported Audio channel.""" CHANNEL_MONO = 1 CHANNEL_STEREO = 2 class SpeechResultState(str, Enum): """Result state of speech.""" SUCCESS = "success" ERROR = "error"
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/stt/const.py
"""Helpers for amcrest component.""" import logging from .const import DOMAIN def service_signal(service, *args): """Encode signal.""" return "_".join([DOMAIN, service, *args]) def log_update_error(logger, action, name, entity_type, error, level=logging.ERROR): """Log an update error.""" logger.log( level, "Could not %s %s %s due to error: %s", action, name, entity_type, error.__class__.__name__, )
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/amcrest/helpers.py
"""Component to interface with various media players.""" import asyncio import base64 import collections from datetime import timedelta import functools as ft import hashlib import logging from random import SystemRandom from typing import Optional from urllib.parse import urlparse from aiohttp import web from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE from aiohttp.typedefs import LooseHeaders import async_timeout import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView from homeassistant.const import ( HTTP_INTERNAL_SERVER_ERROR, HTTP_NOT_FOUND, HTTP_OK, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_SEEK, SERVICE_MEDIA_STOP, SERVICE_SHUFFLE_SET, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, STATE_IDLE, STATE_OFF, STATE_PLAYING, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.network import get_url from homeassistant.loader import bind_hass from .const import ( ATTR_APP_ID, ATTR_APP_NAME, ATTR_INPUT_SOURCE, ATTR_INPUT_SOURCE_LIST, ATTR_MEDIA_ALBUM_ARTIST, ATTR_MEDIA_ALBUM_NAME, ATTR_MEDIA_ARTIST, ATTR_MEDIA_CHANNEL, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_DURATION, ATTR_MEDIA_ENQUEUE, ATTR_MEDIA_EPISODE, ATTR_MEDIA_PLAYLIST, ATTR_MEDIA_POSITION, ATTR_MEDIA_POSITION_UPDATED_AT, ATTR_MEDIA_SEASON, ATTR_MEDIA_SEEK_POSITION, ATTR_MEDIA_SERIES_TITLE, ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_TITLE, ATTR_MEDIA_TRACK, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, ATTR_SOUND_MODE, ATTR_SOUND_MODE_LIST, DOMAIN, SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, SUPPORT_CLEAR_PLAYLIST, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_SELECT_SOUND_MODE, SUPPORT_SELECT_SOURCE, SUPPORT_SHUFFLE_SET, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP, ) # mypy: allow-untyped-defs, no-check-untyped-defs _LOGGER = logging.getLogger(__name__) _RND = SystemRandom() ENTITY_ID_FORMAT = DOMAIN + ".{}" CACHE_IMAGES = "images" CACHE_MAXSIZE = "maxsize" CACHE_LOCK = "lock" CACHE_URL = "url" CACHE_CONTENT = "content" ENTITY_IMAGE_CACHE = {CACHE_IMAGES: collections.OrderedDict(), CACHE_MAXSIZE: 16} SCAN_INTERVAL = timedelta(seconds=10) DEVICE_CLASS_TV = "tv" DEVICE_CLASS_SPEAKER = "speaker" DEVICE_CLASSES = [DEVICE_CLASS_TV, DEVICE_CLASS_SPEAKER] DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES)) MEDIA_PLAYER_PLAY_MEDIA_SCHEMA = { vol.Required(ATTR_MEDIA_CONTENT_TYPE): cv.string, vol.Required(ATTR_MEDIA_CONTENT_ID): cv.string, vol.Optional(ATTR_MEDIA_ENQUEUE): cv.boolean, } ATTR_TO_PROPERTY = [ ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_DURATION, ATTR_MEDIA_POSITION, ATTR_MEDIA_POSITION_UPDATED_AT, ATTR_MEDIA_TITLE, ATTR_MEDIA_ARTIST, ATTR_MEDIA_ALBUM_NAME, ATTR_MEDIA_ALBUM_ARTIST, ATTR_MEDIA_TRACK, ATTR_MEDIA_SERIES_TITLE, ATTR_MEDIA_SEASON, 
ATTR_MEDIA_EPISODE, ATTR_MEDIA_CHANNEL, ATTR_MEDIA_PLAYLIST, ATTR_APP_ID, ATTR_APP_NAME, ATTR_INPUT_SOURCE, ATTR_SOUND_MODE, ATTR_MEDIA_SHUFFLE, ] @bind_hass def is_on(hass, entity_id=None): """ Return true if specified media player entity_id is on. Check all media player if no entity_id specified. """ entity_ids = [entity_id] if entity_id else hass.states.entity_ids(DOMAIN) return any( not hass.states.is_state(entity_id, STATE_OFF) for entity_id in entity_ids ) WS_TYPE_MEDIA_PLAYER_THUMBNAIL = "media_player_thumbnail" SCHEMA_WEBSOCKET_GET_THUMBNAIL = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend( {"type": WS_TYPE_MEDIA_PLAYER_THUMBNAIL, "entity_id": cv.entity_id} ) def _rename_keys(**keys): """Create validator that renames keys. Necessary because the service schema names do not match the command parameters. Async friendly. """ def rename(value): for to_key, from_key in keys.items(): if from_key in value: value[to_key] = value.pop(from_key) return value return rename async def async_setup(hass, config): """Track states and offer events for media_players.""" component = hass.data[DOMAIN] = EntityComponent( logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL ) hass.components.websocket_api.async_register_command( WS_TYPE_MEDIA_PLAYER_THUMBNAIL, websocket_handle_thumbnail, SCHEMA_WEBSOCKET_GET_THUMBNAIL, ) hass.http.register_view(MediaPlayerImageView(component)) await component.async_setup(config) component.async_register_entity_service( SERVICE_TURN_ON, {}, "async_turn_on", [SUPPORT_TURN_ON] ) component.async_register_entity_service( SERVICE_TURN_OFF, {}, "async_turn_off", [SUPPORT_TURN_OFF] ) component.async_register_entity_service( SERVICE_TOGGLE, {}, "async_toggle", [SUPPORT_TURN_OFF | SUPPORT_TURN_ON] ) component.async_register_entity_service( SERVICE_VOLUME_UP, {}, "async_volume_up", [SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP], ) component.async_register_entity_service( SERVICE_VOLUME_DOWN, {}, "async_volume_down", [SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP], ) component.async_register_entity_service( SERVICE_MEDIA_PLAY_PAUSE, {}, "async_media_play_pause", [SUPPORT_PLAY | SUPPORT_PAUSE], ) component.async_register_entity_service( SERVICE_MEDIA_PLAY, {}, "async_media_play", [SUPPORT_PLAY] ) component.async_register_entity_service( SERVICE_MEDIA_PAUSE, {}, "async_media_pause", [SUPPORT_PAUSE] ) component.async_register_entity_service( SERVICE_MEDIA_STOP, {}, "async_media_stop", [SUPPORT_STOP] ) component.async_register_entity_service( SERVICE_MEDIA_NEXT_TRACK, {}, "async_media_next_track", [SUPPORT_NEXT_TRACK] ) component.async_register_entity_service( SERVICE_MEDIA_PREVIOUS_TRACK, {}, "async_media_previous_track", [SUPPORT_PREVIOUS_TRACK], ) component.async_register_entity_service( SERVICE_CLEAR_PLAYLIST, {}, "async_clear_playlist", [SUPPORT_CLEAR_PLAYLIST] ) component.async_register_entity_service( SERVICE_VOLUME_SET, vol.All( cv.make_entity_service_schema( {vol.Required(ATTR_MEDIA_VOLUME_LEVEL): cv.small_float} ), _rename_keys(volume=ATTR_MEDIA_VOLUME_LEVEL), ), "async_set_volume_level", [SUPPORT_VOLUME_SET], ) component.async_register_entity_service( SERVICE_VOLUME_MUTE, vol.All( cv.make_entity_service_schema( {vol.Required(ATTR_MEDIA_VOLUME_MUTED): cv.boolean} ), _rename_keys(mute=ATTR_MEDIA_VOLUME_MUTED), ), "async_mute_volume", [SUPPORT_VOLUME_MUTE], ) component.async_register_entity_service( SERVICE_MEDIA_SEEK, vol.All( cv.make_entity_service_schema( { vol.Required(ATTR_MEDIA_SEEK_POSITION): vol.All( vol.Coerce(float), vol.Range(min=0) ) } ), 
_rename_keys(position=ATTR_MEDIA_SEEK_POSITION), ), "async_media_seek", [SUPPORT_SEEK], ) component.async_register_entity_service( SERVICE_SELECT_SOURCE, {vol.Required(ATTR_INPUT_SOURCE): cv.string}, "async_select_source", [SUPPORT_SELECT_SOURCE], ) component.async_register_entity_service( SERVICE_SELECT_SOUND_MODE, {vol.Required(ATTR_SOUND_MODE): cv.string}, "async_select_sound_mode", [SUPPORT_SELECT_SOUND_MODE], ) component.async_register_entity_service( SERVICE_PLAY_MEDIA, vol.All( cv.make_entity_service_schema(MEDIA_PLAYER_PLAY_MEDIA_SCHEMA), _rename_keys( media_type=ATTR_MEDIA_CONTENT_TYPE, media_id=ATTR_MEDIA_CONTENT_ID, enqueue=ATTR_MEDIA_ENQUEUE, ), ), "async_play_media", [SUPPORT_PLAY_MEDIA], ) component.async_register_entity_service( SERVICE_SHUFFLE_SET, {vol.Required(ATTR_MEDIA_SHUFFLE): cv.boolean}, "async_set_shuffle", [SUPPORT_SHUFFLE_SET], ) return True async def async_setup_entry(hass, entry): """Set up a config entry.""" return await hass.data[DOMAIN].async_setup_entry(entry) async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.data[DOMAIN].async_unload_entry(entry) class MediaPlayerEntity(Entity): """ABC for media player entities.""" _access_token: Optional[str] = None # Implement these for your media player @property def state(self): """State of the player.""" return None @property def access_token(self) -> str: """Access token for this media player.""" if self._access_token is None: self._access_token = hashlib.sha256( _RND.getrandbits(256).to_bytes(32, "little") ).hexdigest() return self._access_token @property def volume_level(self): """Volume level of the media player (0..1).""" return None @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return None @property def media_content_id(self): """Content ID of current playing media.""" return None @property def media_content_type(self): """Content type of current playing media.""" return None @property def media_duration(self): """Duration of current playing media in seconds.""" return None @property def media_position(self): """Position of current playing media in seconds.""" return None @property def media_position_updated_at(self): """When was the position of the current playing media valid. Returns value from homeassistant.util.dt.utcnow(). 
""" return None @property def media_image_url(self): """Image url of current playing media.""" return None @property def media_image_remotely_accessible(self) -> bool: """If the image url is remotely accessible.""" return False @property def media_image_hash(self): """Hash value for media image.""" url = self.media_image_url if url is not None: return hashlib.sha256(url.encode("utf-8")).hexdigest()[:16] return None async def async_get_media_image(self): """Fetch media image of current playing image.""" url = self.media_image_url if url is None: return None, None return await _async_fetch_image(self.hass, url) @property def media_title(self): """Title of current playing media.""" return None @property def media_artist(self): """Artist of current playing media, music track only.""" return None @property def media_album_name(self): """Album name of current playing media, music track only.""" return None @property def media_album_artist(self): """Album artist of current playing media, music track only.""" return None @property def media_track(self): """Track number of current playing media, music track only.""" return None @property def media_series_title(self): """Title of series of current playing media, TV show only.""" return None @property def media_season(self): """Season of current playing media, TV show only.""" return None @property def media_episode(self): """Episode of current playing media, TV show only.""" return None @property def media_channel(self): """Channel currently playing.""" return None @property def media_playlist(self): """Title of Playlist currently playing.""" return None @property def app_id(self): """ID of the current running app.""" return None @property def app_name(self): """Name of the current running app.""" return None @property def source(self): """Name of the current input source.""" return None @property def source_list(self): """List of available input sources.""" return None @property def sound_mode(self): """Name of the current sound mode.""" return None @property def sound_mode_list(self): """List of available sound modes.""" return None @property def shuffle(self): """Boolean if shuffle is enabled.""" return None @property def supported_features(self): """Flag media player features that are supported.""" return 0 def turn_on(self): """Turn the media player on.""" raise NotImplementedError() async def async_turn_on(self): """Turn the media player on.""" await self.hass.async_add_job(self.turn_on) def turn_off(self): """Turn the media player off.""" raise NotImplementedError() async def async_turn_off(self): """Turn the media player off.""" await self.hass.async_add_job(self.turn_off) def mute_volume(self, mute): """Mute the volume.""" raise NotImplementedError() async def async_mute_volume(self, mute): """Mute the volume.""" await self.hass.async_add_job(self.mute_volume, mute) def set_volume_level(self, volume): """Set volume level, range 0..1.""" raise NotImplementedError() async def async_set_volume_level(self, volume): """Set volume level, range 0..1.""" await self.hass.async_add_job(self.set_volume_level, volume) def media_play(self): """Send play command.""" raise NotImplementedError() async def async_media_play(self): """Send play command.""" await self.hass.async_add_job(self.media_play) def media_pause(self): """Send pause command.""" raise NotImplementedError() async def async_media_pause(self): """Send pause command.""" await self.hass.async_add_job(self.media_pause) def media_stop(self): """Send stop command.""" raise NotImplementedError() 
async def async_media_stop(self): """Send stop command.""" await self.hass.async_add_job(self.media_stop) def media_previous_track(self): """Send previous track command.""" raise NotImplementedError() async def async_media_previous_track(self): """Send previous track command.""" await self.hass.async_add_job(self.media_previous_track) def media_next_track(self): """Send next track command.""" raise NotImplementedError() async def async_media_next_track(self): """Send next track command.""" await self.hass.async_add_job(self.media_next_track) def media_seek(self, position): """Send seek command.""" raise NotImplementedError() async def async_media_seek(self, position): """Send seek command.""" await self.hass.async_add_job(self.media_seek, position) def play_media(self, media_type, media_id, **kwargs): """Play a piece of media.""" raise NotImplementedError() async def async_play_media(self, media_type, media_id, **kwargs): """Play a piece of media.""" await self.hass.async_add_job( ft.partial(self.play_media, media_type, media_id, **kwargs) ) def select_source(self, source): """Select input source.""" raise NotImplementedError() async def async_select_source(self, source): """Select input source.""" await self.hass.async_add_job(self.select_source, source) def select_sound_mode(self, sound_mode): """Select sound mode.""" raise NotImplementedError() async def async_select_sound_mode(self, sound_mode): """Select sound mode.""" await self.hass.async_add_job(self.select_sound_mode, sound_mode) def clear_playlist(self): """Clear players playlist.""" raise NotImplementedError() async def async_clear_playlist(self): """Clear players playlist.""" await self.hass.async_add_job(self.clear_playlist) def set_shuffle(self, shuffle): """Enable/disable shuffle mode.""" raise NotImplementedError() async def async_set_shuffle(self, shuffle): """Enable/disable shuffle mode.""" await self.hass.async_add_job(self.set_shuffle, shuffle) # No need to overwrite these. 
@property def support_play(self): """Boolean if play is supported.""" return bool(self.supported_features & SUPPORT_PLAY) @property def support_pause(self): """Boolean if pause is supported.""" return bool(self.supported_features & SUPPORT_PAUSE) @property def support_stop(self): """Boolean if stop is supported.""" return bool(self.supported_features & SUPPORT_STOP) @property def support_seek(self): """Boolean if seek is supported.""" return bool(self.supported_features & SUPPORT_SEEK) @property def support_volume_set(self): """Boolean if setting volume is supported.""" return bool(self.supported_features & SUPPORT_VOLUME_SET) @property def support_volume_mute(self): """Boolean if muting volume is supported.""" return bool(self.supported_features & SUPPORT_VOLUME_MUTE) @property def support_previous_track(self): """Boolean if previous track command supported.""" return bool(self.supported_features & SUPPORT_PREVIOUS_TRACK) @property def support_next_track(self): """Boolean if next track command supported.""" return bool(self.supported_features & SUPPORT_NEXT_TRACK) @property def support_play_media(self): """Boolean if play media command supported.""" return bool(self.supported_features & SUPPORT_PLAY_MEDIA) @property def support_select_source(self): """Boolean if select source command supported.""" return bool(self.supported_features & SUPPORT_SELECT_SOURCE) @property def support_select_sound_mode(self): """Boolean if select sound mode command supported.""" return bool(self.supported_features & SUPPORT_SELECT_SOUND_MODE) @property def support_clear_playlist(self): """Boolean if clear playlist command supported.""" return bool(self.supported_features & SUPPORT_CLEAR_PLAYLIST) @property def support_shuffle_set(self): """Boolean if shuffle is supported.""" return bool(self.supported_features & SUPPORT_SHUFFLE_SET) async def async_toggle(self): """Toggle the power on the media player.""" if hasattr(self, "toggle"): # pylint: disable=no-member await self.hass.async_add_job(self.toggle) return if self.state in [STATE_OFF, STATE_IDLE]: await self.async_turn_on() else: await self.async_turn_off() async def async_volume_up(self): """Turn volume up for media player. This method is a coroutine. """ if hasattr(self, "volume_up"): # pylint: disable=no-member await self.hass.async_add_job(self.volume_up) return if self.volume_level < 1 and self.supported_features & SUPPORT_VOLUME_SET: await self.async_set_volume_level(min(1, self.volume_level + 0.1)) async def async_volume_down(self): """Turn volume down for media player. This method is a coroutine. 
""" if hasattr(self, "volume_down"): # pylint: disable=no-member await self.hass.async_add_job(self.volume_down) return if self.volume_level > 0 and self.supported_features & SUPPORT_VOLUME_SET: await self.async_set_volume_level(max(0, self.volume_level - 0.1)) async def async_media_play_pause(self): """Play or pause the media player.""" if hasattr(self, "media_play_pause"): # pylint: disable=no-member await self.hass.async_add_job(self.media_play_pause) return if self.state == STATE_PLAYING: await self.async_media_pause() else: await self.async_media_play() @property def entity_picture(self): """Return image of the media playing.""" if self.state == STATE_OFF: return None if self.media_image_remotely_accessible: return self.media_image_url return self.media_image_local @property def media_image_local(self): """Return local url to media image.""" image_hash = self.media_image_hash if image_hash is None: return None return ( f"/api/media_player_proxy/{self.entity_id}?" f"token={self.access_token}&cache={image_hash}" ) @property def capability_attributes(self): """Return capability attributes.""" supported_features = self.supported_features or 0 data = {} if supported_features & SUPPORT_SELECT_SOURCE: source_list = self.source_list if source_list: data[ATTR_INPUT_SOURCE_LIST] = source_list if supported_features & SUPPORT_SELECT_SOUND_MODE: sound_mode_list = self.sound_mode_list if sound_mode_list: data[ATTR_SOUND_MODE_LIST] = sound_mode_list return data @property def state_attributes(self): """Return the state attributes.""" if self.state == STATE_OFF: return None state_attr = {} for attr in ATTR_TO_PROPERTY: value = getattr(self, attr) if value is not None: state_attr[attr] = value if self.media_image_remotely_accessible: state_attr["entity_picture_local"] = self.media_image_local return state_attr async def _async_fetch_image(hass, url): """Fetch image. Images are cached in memory (the images are typically 10-100kB in size). 
""" cache_images = ENTITY_IMAGE_CACHE[CACHE_IMAGES] cache_maxsize = ENTITY_IMAGE_CACHE[CACHE_MAXSIZE] if urlparse(url).hostname is None: url = f"{get_url(hass)}{url}" if url not in cache_images: cache_images[url] = {CACHE_LOCK: asyncio.Lock()} async with cache_images[url][CACHE_LOCK]: if CACHE_CONTENT in cache_images[url]: return cache_images[url][CACHE_CONTENT] content, content_type = (None, None) websession = async_get_clientsession(hass) try: with async_timeout.timeout(10): response = await websession.get(url) if response.status == HTTP_OK: content = await response.read() content_type = response.headers.get(CONTENT_TYPE) if content_type: content_type = content_type.split(";")[0] cache_images[url][CACHE_CONTENT] = content, content_type except asyncio.TimeoutError: pass while len(cache_images) > cache_maxsize: cache_images.popitem(last=False) return content, content_type class MediaPlayerImageView(HomeAssistantView): """Media player view to serve an image.""" requires_auth = False url = "/api/media_player_proxy/{entity_id}" name = "api:media_player:image" def __init__(self, component): """Initialize a media player view.""" self.component = component async def get(self, request: web.Request, entity_id: str) -> web.Response: """Start a get request.""" player = self.component.get_entity(entity_id) if player is None: status = HTTP_NOT_FOUND if request[KEY_AUTHENTICATED] else 401 return web.Response(status=status) authenticated = ( request[KEY_AUTHENTICATED] or request.query.get("token") == player.access_token ) if not authenticated: return web.Response(status=401) data, content_type = await player.async_get_media_image() if data is None: return web.Response(status=HTTP_INTERNAL_SERVER_ERROR) headers: LooseHeaders = {CACHE_CONTROL: "max-age=3600"} return web.Response(body=data, content_type=content_type, headers=headers) @websocket_api.async_response async def websocket_handle_thumbnail(hass, connection, msg): """Handle get media player cover command. Async friendly. """ component = hass.data[DOMAIN] player = component.get_entity(msg["entity_id"]) if player is None: connection.send_message( websocket_api.error_message( msg["id"], "entity_not_found", "Entity not found" ) ) return _LOGGER.warning( "The websocket command media_player_thumbnail is deprecated. Use /api/media_player_proxy instead." ) data, content_type = await player.async_get_media_image() if data is None: connection.send_message( websocket_api.error_message( msg["id"], "thumbnail_fetch_failed", "Failed to fetch thumbnail" ) ) return await connection.send_big_result( msg["id"], { "content_type": content_type, "content": base64.b64encode(data).decode("utf-8"), }, ) class MediaPlayerDevice(MediaPlayerEntity): """ABC for media player devices (for backwards compatibility).""" def __init_subclass__(cls, **kwargs): """Print deprecation warning.""" super().__init_subclass__(**kwargs) _LOGGER.warning( "MediaPlayerDevice is deprecated, modify %s to extend MediaPlayerEntity", cls.__name__, )
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/media_player/__init__.py
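The test_invalid_availability_template_keeps_component_available case in the fan test file above asserts on the literal error text "UndefinedError: 'x' is undefined". That message comes straight from Jinja2, which Home Assistant's template helper wraps; a standalone sketch of why "{{ x - 12 }}" produces it, using plain jinja2 outside Home Assistant (the try/except scaffolding is illustrative):

# Standalone illustration of the UndefinedError the test asserts on.
from jinja2 import Template
from jinja2.exceptions import UndefinedError

template = Template("{{ x - 12 }}")  # same template as availability_template
try:
    template.render()  # 'x' is never supplied
except UndefinedError as err:
    print(err)  # -> 'x' is undefined

Arithmetic on an undefined variable raises rather than rendering, which is why the component logs the failure and keeps the entity available instead of flipping it to unavailable.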
"""Add support for the Xiaomi TVs.""" import logging import pymitv import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_STEP, ) from homeassistant.const import CONF_HOST, CONF_NAME, STATE_OFF, STATE_ON import homeassistant.helpers.config_validation as cv DEFAULT_NAME = "Xiaomi TV" _LOGGER = logging.getLogger(__name__) SUPPORT_XIAOMI_TV = SUPPORT_VOLUME_STEP | SUPPORT_TURN_ON | SUPPORT_TURN_OFF # No host is needed for configuration, however it can be set. PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Xiaomi TV platform.""" # If a hostname is set. Discovery is skipped. host = config.get(CONF_HOST) name = config.get(CONF_NAME) if host is not None: # Check if there's a valid TV at the IP address. if not pymitv.Discover().check_ip(host): _LOGGER.error("Could not find Xiaomi TV with specified IP: %s", host) else: # Register TV with Home Assistant. add_entities([XiaomiTV(host, name)]) else: # Otherwise, discover TVs on network. add_entities(XiaomiTV(tv, DEFAULT_NAME) for tv in pymitv.Discover().scan()) class XiaomiTV(MediaPlayerEntity): """Represent the Xiaomi TV for Home Assistant.""" def __init__(self, ip, name): """Receive IP address and name to construct class.""" # Initialize the Xiaomi TV. self._tv = pymitv.TV(ip) # Default name value, only to be overridden by user. self._name = name self._state = STATE_OFF @property def name(self): """Return the display name of this TV.""" return self._name @property def state(self): """Return _state variable, containing the appropriate constant.""" return self._state @property def assumed_state(self): """Indicate that state is assumed.""" return True @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_XIAOMI_TV def turn_off(self): """ Instruct the TV to turn sleep. This is done instead of turning off, because the TV won't accept any input when turned off. Thus, the user would be unable to turn the TV back on, unless it's done manually. """ if self._state != STATE_OFF: self._tv.sleep() self._state = STATE_OFF def turn_on(self): """Wake the TV back up from sleep.""" if self._state != STATE_ON: self._tv.wake() self._state = STATE_ON def volume_up(self): """Increase volume by one.""" self._tv.volume_up() def volume_down(self): """Decrease volume by one.""" self._tv.volume_down()
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/xiaomi_tv/media_player.py
"""Time-based One Time Password auth module.""" import asyncio from io import BytesIO import logging from typing import Any, Dict, Optional, Tuple import voluptuous as vol from homeassistant.auth.models import User from homeassistant.core import HomeAssistant from . import ( MULTI_FACTOR_AUTH_MODULE_SCHEMA, MULTI_FACTOR_AUTH_MODULES, MultiFactorAuthModule, SetupFlow, ) REQUIREMENTS = ["pyotp==2.3.0", "PyQRCode==1.2.1"] CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA) STORAGE_VERSION = 1 STORAGE_KEY = "auth_module.totp" STORAGE_USERS = "users" STORAGE_USER_ID = "user_id" STORAGE_OTA_SECRET = "ota_secret" INPUT_FIELD_CODE = "code" DUMMY_SECRET = "FPPTH34D4E3MI2HG" _LOGGER = logging.getLogger(__name__) def _generate_qr_code(data: str) -> str: """Generate a base64 PNG string represent QR Code image of data.""" import pyqrcode # pylint: disable=import-outside-toplevel qr_code = pyqrcode.create(data) with BytesIO() as buffer: qr_code.svg(file=buffer, scale=4) return str( buffer.getvalue() .decode("ascii") .replace("\n", "") .replace( '<?xml version="1.0" encoding="UTF-8"?>' '<svg xmlns="http://www.w3.org/2000/svg"', "<svg", ) ) def _generate_secret_and_qr_code(username: str) -> Tuple[str, str, str]: """Generate a secret, url, and QR code.""" import pyotp # pylint: disable=import-outside-toplevel ota_secret = pyotp.random_base32() url = pyotp.totp.TOTP(ota_secret).provisioning_uri( username, issuer_name="Home Assistant" ) image = _generate_qr_code(url) return ota_secret, url, image @MULTI_FACTOR_AUTH_MODULES.register("totp") class TotpAuthModule(MultiFactorAuthModule): """Auth module validate time-based one time password.""" DEFAULT_TITLE = "Time-based One Time Password" MAX_RETRY_TIME = 5 def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None: """Initialize the user data store.""" super().__init__(hass, config) self._users: Optional[Dict[str, str]] = None self._user_store = hass.helpers.storage.Store( STORAGE_VERSION, STORAGE_KEY, private=True ) self._init_lock = asyncio.Lock() @property def input_schema(self) -> vol.Schema: """Validate login flow input data.""" return vol.Schema({INPUT_FIELD_CODE: str}) async def _async_load(self) -> None: """Load stored data.""" async with self._init_lock: if self._users is not None: return data = await self._user_store.async_load() if data is None: data = {STORAGE_USERS: {}} self._users = data.get(STORAGE_USERS, {}) async def _async_save(self) -> None: """Save data.""" await self._user_store.async_save({STORAGE_USERS: self._users}) def _add_ota_secret(self, user_id: str, secret: Optional[str] = None) -> str: """Create a ota_secret for user.""" import pyotp # pylint: disable=import-outside-toplevel ota_secret: str = secret or pyotp.random_base32() self._users[user_id] = ota_secret # type: ignore return ota_secret async def async_setup_flow(self, user_id: str) -> SetupFlow: """Return a data entry flow handler for setup module. 
MFA modules should extend SetupFlow. """ user = await self.hass.auth.async_get_user(user_id) assert user is not None return TotpSetupFlow(self, self.input_schema, user) async def async_setup_user(self, user_id: str, setup_data: Any) -> str: """Set up auth module for user.""" if self._users is None: await self._async_load() result = await self.hass.async_add_executor_job( self._add_ota_secret, user_id, setup_data.get("secret") ) await self._async_save() return result async def async_depose_user(self, user_id: str) -> None: """Depose auth module for user.""" if self._users is None: await self._async_load() if self._users.pop(user_id, None): # type: ignore await self._async_save() async def async_is_user_setup(self, user_id: str) -> bool: """Return whether the user is set up.""" if self._users is None: await self._async_load() return user_id in self._users # type: ignore async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool: """Return True if validation passed.""" if self._users is None: await self._async_load() # user_input has been validated in the caller # setting INPUT_FIELD_CODE as vol.Required is not user friendly return await self.hass.async_add_executor_job( self._validate_2fa, user_id, user_input.get(INPUT_FIELD_CODE, "") ) def _validate_2fa(self, user_id: str, code: str) -> bool: """Validate two factor authentication code.""" import pyotp # pylint: disable=import-outside-toplevel ota_secret = self._users.get(user_id) # type: ignore if ota_secret is None: # Even if we cannot find the user, we still verify # to keep the timing the same as when the user is found. pyotp.TOTP(DUMMY_SECRET).verify(code, valid_window=1) return False return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1)) class TotpSetupFlow(SetupFlow): """Handler for the setup flow.""" def __init__( self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User ) -> None: """Initialize the setup flow.""" super().__init__(auth_module, setup_schema, user.id) # to fix typing complaint self._auth_module: TotpAuthModule = auth_module self._user = user self._ota_secret: Optional[str] = None self._url = None # type: Optional[str] self._image = None # type: Optional[str] async def async_step_init( self, user_input: Optional[Dict[str, str]] = None ) -> Dict[str, Any]: """Handle the first step of setup flow. Return self.async_show_form(step_id='init') if user_input is None. Return self.async_create_entry(data={'result': result}) when finished. """ import pyotp # pylint: disable=import-outside-toplevel errors: Dict[str, str] = {} if user_input: verified = await self.hass.async_add_executor_job( # type: ignore pyotp.TOTP(self._ota_secret).verify, user_input["code"] ) if verified: result = await self._auth_module.async_setup_user( self._user_id, {"secret": self._ota_secret} ) return self.async_create_entry( title=self._auth_module.name, data={"result": result} ) errors["base"] = "invalid_code" else: hass = self._auth_module.hass ( self._ota_secret, self._url, self._image, ) = await hass.async_add_executor_job( _generate_secret_and_qr_code, # type: ignore str(self._user.name), ) return self.async_show_form( step_id="init", data_schema=self._setup_schema, description_placeholders={ "code": self._ota_secret, "url": self._url, "qr_code": self._image, }, errors=errors, )
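The pyotp surface this module relies on is small: random_base32 for secrets, provisioning_uri for the otpauth URL that _generate_qr_code renders as an SVG, and verify with valid_window=1 so codes from the adjacent 30-second windows still pass. A quick round-trip sketch; TOTP.now() is the one call not used above, and it simply returns the code for the current window:

# Round-trip of the pyotp calls used by TotpAuthModule.
import pyotp

secret = pyotp.random_base32()
totp = pyotp.TOTP(secret)

# The otpauth:// URL that the setup flow shows as a QR code.
print(totp.provisioning_uri("user@example.com", issuer_name="Home Assistant"))

code = totp.now()  # code for the current 30-second window
print(totp.verify(code, valid_window=1))  # True: current window accepted
# valid_window=1 also accepts the immediately adjacent windows, which is
# why _validate_2fa tolerates small clock drift between server and phone.

Verifying against DUMMY_SECRET when the user is unknown, as _validate_2fa does, keeps the failure path's timing close to the success path so an attacker cannot probe for valid user IDs.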
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/auth/mfa_modules/totp.py
"""Support for MAX! binary sensors via MAX! Cube.""" import logging from homeassistant.components.binary_sensor import BinarySensorEntity from . import DATA_KEY _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Iterate through all MAX! Devices and add window shutters.""" devices = [] for handler in hass.data[DATA_KEY].values(): cube = handler.cube for device in cube.devices: name = f"{cube.room_by_id(device.room_id).name} {device.name}" # Only add Window Shutters if cube.is_windowshutter(device): devices.append(MaxCubeShutter(handler, name, device.rf_address)) if devices: add_entities(devices) class MaxCubeShutter(BinarySensorEntity): """Representation of a MAX! Cube Binary Sensor device.""" def __init__(self, handler, name, rf_address): """Initialize MAX! Cube BinarySensorEntity.""" self._name = name self._sensor_type = "window" self._rf_address = rf_address self._cubehandle = handler self._state = None @property def should_poll(self): """Return the polling state.""" return True @property def name(self): """Return the name of the BinarySensorEntity.""" return self._name @property def device_class(self): """Return the class of this sensor.""" return self._sensor_type @property def is_on(self): """Return true if the binary sensor is on/open.""" return self._state def update(self): """Get latest data from MAX! Cube.""" self._cubehandle.update() device = self._cubehandle.cube.device_by_rf(self._rf_address) self._state = device.is_open
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/maxcube/binary_sensor.py
"""Support for Xiaomi Miio.""" import logging from homeassistant import config_entries, core from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.helpers import device_registry as dr from .config_flow import CONF_FLOW_TYPE, CONF_GATEWAY from .const import DOMAIN from .gateway import ConnectXiaomiGateway _LOGGER = logging.getLogger(__name__) GATEWAY_PLATFORMS = ["alarm_control_panel"] async def async_setup(hass: core.HomeAssistant, config: dict): """Set up the Xiaomi Miio component.""" return True async def async_setup_entry( hass: core.HomeAssistant, entry: config_entries.ConfigEntry ): """Set up the Xiaomi Miio components from a config entry.""" hass.data[DOMAIN] = {} if entry.data[CONF_FLOW_TYPE] == CONF_GATEWAY: if not await async_setup_gateway_entry(hass, entry): return False return True async def async_setup_gateway_entry( hass: core.HomeAssistant, entry: config_entries.ConfigEntry ): """Set up the Xiaomi Gateway component from a config entry.""" host = entry.data[CONF_HOST] token = entry.data[CONF_TOKEN] name = entry.title gateway_id = entry.unique_id # For backwards compat if entry.unique_id.endswith("-gateway"): hass.config_entries.async_update_entry(entry, unique_id=entry.data["mac"]) # Connect to gateway gateway = ConnectXiaomiGateway(hass) if not await gateway.async_connect_gateway(host, token): return False gateway_info = gateway.gateway_info hass.data[DOMAIN][entry.entry_id] = gateway.gateway_device gateway_model = f"{gateway_info.model}-{gateway_info.hardware_version}" device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, gateway_info.mac_address)}, identifiers={(DOMAIN, gateway_id)}, manufacturer="Xiaomi", name=name, model=gateway_model, sw_version=gateway_info.firmware_version, ) for component in GATEWAY_PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/xiaomi_miio/__init__.py
"""Support for switching devices via Pilight to on and off.""" import logging import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, LightEntity, ) from homeassistant.const import CONF_LIGHTS import homeassistant.helpers.config_validation as cv from .base_class import SWITCHES_SCHEMA, PilightBaseDevice from .const import CONF_DIMLEVEL_MAX, CONF_DIMLEVEL_MIN _LOGGER = logging.getLogger(__name__) LIGHTS_SCHEMA = SWITCHES_SCHEMA.extend( { vol.Optional(CONF_DIMLEVEL_MIN, default=0): cv.positive_int, vol.Optional(CONF_DIMLEVEL_MAX, default=15): cv.positive_int, } ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_LIGHTS): vol.Schema({cv.string: LIGHTS_SCHEMA})} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Pilight platform.""" switches = config.get(CONF_LIGHTS) devices = [] for dev_name, dev_config in switches.items(): devices.append(PilightLight(hass, dev_name, dev_config)) add_entities(devices) class PilightLight(PilightBaseDevice, LightEntity): """Representation of a Pilight switch.""" def __init__(self, hass, name, config): """Initialize a switch.""" super().__init__(hass, name, config) self._dimlevel_min = config.get(CONF_DIMLEVEL_MIN) self._dimlevel_max = config.get(CONF_DIMLEVEL_MAX) @property def brightness(self): """Return the brightness.""" return self._brightness @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS def turn_on(self, **kwargs): """Turn the switch on by calling pilight.send service with on code.""" self._brightness = kwargs.get(ATTR_BRIGHTNESS, 255) dimlevel = int(self._brightness / (255 / self._dimlevel_max)) self.set_state(turn_on=True, dimlevel=dimlevel)
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
    hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON)
    await hass.async_block_till_done()

    # Device state should not be unavailable
    assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE

    # When availability template returns false
    hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF)
    await hass.async_block_till_done()

    # Device state should be unavailable
    assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE


async def test_templates_with_valid_values(hass, calls):
    """Test templates with valid values."""
    with assert_setup_component(1, "fan"):
        assert await setup.async_setup_component(
            hass,
            "fan",
            {
                "fan": {
                    "platform": "template",
                    "fans": {
                        "test_fan": {
                            "value_template": "{{ 'on' }}",
                            "speed_template": "{{ 'medium' }}",
                            "oscillating_template": "{{ 1 == 1 }}",
                            "direction_template": "{{ 'forward' }}",
                            "turn_on": {"service": "script.fan_on"},
                            "turn_off": {"service": "script.fan_off"},
                        }
                    },
                }
            },
        )

    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD)


async def test_templates_invalid_values(hass, calls):
    """Test templates with invalid values."""
    with assert_setup_component(1, "fan"):
        assert await setup.async_setup_component(
            hass,
            "fan",
            {
                "fan": {
                    "platform": "template",
                    "fans": {
                        "test_fan": {
                            "value_template": "{{ 'abc' }}",
                            "speed_template": "{{ '0' }}",
                            "oscillating_template": "{{ 'xyz' }}",
                            "direction_template": "{{ 'right' }}",
                            "turn_on": {"service": "script.fan_on"},
                            "turn_off": {"service": "script.fan_off"},
                        }
                    },
                }
            },
        )

    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    _verify(hass, STATE_OFF, None, None, None)


async def test_invalid_availability_template_keeps_component_available(hass, caplog):
    """Test that an invalid availability keeps the device available."""
    with assert_setup_component(1, "fan"):
        assert await setup.async_setup_component(
            hass,
            "fan",
            {
                "fan": {
                    "platform": "template",
                    "fans": {
                        "test_fan": {
                            "value_template": "{{ 'on' }}",
                            "availability_template": "{{ x - 12 }}",
                            "speed_template": "{{ states('input_select.speed') }}",
                            "oscillating_template": "{{ states('input_select.osc') }}",
                            "direction_template": "{{ states('input_select.direction') }}",
                            "turn_on": {"service": "script.fan_on"},
                            "turn_off": {"service": "script.fan_off"},
                        }
                    },
                }
            },
        )

    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE
    assert "Could not render availability_template template" in caplog.text
    assert "UndefinedError: 'x' is undefined" in caplog.text


# End of template tests #


# Function tests #
async def test_on_off(hass, calls):
    """Test turn on and turn off."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # verify
    assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON
    _verify(hass, STATE_ON, None, None, None)

    # Turn off fan
    await common.async_turn_off(hass, _TEST_FAN)

    # verify
    assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF
    _verify(hass, STATE_OFF, None, None, None)


async def test_on_with_speed(hass, calls):
    """Test turn on with speed."""
    await _register_components(hass)

    # Turn on fan with high speed
    await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH)

    # verify
    assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)


async def test_set_speed(hass, calls):
    """Test set valid speed."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's speed to high
    await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH)

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)

    # Set fan's speed to medium
    await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM)

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM
    _verify(hass, STATE_ON, SPEED_MEDIUM, None, None)


async def test_set_invalid_speed_from_initial_stage(hass, calls):
    """Test set invalid speed when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's speed to 'invalid'
    await common.async_set_speed(hass, _TEST_FAN, "invalid")

    # verify speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == ""
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_speed(hass, calls):
    """Test set invalid speed when fan has valid speed."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's speed to high
    await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH)

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)

    # Set fan's speed to 'invalid'
    await common.async_set_speed(hass, _TEST_FAN, "invalid")

    # verify speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)


async def test_custom_speed_list(hass, calls):
    """Test set custom speed list."""
    await _register_components(hass, ["1", "2", "3"])

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's speed to '1'
    await common.async_set_speed(hass, _TEST_FAN, "1")

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == "1"
    _verify(hass, STATE_ON, "1", None, None)

    # Set fan's speed to 'medium' which is invalid
    await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM)

    # verify that speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == "1"
    _verify(hass, STATE_ON, "1", None, None)


async def test_set_osc(hass, calls):
    """Test set oscillating."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's osc to True
    await common.async_oscillate(hass, _TEST_FAN, True)

    # verify
    assert hass.states.get(_OSC_INPUT).state == "True"
    _verify(hass, STATE_ON, None, True, None)

    # Set fan's osc to False
    await common.async_oscillate(hass, _TEST_FAN, False)

    # verify
    assert hass.states.get(_OSC_INPUT).state == "False"
    _verify(hass, STATE_ON, None, False, None)


async def test_set_invalid_osc_from_initial_state(hass, calls):
    """Test set invalid oscillating when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's osc to 'invalid'
    with pytest.raises(vol.Invalid):
        await common.async_oscillate(hass, _TEST_FAN, "invalid")

    # verify
    assert hass.states.get(_OSC_INPUT).state == ""
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_osc(hass, calls):
    """Test set invalid oscillating when fan has valid osc."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's osc to True
    await common.async_oscillate(hass, _TEST_FAN, True)

    # verify
    assert hass.states.get(_OSC_INPUT).state == "True"
    _verify(hass, STATE_ON, None, True, None)

    # Set fan's osc to None
    with pytest.raises(vol.Invalid):
        await common.async_oscillate(hass, _TEST_FAN, None)

    # verify osc is unchanged
    assert hass.states.get(_OSC_INPUT).state == "True"
    _verify(hass, STATE_ON, None, True, None)


async def test_set_direction(hass, calls):
    """Test set valid direction."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's direction to forward
    await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD)

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)

    # Set fan's direction to reverse
    await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE)

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE
    _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE)


async def test_set_invalid_direction_from_initial_stage(hass, calls):
    """Test set invalid direction when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's direction to 'invalid'
    await common.async_set_direction(hass, _TEST_FAN, "invalid")

    # verify direction is unchanged
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == ""
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_direction(hass, calls):
    """Test set invalid direction when fan has valid direction."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's direction to forward
    await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD)

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)

    # Set fan's direction to 'invalid'
    await common.async_set_direction(hass, _TEST_FAN, "invalid")

    # verify direction is unchanged
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)


def _verify(
    hass, expected_state, expected_speed, expected_oscillating, expected_direction
):
    """Verify fan's state, speed and osc."""
    state = hass.states.get(_TEST_FAN)
    attributes = state.attributes
    assert state.state == expected_state
    assert attributes.get(ATTR_SPEED) == expected_speed
    assert attributes.get(ATTR_OSCILLATING) == expected_oscillating
    assert attributes.get(ATTR_DIRECTION) == expected_direction


async def _register_components(hass, speed_list=None):
    """Register basic components for testing."""
    with assert_setup_component(1, "input_boolean"):
        assert await setup.async_setup_component(
            hass, "input_boolean", {"input_boolean": {"state": None}}
        )

    with assert_setup_component(3, "input_select"):
        assert await setup.async_setup_component(
            hass,
            "input_select",
            {
                "input_select": {
                    "speed": {
                        "name": "Speed",
                        "options": [
                            "",
                            SPEED_LOW,
                            SPEED_MEDIUM,
                            SPEED_HIGH,
                            "1",
                            "2",
                            "3",
                        ],
                    },
                    "osc": {"name": "oscillating", "options": ["", "True", "False"]},
                    "direction": {
                        "name": "Direction",
                        "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE],
                    },
                }
            },
        )

    with assert_setup_component(1, "fan"):
        value_template = """
        {% if is_state('input_boolean.state', 'on') %}
            {{ 'on' }}
        {% else %}
            {{ 'off' }}
        {% endif %}
        """

        test_fan_config = {
            "value_template": value_template,
            "speed_template": "{{ states('input_select.speed') }}",
            "oscillating_template": "{{ states('input_select.osc') }}",
            "direction_template": "{{ states('input_select.direction') }}",
            "turn_on": {
                "service": "input_boolean.turn_on",
                "entity_id": _STATE_INPUT_BOOLEAN,
            },
            "turn_off": {
                "service": "input_boolean.turn_off",
                "entity_id": _STATE_INPUT_BOOLEAN,
            },
            "set_speed": {
                "service": "input_select.select_option",
                "data_template": {
                    "entity_id": _SPEED_INPUT_SELECT,
                    "option": "{{ speed }}",
                },
            },
            "set_oscillating": {
                "service": "input_select.select_option",
                "data_template": {
                    "entity_id": _OSC_INPUT,
                    "option": "{{ oscillating }}",
                },
            },
            "set_direction": {
                "service": "input_select.select_option",
                "data_template": {
                    "entity_id": _DIRECTION_INPUT_SELECT,
                    "option": "{{ direction }}",
                },
            },
        }

        if speed_list:
            test_fan_config["speeds"] = speed_list

        assert await setup.async_setup_component(
            hass,
            "fan",
            {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}},
        )

    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
repo_name: robbiet480/home-assistant
test_path: tests/components/template/test_fan.py
code_path: homeassistant/components/pilight/light.py
"""Support for Fibaro lights.""" import asyncio from functools import partial import logging from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.const import CONF_WHITE_VALUE import homeassistant.util.color as color_util from . import CONF_COLOR, CONF_DIMMING, CONF_RESET_COLOR, FIBARO_DEVICES, FibaroDevice _LOGGER = logging.getLogger(__name__) def scaleto255(value): """Scale the input value from 0-100 to 0-255.""" # Fibaro has a funny way of storing brightness either 0-100 or 0-99 # depending on device type (e.g. dimmer vs led) if value > 98: value = 100 return max(0, min(255, ((value * 255.0) / 100.0))) def scaleto100(value): """Scale the input value from 0-255 to 0-100.""" # Make sure a low but non-zero value is not rounded down to zero if 0 < value < 3: return 1 return max(0, min(100, ((value * 100.0) / 255.0))) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Perform the setup for Fibaro controller devices.""" if discovery_info is None: return async_add_entities( [FibaroLight(device) for device in hass.data[FIBARO_DEVICES]["light"]], True ) class FibaroLight(FibaroDevice, LightEntity): """Representation of a Fibaro Light, including dimmable.""" def __init__(self, fibaro_device): """Initialize the light.""" self._brightness = None self._color = (0, 0) self._last_brightness = 0 self._supported_flags = 0 self._update_lock = asyncio.Lock() self._white = 0 devconf = fibaro_device.device_config self._reset_color = devconf.get(CONF_RESET_COLOR, False) supports_color = ( "color" in fibaro_device.properties and "setColor" in fibaro_device.actions ) supports_dimming = "levelChange" in fibaro_device.interfaces supports_white_v = "setW" in fibaro_device.actions # Configuration can override default capability detection if devconf.get(CONF_DIMMING, supports_dimming): self._supported_flags |= SUPPORT_BRIGHTNESS if devconf.get(CONF_COLOR, supports_color): self._supported_flags |= SUPPORT_COLOR if devconf.get(CONF_WHITE_VALUE, supports_white_v): self._supported_flags |= SUPPORT_WHITE_VALUE super().__init__(fibaro_device) self.entity_id = f"{DOMAIN}.{self.ha_id}" @property def brightness(self): """Return the brightness of the light.""" return scaleto255(self._brightness) @property def hs_color(self): """Return the color of the light.""" return self._color @property def white_value(self): """Return the white value of this light between 0..255.""" return self._white @property def supported_features(self): """Flag supported features.""" return self._supported_flags async def async_turn_on(self, **kwargs): """Turn the light on.""" async with self._update_lock: await self.hass.async_add_executor_job(partial(self._turn_on, **kwargs)) def _turn_on(self, **kwargs): """Really turn the light on.""" if self._supported_flags & SUPPORT_BRIGHTNESS: target_brightness = kwargs.get(ATTR_BRIGHTNESS) # No brightness specified, so we either restore it to # last brightness or switch it on at maximum level if target_brightness is None: if self._brightness == 0: if self._last_brightness: self._brightness = self._last_brightness else: self._brightness = 100 else: # We set it to the target brightness and turn it on self._brightness = scaleto100(target_brightness) if self._supported_flags & SUPPORT_COLOR: if ( self._reset_color and kwargs.get(ATTR_WHITE_VALUE) is None and kwargs.get(ATTR_HS_COLOR) is None and kwargs.get(ATTR_BRIGHTNESS) is None ): 
self._color = (100, 0) # Update based on parameters self._white = kwargs.get(ATTR_WHITE_VALUE, self._white) self._color = kwargs.get(ATTR_HS_COLOR, self._color) rgb = color_util.color_hs_to_RGB(*self._color) self.call_set_color( round(rgb[0] * self._brightness / 100.0), round(rgb[1] * self._brightness / 100.0), round(rgb[2] * self._brightness / 100.0), round(self._white * self._brightness / 100.0), ) if self.state == "off": self.set_level(int(self._brightness)) return if self._reset_color: bri255 = scaleto255(self._brightness) self.call_set_color(bri255, bri255, bri255, bri255) if self._supported_flags & SUPPORT_BRIGHTNESS: self.set_level(int(self._brightness)) return # The simplest case is left for last. No dimming, just switch on self.call_turn_on() async def async_turn_off(self, **kwargs): """Turn the light off.""" async with self._update_lock: await self.hass.async_add_executor_job(partial(self._turn_off, **kwargs)) def _turn_off(self, **kwargs): """Really turn the light off.""" # Let's save the last brightness level before we switch it off if ( (self._supported_flags & SUPPORT_BRIGHTNESS) and self._brightness and self._brightness > 0 ): self._last_brightness = self._brightness self._brightness = 0 self.call_turn_off() @property def is_on(self): """Return true if device is on.""" return self.current_binary_state async def async_update(self): """Update the state.""" async with self._update_lock: await self.hass.async_add_executor_job(self._update) def _update(self): """Really update the state.""" # Brightness handling if self._supported_flags & SUPPORT_BRIGHTNESS: self._brightness = float(self.fibaro_device.properties.value) # Fibaro might report 0-99 or 0-100 for brightness, # based on device type, so we round up here if self._brightness > 99: self._brightness = 100 # Color handling if ( self._supported_flags & SUPPORT_COLOR and "color" in self.fibaro_device.properties and "," in self.fibaro_device.properties.color ): # Fibaro communicates the color as an 'R, G, B, W' string rgbw_s = self.fibaro_device.properties.color if rgbw_s == "0,0,0,0" and "lastColorSet" in self.fibaro_device.properties: rgbw_s = self.fibaro_device.properties.lastColorSet rgbw_list = [int(i) for i in rgbw_s.split(",")][:4] if rgbw_list[0] or rgbw_list[1] or rgbw_list[2]: self._color = color_util.color_RGB_to_hs(*rgbw_list[:3]) if (self._supported_flags & SUPPORT_WHITE_VALUE) and self.brightness != 0: self._white = min(255, max(0, rgbw_list[3] * 100.0 / self._brightness))
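A worked example of the brightness scaling above; the expected values follow directly from the scaleto255/scaleto100 definitions in this file, evaluated by hand:

# Round-trip behaviour of the scaling helpers defined above.
assert scaleto255(100) == 255
assert scaleto255(99) == 255  # 99 is treated as full brightness (0-99 devices)
assert scaleto100(1) == 1  # low but non-zero input stays at 1, not rounded to 0
assert round(scaleto100(128)) == 50  # 128/255 of full scale is about 50%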
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
repo_name: robbiet480/home-assistant
test_path: tests/components/template/test_fan.py
code_path: homeassistant/components/fibaro/light.py
"""Alexa state report code.""" import asyncio import json import logging import aiohttp import async_timeout from homeassistant.const import MATCH_ALL, STATE_ON import homeassistant.util.dt as dt_util from .const import API_CHANGE, Cause from .entities import ENTITY_ADAPTERS from .messages import AlexaResponse _LOGGER = logging.getLogger(__name__) DEFAULT_TIMEOUT = 10 async def async_enable_proactive_mode(hass, smart_home_config): """Enable the proactive mode. Proactive mode makes this component report state changes to Alexa. """ # Validate we can get access token. await smart_home_config.async_get_access_token() async def async_entity_state_listener(changed_entity, old_state, new_state): if not hass.is_running: return if not new_state: return if new_state.domain not in ENTITY_ADAPTERS: return if not smart_home_config.should_expose(changed_entity): _LOGGER.debug("Not exposing %s because filtered by config", changed_entity) return alexa_changed_entity = ENTITY_ADAPTERS[new_state.domain]( hass, smart_home_config, new_state ) for interface in alexa_changed_entity.interfaces(): if interface.properties_proactively_reported(): await async_send_changereport_message( hass, smart_home_config, alexa_changed_entity ) return if ( interface.name() == "Alexa.DoorbellEventSource" and new_state.state == STATE_ON ): await async_send_doorbell_event_message( hass, smart_home_config, alexa_changed_entity ) return return hass.helpers.event.async_track_state_change( MATCH_ALL, async_entity_state_listener ) async def async_send_changereport_message( hass, config, alexa_entity, *, invalidate_access_token=True ): """Send a ChangeReport message for an Alexa entity. https://developer.amazon.com/docs/smarthome/state-reporting-for-a-smart-home-skill.html#report-state-with-changereport-events """ token = await config.async_get_access_token() headers = {"Authorization": f"Bearer {token}"} endpoint = alexa_entity.alexa_id() # this sends all the properties of the Alexa Entity, whether they have # changed or not. this should be improved, and properties that have not # changed should be moved to the 'context' object properties = list(alexa_entity.serialize_properties()) payload = { API_CHANGE: {"cause": {"type": Cause.APP_INTERACTION}, "properties": properties} } message = AlexaResponse(name="ChangeReport", namespace="Alexa", payload=payload) message.set_endpoint_full(token, endpoint) message_serialized = message.serialize() session = hass.helpers.aiohttp_client.async_get_clientsession() try: with async_timeout.timeout(DEFAULT_TIMEOUT): response = await session.post( config.endpoint, headers=headers, json=message_serialized, allow_redirects=True, ) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.error("Timeout sending report to Alexa.") return response_text = await response.text() _LOGGER.debug("Sent: %s", json.dumps(message_serialized)) _LOGGER.debug("Received (%s): %s", response.status, response_text) if response.status == 202: return response_json = json.loads(response_text) if ( response_json["payload"]["code"] == "INVALID_ACCESS_TOKEN_EXCEPTION" and not invalidate_access_token ): config.async_invalidate_access_token() return await async_send_changereport_message( hass, config, alexa_entity, invalidate_access_token=False ) _LOGGER.error( "Error when sending ChangeReport to Alexa: %s: %s", response_json["payload"]["code"], response_json["payload"]["description"], ) async def async_send_add_or_update_message(hass, config, entity_ids): """Send an AddOrUpdateReport message for entities. 
https://developer.amazon.com/docs/device-apis/alexa-discovery.html#add-or-update-report """ token = await config.async_get_access_token() headers = {"Authorization": f"Bearer {token}"} endpoints = [] for entity_id in entity_ids: domain = entity_id.split(".", 1)[0] if domain not in ENTITY_ADAPTERS: continue alexa_entity = ENTITY_ADAPTERS[domain](hass, config, hass.states.get(entity_id)) endpoints.append(alexa_entity.serialize_discovery()) payload = {"endpoints": endpoints, "scope": {"type": "BearerToken", "token": token}} message = AlexaResponse( name="AddOrUpdateReport", namespace="Alexa.Discovery", payload=payload ) message_serialized = message.serialize() session = hass.helpers.aiohttp_client.async_get_clientsession() return await session.post( config.endpoint, headers=headers, json=message_serialized, allow_redirects=True ) async def async_send_delete_message(hass, config, entity_ids): """Send an DeleteReport message for entities. https://developer.amazon.com/docs/device-apis/alexa-discovery.html#deletereport-event """ token = await config.async_get_access_token() headers = {"Authorization": f"Bearer {token}"} endpoints = [] for entity_id in entity_ids: domain = entity_id.split(".", 1)[0] if domain not in ENTITY_ADAPTERS: continue alexa_entity = ENTITY_ADAPTERS[domain](hass, config, hass.states.get(entity_id)) endpoints.append({"endpointId": alexa_entity.alexa_id()}) payload = {"endpoints": endpoints, "scope": {"type": "BearerToken", "token": token}} message = AlexaResponse( name="DeleteReport", namespace="Alexa.Discovery", payload=payload ) message_serialized = message.serialize() session = hass.helpers.aiohttp_client.async_get_clientsession() return await session.post( config.endpoint, headers=headers, json=message_serialized, allow_redirects=True ) async def async_send_doorbell_event_message(hass, config, alexa_entity): """Send a DoorbellPress event message for an Alexa entity. https://developer.amazon.com/docs/smarthome/send-events-to-the-alexa-event-gateway.html """ token = await config.async_get_access_token() headers = {"Authorization": f"Bearer {token}"} endpoint = alexa_entity.alexa_id() message = AlexaResponse( name="DoorbellPress", namespace="Alexa.DoorbellEventSource", payload={ "cause": {"type": Cause.PHYSICAL_INTERACTION}, "timestamp": f"{dt_util.utcnow().replace(tzinfo=None).isoformat()}Z", }, ) message.set_endpoint_full(token, endpoint) message_serialized = message.serialize() session = hass.helpers.aiohttp_client.async_get_clientsession() try: with async_timeout.timeout(DEFAULT_TIMEOUT): response = await session.post( config.endpoint, headers=headers, json=message_serialized, allow_redirects=True, ) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.error("Timeout sending report to Alexa.") return response_text = await response.text() _LOGGER.debug("Sent: %s", json.dumps(message_serialized)) _LOGGER.debug("Received (%s): %s", response.status, response_text) if response.status == 202: return response_json = json.loads(response_text) _LOGGER.error( "Error when sending DoorbellPress event to Alexa: %s: %s", response_json["payload"]["code"], response_json["payload"]["description"], )
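For orientation, the ChangeReport built by async_send_changereport_message serializes to roughly the shape below. This is an illustrative sketch based on Amazon's documented event format: the header and endpoint fields are filled in by AlexaResponse and set_endpoint_full, and the endpoint id and property values here are made-up examples, not output captured from this module:

# Illustrative payload only; all concrete values below are examples.
EXAMPLE_CHANGE_REPORT = {
    "event": {
        "header": {
            "namespace": "Alexa",
            "name": "ChangeReport",
            "payloadVersion": "3",
            "messageId": "...",
        },
        "endpoint": {
            "scope": {"type": "BearerToken", "token": "..."},
            "endpointId": "fan#test_fan",
        },
        "payload": {
            "change": {
                "cause": {"type": "APP_INTERACTION"},
                "properties": [
                    {
                        "namespace": "Alexa.PowerController",
                        "name": "powerState",
                        "value": "ON",
                    }
                ],
            }
        },
    }
}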
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
repo_name: robbiet480/home-assistant
test_path: tests/components/template/test_fan.py
code_path: homeassistant/components/alexa/state_report.py
"""Support for Nest Cameras.""" from datetime import timedelta import logging import requests from homeassistant.components import nest from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_ON_OFF, Camera from homeassistant.util.dt import utcnow _LOGGER = logging.getLogger(__name__) NEST_BRAND = "Nest" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({}) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a Nest Cam. No longer in use. """ async def async_setup_entry(hass, entry, async_add_entities): """Set up a Nest sensor based on a config entry.""" camera_devices = await hass.async_add_job(hass.data[nest.DATA_NEST].cameras) cameras = [NestCamera(structure, device) for structure, device in camera_devices] async_add_entities(cameras, True) class NestCamera(Camera): """Representation of a Nest Camera.""" def __init__(self, structure, device): """Initialize a Nest Camera.""" super().__init__() self.structure = structure self.device = device self._location = None self._name = None self._online = None self._is_streaming = None self._is_video_history_enabled = False # Default to non-NestAware subscribed, but will be fixed during update self._time_between_snapshots = timedelta(seconds=30) self._last_image = None self._next_snapshot_at = None @property def name(self): """Return the name of the nest, if any.""" return self._name @property def unique_id(self): """Return the serial number.""" return self.device.device_id @property def device_info(self): """Return information about the device.""" return { "identifiers": {(nest.DOMAIN, self.device.device_id)}, "name": self.device.name_long, "manufacturer": "Nest Labs", "model": "Camera", } @property def should_poll(self): """Nest camera should poll periodically.""" return True @property def is_recording(self): """Return true if the device is recording.""" return self._is_streaming @property def brand(self): """Return the brand of the camera.""" return NEST_BRAND @property def supported_features(self): """Nest Cam support turn on and off.""" return SUPPORT_ON_OFF @property def is_on(self): """Return true if on.""" return self._online and self._is_streaming def turn_off(self): """Turn off camera.""" _LOGGER.debug("Turn off camera %s", self._name) # Calling Nest API in is_streaming setter. # device.is_streaming would not immediately change until the process # finished in Nest Cam. self.device.is_streaming = False def turn_on(self): """Turn on camera.""" if not self._online: _LOGGER.error("Camera %s is offline.", self._name) return _LOGGER.debug("Turn on camera %s", self._name) # Calling Nest API in is_streaming setter. # device.is_streaming would not immediately change until the process # finished in Nest Cam. 
self.device.is_streaming = True def update(self): """Cache value from Python-nest.""" self._location = self.device.where self._name = self.device.name self._online = self.device.online self._is_streaming = self.device.is_streaming self._is_video_history_enabled = self.device.is_video_history_enabled if self._is_video_history_enabled: # NestAware allowed 10/min self._time_between_snapshots = timedelta(seconds=6) else: # Otherwise, 2/min self._time_between_snapshots = timedelta(seconds=30) def _ready_for_snapshot(self, now): return self._next_snapshot_at is None or now > self._next_snapshot_at def camera_image(self): """Return a still image response from the camera.""" now = utcnow() if self._ready_for_snapshot(now): url = self.device.snapshot_url try: response = requests.get(url) except requests.exceptions.RequestException as error: _LOGGER.error("Error getting camera image: %s", error) return None self._next_snapshot_at = now + self._time_between_snapshots self._last_image = response.content return self._last_image
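The snapshot throttling above is easiest to see in isolation. The sketch below mirrors the _ready_for_snapshot predicate in a standalone function (the function name and timestamps are illustrative; only the logic is taken from the class above):

# Standalone mirror of NestCamera._ready_for_snapshot, for illustration.
from datetime import timedelta

from homeassistant.util.dt import utcnow


def ready_for_snapshot(next_snapshot_at, now):
    """Return True when a fresh snapshot should be fetched."""
    return next_snapshot_at is None or now > next_snapshot_at


now = utcnow()
assert ready_for_snapshot(None, now)  # first request always fetches
assert not ready_for_snapshot(now + timedelta(seconds=30), now)  # within window: cached
assert ready_for_snapshot(now - timedelta(seconds=1), now)  # window elapsed: fetch again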
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
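The common.async_turn_on / async_set_speed / async_oscillate / async_set_direction helpers used throughout this file are thin wrappers around fan service calls. A minimal sketch of the equivalent direct call for the turn-on-with-speed case, assuming the standard service interface (the helpers' exact keyword handling may differ):

    # Equivalent of common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH):
    await hass.services.async_call(
        "fan",
        "turn_on",
        {"entity_id": _TEST_FAN, ATTR_SPEED: SPEED_HIGH},
        blocking=True,
    )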
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/nest/camera.py
"""Support for RFXtrx binary sensors.""" import logging import RFXtrx as rfxtrxmod import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import ( CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_DEVICE_CLASS, CONF_NAME, ) from homeassistant.helpers import config_validation as cv, event as evt from homeassistant.util import dt as dt_util, slugify from . import ( ATTR_NAME, CONF_AUTOMATIC_ADD, CONF_DATA_BITS, CONF_DEVICES, CONF_FIRE_EVENT, CONF_OFF_DELAY, RECEIVED_EVT_SUBSCRIBERS, RFX_DEVICES, apply_received_command, find_possible_pt2262_device, get_pt2262_cmd, get_pt2262_device, get_pt2262_deviceid, get_rfx_object, ) _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_DEVICES, default={}): { cv.string: vol.Schema( { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean, vol.Optional(CONF_OFF_DELAY): vol.Any( cv.time_period, cv.positive_timedelta ), vol.Optional(CONF_DATA_BITS): cv.positive_int, vol.Optional(CONF_COMMAND_ON): cv.byte, vol.Optional(CONF_COMMAND_OFF): cv.byte, } ) }, vol.Optional(CONF_AUTOMATIC_ADD, default=False): cv.boolean, }, extra=vol.ALLOW_EXTRA, ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Binary Sensor platform to RFXtrx.""" sensors = [] for packet_id, entity in config[CONF_DEVICES].items(): event = get_rfx_object(packet_id) device_id = slugify(event.device.id_string.lower()) if device_id in RFX_DEVICES: continue if entity.get(CONF_DATA_BITS) is not None: _LOGGER.debug( "Masked device id: %s", get_pt2262_deviceid(device_id, entity.get(CONF_DATA_BITS)), ) _LOGGER.debug( "Add %s rfxtrx.binary_sensor (class %s)", entity[ATTR_NAME], entity.get(CONF_DEVICE_CLASS), ) device = RfxtrxBinarySensor( event, entity.get(CONF_NAME), entity.get(CONF_DEVICE_CLASS), entity[CONF_FIRE_EVENT], entity.get(CONF_OFF_DELAY), entity.get(CONF_DATA_BITS), entity.get(CONF_COMMAND_ON), entity.get(CONF_COMMAND_OFF), ) device.hass = hass sensors.append(device) RFX_DEVICES[device_id] = device add_entities(sensors) def binary_sensor_update(event): """Call for control updates from the RFXtrx gateway.""" if not isinstance(event, rfxtrxmod.ControlEvent): return device_id = slugify(event.device.id_string.lower()) sensor = RFX_DEVICES.get(device_id, get_pt2262_device(device_id)) if sensor is None: # Add the entity if not exists and automatic_add is True if not config[CONF_AUTOMATIC_ADD]: return if event.device.packettype == 0x13: poss_dev = find_possible_pt2262_device(device_id) if poss_dev is not None: poss_id = slugify(poss_dev.event.device.id_string.lower()) _LOGGER.debug("Found possible matching device ID: %s", poss_id) pkt_id = "".join(f"{x:02x}" for x in event.data) sensor = RfxtrxBinarySensor(event, pkt_id) sensor.hass = hass RFX_DEVICES[device_id] = sensor add_entities([sensor]) _LOGGER.info( "Added binary sensor %s (Device ID: %s Class: %s Sub: %s)", pkt_id, slugify(event.device.id_string.lower()), event.device.__class__.__name__, event.device.subtype, ) elif not isinstance(sensor, RfxtrxBinarySensor): return else: _LOGGER.debug( "Binary sensor update (Device ID: %s Class: %s Sub: %s)", slugify(event.device.id_string.lower()), event.device.__class__.__name__, event.device.subtype, ) if sensor.is_lighting4: if sensor.data_bits is not None: cmd = get_pt2262_cmd(device_id, sensor.data_bits) sensor.apply_cmd(int(cmd, 16)) 
else: sensor.update_state(True) else: apply_received_command(event) if ( sensor.is_on and sensor.off_delay is not None and sensor.delay_listener is None ): def off_delay_listener(now): """Switch device off after a delay.""" sensor.delay_listener = None sensor.update_state(False) sensor.delay_listener = evt.track_point_in_time( hass, off_delay_listener, dt_util.utcnow() + sensor.off_delay ) # Subscribe to main RFXtrx events if binary_sensor_update not in RECEIVED_EVT_SUBSCRIBERS: RECEIVED_EVT_SUBSCRIBERS.append(binary_sensor_update) class RfxtrxBinarySensor(BinarySensorEntity): """A representation of a RFXtrx binary sensor.""" def __init__( self, event, name, device_class=None, should_fire=False, off_delay=None, data_bits=None, cmd_on=None, cmd_off=None, ): """Initialize the RFXtrx sensor.""" self.event = event self._name = name self._should_fire_event = should_fire self._device_class = device_class self._off_delay = off_delay self._state = False self.is_lighting4 = event.device.packettype == 0x13 self.delay_listener = None self._data_bits = data_bits self._cmd_on = cmd_on self._cmd_off = cmd_off self._unique_id = f"{slugify(self.event.device.type_string.lower())}_{slugify(self.event.device.id_string.lower())}" if data_bits is not None: self._masked_id = get_pt2262_deviceid( event.device.id_string.lower(), data_bits ) else: self._masked_id = None @property def name(self): """Return the device name.""" return self._name @property def masked_id(self): """Return the masked device id (isolated address bits).""" return self._masked_id @property def data_bits(self): """Return the number of data bits.""" return self._data_bits @property def cmd_on(self): """Return the value of the 'On' command.""" return self._cmd_on @property def cmd_off(self): """Return the value of the 'Off' command.""" return self._cmd_off @property def should_poll(self): """No polling needed.""" return False @property def should_fire_event(self): """Return is the device must fire event.""" return self._should_fire_event @property def device_class(self): """Return the sensor class.""" return self._device_class @property def off_delay(self): """Return the off_delay attribute value.""" return self._off_delay @property def is_on(self): """Return true if the sensor state is True.""" return self._state @property def unique_id(self): """Return unique identifier of remote device.""" return self._unique_id def apply_cmd(self, cmd): """Apply a command for updating the state.""" if cmd == self.cmd_on: self.update_state(True) elif cmd == self.cmd_off: self.update_state(False) def update_state(self, state): """Update the state of the device.""" self._state = state self.schedule_update_ha_state()
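For reference, a minimal sketch of a config dict that would satisfy the PLATFORM_SCHEMA above. The packet id, names, and off_delay value are hypothetical, chosen purely for illustration:

    # Hypothetical configuration matching the schema; not from real hardware.
    EXAMPLE_RFXTRX_BINARY_SENSOR_CONFIG = {
        "platform": "rfxtrx",
        "automatic_add": True,
        "devices": {
            # Hypothetical PT2262 packet id used as the device key.
            "0913000022670e013970": {
                "name": "Hallway motion",
                "device_class": "motion",
                "off_delay": {"seconds": 5},
            }
        },
    }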
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/rfxtrx/binary_sensor.py
"""Define a config flow manager for AirVisual.""" import asyncio from pyairvisual import Client from pyairvisual.errors import InvalidKeyError, NodeProError import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_API_KEY, CONF_IP_ADDRESS, CONF_LATITUDE, CONF_LONGITUDE, CONF_PASSWORD, CONF_SHOW_ON_MAP, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from . import async_get_geography_id from .const import ( # pylint: disable=unused-import CONF_GEOGRAPHIES, CONF_INTEGRATION_TYPE, DOMAIN, INTEGRATION_TYPE_GEOGRAPHY, INTEGRATION_TYPE_NODE_PRO, LOGGER, ) class AirVisualFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle an AirVisual config flow.""" VERSION = 2 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL @property def geography_schema(self): """Return the data schema for the cloud API.""" return vol.Schema( { vol.Required(CONF_API_KEY): str, vol.Required( CONF_LATITUDE, default=self.hass.config.latitude ): cv.latitude, vol.Required( CONF_LONGITUDE, default=self.hass.config.longitude ): cv.longitude, } ) @property def pick_integration_type_schema(self): """Return the data schema for picking the integration type.""" return vol.Schema( { vol.Required("type"): vol.In( [INTEGRATION_TYPE_GEOGRAPHY, INTEGRATION_TYPE_NODE_PRO] ) } ) @property def node_pro_schema(self): """Return the data schema for a Node/Pro.""" return vol.Schema( {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): str} ) async def _async_set_unique_id(self, unique_id): """Set the unique ID of the config flow and abort if it already exists.""" await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() @staticmethod @callback def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return AirVisualOptionsFlowHandler(config_entry) async def async_step_geography(self, user_input=None): """Handle the initialization of the integration via the cloud API.""" if not user_input: return self.async_show_form( step_id="geography", data_schema=self.geography_schema ) geo_id = async_get_geography_id(user_input) await self._async_set_unique_id(geo_id) self._abort_if_unique_id_configured() # Find older config entries without unique ID: for entry in self._async_current_entries(): if entry.version != 1: continue if any( geo_id == async_get_geography_id(geography) for geography in entry.data[CONF_GEOGRAPHIES] ): return self.async_abort(reason="already_configured") websession = aiohttp_client.async_get_clientsession(self.hass) client = Client(session=websession, api_key=user_input[CONF_API_KEY]) # If this is the first (and only the first) time we've seen this API key, check # that it's valid: checked_keys = self.hass.data.setdefault("airvisual_checked_api_keys", set()) check_keys_lock = self.hass.data.setdefault( "airvisual_checked_api_keys_lock", asyncio.Lock() ) async with check_keys_lock: if user_input[CONF_API_KEY] not in checked_keys: try: await client.api.nearest_city() except InvalidKeyError: return self.async_show_form( step_id="geography", data_schema=self.geography_schema, errors={CONF_API_KEY: "invalid_api_key"}, ) checked_keys.add(user_input[CONF_API_KEY]) return self.async_create_entry( title=f"Cloud API ({geo_id})", data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY}, ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await 
self.async_step_geography(import_config) async def async_step_node_pro(self, user_input=None): """Handle the initialization of the integration with a Node/Pro.""" if not user_input: return self.async_show_form( step_id="node_pro", data_schema=self.node_pro_schema ) await self._async_set_unique_id(user_input[CONF_IP_ADDRESS]) websession = aiohttp_client.async_get_clientsession(self.hass) client = Client(session=websession) try: await client.node.from_samba( user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD], include_history=False, include_trends=False, ) except NodeProError as err: LOGGER.error("Error connecting to Node/Pro unit: %s", err) return self.async_show_form( step_id="node_pro", data_schema=self.node_pro_schema, errors={CONF_IP_ADDRESS: "unable_to_connect"}, ) return self.async_create_entry( title=f"Node/Pro ({user_input[CONF_IP_ADDRESS]})", data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO}, ) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" if not user_input: return self.async_show_form( step_id="user", data_schema=self.pick_integration_type_schema ) if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY: return await self.async_step_geography() return await self.async_step_node_pro() class AirVisualOptionsFlowHandler(config_entries.OptionsFlow): """Handle an AirVisual options flow.""" def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Required( CONF_SHOW_ON_MAP, default=self.config_entry.options.get(CONF_SHOW_ON_MAP), ): bool } ), )
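Since async_step_import above simply re-enters the geography step, an entry imported from configuration.yaml has to carry the same keys as geography_schema. A hypothetical import payload (key names follow the schema; the values are invented for illustration):

    # Hypothetical payload handed to async_step_import by YAML import.
    import_config = {
        "api_key": "abc123",  # CONF_API_KEY
        "latitude": 52.52,    # CONF_LATITUDE
        "longitude": 13.405,  # CONF_LONGITUDE
    }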
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/airvisual/config_flow.py
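# --- Illustrative sketch (not part of the repository files above) ------------
# The template fan tests drive the entity through helpers in
# tests.components.fan.common. Assuming those helpers simply wrap
# hass.services.async_call (a hedged assumption; the helper module itself is
# not shown here), a minimal version of async_set_speed would look like the
# sketch below. This also explains why invalid oscillating values surface as
# vol.Invalid in the tests: the fan service schema rejects them before any
# entity code runs.
from homeassistant.components.fan import ATTR_SPEED, DOMAIN, SERVICE_SET_SPEED
from homeassistant.const import ATTR_ENTITY_ID


async def async_set_speed(hass, entity_id, speed):
    """Call fan.set_speed for entity_id and wait until the call completes."""
    data = {ATTR_ENTITY_ID: entity_id, ATTR_SPEED: speed}
    await hass.services.async_call(DOMAIN, SERVICE_SET_SPEED, data, blocking=True)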
"""Support for binary sensor using I2C MCP23017 chip.""" import logging from adafruit_mcp230xx.mcp23017 import MCP23017 # pylint: disable=import-error import board # pylint: disable=import-error import busio # pylint: disable=import-error import digitalio # pylint: disable=import-error import voluptuous as vol from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity from homeassistant.const import DEVICE_DEFAULT_NAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_INVERT_LOGIC = "invert_logic" CONF_I2C_ADDRESS = "i2c_address" CONF_PINS = "pins" CONF_PULL_MODE = "pull_mode" MODE_UP = "UP" MODE_DOWN = "DOWN" DEFAULT_INVERT_LOGIC = False DEFAULT_I2C_ADDRESS = 0x20 DEFAULT_PULL_MODE = MODE_UP _SENSORS_SCHEMA = vol.Schema({cv.positive_int: cv.string}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_PINS): _SENSORS_SCHEMA, vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean, vol.Optional(CONF_PULL_MODE, default=DEFAULT_PULL_MODE): vol.All( vol.Upper, vol.In([MODE_UP, MODE_DOWN]) ), vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.Coerce(int), } ) def setup_platform(hass, config, add_devices, discovery_info=None): """Set up the MCP23017 binary sensors.""" pull_mode = config[CONF_PULL_MODE] invert_logic = config[CONF_INVERT_LOGIC] i2c_address = config[CONF_I2C_ADDRESS] i2c = busio.I2C(board.SCL, board.SDA) mcp = MCP23017(i2c, address=i2c_address) binary_sensors = [] pins = config[CONF_PINS] for pin_num, pin_name in pins.items(): pin = mcp.get_pin(pin_num) binary_sensors.append( MCP23017BinarySensor(pin_name, pin, pull_mode, invert_logic) ) add_devices(binary_sensors, True) class MCP23017BinarySensor(BinarySensorEntity): """Represent a binary sensor that uses MCP23017.""" def __init__(self, name, pin, pull_mode, invert_logic): """Initialize the MCP23017 binary sensor.""" self._name = name or DEVICE_DEFAULT_NAME self._pin = pin self._pull_mode = pull_mode self._invert_logic = invert_logic self._state = None self._pin.direction = digitalio.Direction.INPUT self._pin.pull = digitalio.Pull.UP @property def name(self): """Return the name of the sensor.""" return self._name @property def is_on(self): """Return the state of the entity.""" return self._state != self._invert_logic def update(self): """Update the GPIO state.""" self._state = self._pin.value
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/mcp23017/binary_sensor.py
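# --- Illustrative sketch (not part of the repository files above) ------------
# Note on the MCP23017 schema: voluptuous validates dict *keys* as well as
# values, which is how {cv.positive_int: cv.string} maps pin numbers to sensor
# names. A standalone demo of the same two patterns, with plain stand-ins for
# the cv helpers (the stand-ins are assumptions for illustration, not the
# component's actual validators):
import voluptuous as vol

sensors_schema = vol.Schema({vol.All(int, vol.Range(min=0)): str})
pull_schema = vol.All(vol.Upper, vol.In(["UP", "DOWN"]))

print(sensors_schema({0: "Door", 1: "Window"}))  # keys and values both validated
print(pull_schema("down"))  # normalized to 'DOWN' before the membership check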
"""Support for OpenTherm Gateway devices.""" import asyncio from datetime import date, datetime import logging import pyotgw import pyotgw.vars as gw_vars import voluptuous as vol from homeassistant.components.binary_sensor import DOMAIN as COMP_BINARY_SENSOR from homeassistant.components.climate import DOMAIN as COMP_CLIMATE from homeassistant.components.sensor import DOMAIN as COMP_SENSOR from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( ATTR_DATE, ATTR_ID, ATTR_MODE, ATTR_TEMPERATURE, ATTR_TIME, CONF_DEVICE, CONF_ID, CONF_NAME, EVENT_HOMEASSISTANT_STOP, PRECISION_HALVES, PRECISION_TENTHS, PRECISION_WHOLE, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import ( ATTR_DHW_OVRD, ATTR_GW_ID, ATTR_LEVEL, CONF_CLIMATE, CONF_FLOOR_TEMP, CONF_PRECISION, DATA_GATEWAYS, DATA_OPENTHERM_GW, DOMAIN, SERVICE_RESET_GATEWAY, SERVICE_SET_CLOCK, SERVICE_SET_CONTROL_SETPOINT, SERVICE_SET_GPIO_MODE, SERVICE_SET_HOT_WATER_OVRD, SERVICE_SET_HOT_WATER_SETPOINT, SERVICE_SET_LED_MODE, SERVICE_SET_MAX_MOD, SERVICE_SET_OAT, SERVICE_SET_SB_TEMP, ) _LOGGER = logging.getLogger(__name__) CLIMATE_SCHEMA = vol.Schema( { vol.Optional(CONF_PRECISION): vol.In( [PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE] ), vol.Optional(CONF_FLOOR_TEMP, default=False): cv.boolean, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: cv.schema_with_slug_keys( { vol.Required(CONF_DEVICE): cv.string, vol.Optional(CONF_CLIMATE, default={}): CLIMATE_SCHEMA, vol.Optional(CONF_NAME): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) async def options_updated(hass, entry): """Handle options update.""" gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] async_dispatcher_send(hass, gateway.options_update_signal, entry) async def async_setup_entry(hass, config_entry): """Set up the OpenTherm Gateway component.""" if DATA_OPENTHERM_GW not in hass.data: hass.data[DATA_OPENTHERM_GW] = {DATA_GATEWAYS: {}} gateway = OpenThermGatewayDevice(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway config_entry.add_update_listener(options_updated) # Schedule directly on the loop to avoid blocking HA startup. 
hass.loop.create_task(gateway.connect_and_subscribe()) for comp in [COMP_BINARY_SENSOR, COMP_CLIMATE, COMP_SENSOR]: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, comp) ) register_services(hass) return True async def async_setup(hass, config): """Set up the OpenTherm Gateway component.""" if not hass.config_entries.async_entries(DOMAIN) and DOMAIN in config: conf = config[DOMAIN] for device_id, device_config in conf.items(): device_config[CONF_ID] = device_id hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=device_config ) ) return True def register_services(hass): """Register services for the component.""" service_reset_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ) } ) service_set_clock_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Optional(ATTR_DATE, default=date.today()): cv.date, vol.Optional(ATTR_TIME, default=datetime.now().time()): cv.time, } ) service_set_control_setpoint_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=0, max=90) ), } ) service_set_hot_water_setpoint_schema = service_set_control_setpoint_schema service_set_hot_water_ovrd_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_DHW_OVRD): vol.Any( vol.Equal("A"), vol.All(vol.Coerce(int), vol.Range(min=0, max=1)) ), } ) service_set_gpio_mode_schema = vol.Schema( vol.Any( vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.Equal("A"), vol.Required(ATTR_MODE): vol.All( vol.Coerce(int), vol.Range(min=0, max=6) ), } ), vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.Equal("B"), vol.Required(ATTR_MODE): vol.All( vol.Coerce(int), vol.Range(min=0, max=7) ), } ), ) ) service_set_led_mode_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.In("ABCDEF"), vol.Required(ATTR_MODE): vol.In("RXTBOFHWCEMP"), } ) service_set_max_mod_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_LEVEL): vol.All( vol.Coerce(int), vol.Range(min=-1, max=100) ), } ) service_set_oat_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=-40, max=99) ), } ) service_set_sb_temp_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=0, max=30) ), } ) async def reset_gateway(call): """Reset the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] mode_rst = gw_vars.OTGW_MODE_RESET status = await gw_dev.gateway.set_mode(mode_rst) gw_dev.status = status async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_RESET_GATEWAY, 
reset_gateway, service_reset_schema ) async def set_control_setpoint(call): """Set the control setpoint on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_CONTROL_SETPOINT value = await gw_dev.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_CONTROL_SETPOINT, set_control_setpoint, service_set_control_setpoint_schema, ) async def set_dhw_ovrd(call): """Set the domestic hot water override on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.OTGW_DHW_OVRD value = await gw_dev.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_HOT_WATER_OVRD, set_dhw_ovrd, service_set_hot_water_ovrd_schema, ) async def set_dhw_setpoint(call): """Set the domestic hot water setpoint on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_DHW_SETPOINT value = await gw_dev.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_HOT_WATER_SETPOINT, set_dhw_setpoint, service_set_hot_water_setpoint_schema, ) async def set_device_clock(call): """Set the clock on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] attr_date = call.data[ATTR_DATE] attr_time = call.data[ATTR_TIME] await gw_dev.gateway.set_clock(datetime.combine(attr_date, attr_time)) hass.services.async_register( DOMAIN, SERVICE_SET_CLOCK, set_device_clock, service_set_clock_schema ) async def set_gpio_mode(call): """Set the OpenTherm Gateway GPIO modes.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gpio_id = call.data[ATTR_ID] gpio_mode = call.data[ATTR_MODE] mode = await gw_dev.gateway.set_gpio_mode(gpio_id, gpio_mode) gpio_var = getattr(gw_vars, f"OTGW_GPIO_{gpio_id}") gw_dev.status.update({gpio_var: mode}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_GPIO_MODE, set_gpio_mode, service_set_gpio_mode_schema ) async def set_led_mode(call): """Set the OpenTherm Gateway LED modes.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] led_id = call.data[ATTR_ID] led_mode = call.data[ATTR_MODE] mode = await gw_dev.gateway.set_led_mode(led_id, led_mode) led_var = getattr(gw_vars, f"OTGW_LED_{led_id}") gw_dev.status.update({led_var: mode}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_LED_MODE, set_led_mode, service_set_led_mode_schema ) async def set_max_mod(call): """Set the max modulation level.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD level = call.data[ATTR_LEVEL] if level == -1: # Backend only clears setting on non-numeric values. 
level = "-" value = await gw_dev.gateway.set_max_relative_mod(level) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_MAX_MOD, set_max_mod, service_set_max_mod_schema ) async def set_outside_temp(call): """Provide the outside temperature to the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_OUTSIDE_TEMP value = await gw_dev.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_OAT, set_outside_temp, service_set_oat_schema ) async def set_setback_temp(call): """Set the OpenTherm Gateway SetBack temperature.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.OTGW_SB_TEMP value = await gw_dev.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_SB_TEMP, set_setback_temp, service_set_sb_temp_schema ) async def async_unload_entry(hass, entry): """Cleanup and disconnect from gateway.""" await asyncio.gather( hass.config_entries.async_forward_entry_unload(entry, COMP_BINARY_SENSOR), hass.config_entries.async_forward_entry_unload(entry, COMP_CLIMATE), hass.config_entries.async_forward_entry_unload(entry, COMP_SENSOR), ) gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] await gateway.cleanup() return True class OpenThermGatewayDevice: """OpenTherm Gateway device class.""" def __init__(self, hass, config_entry): """Initialize the OpenTherm Gateway.""" self.hass = hass self.device_path = config_entry.data[CONF_DEVICE] self.gw_id = config_entry.data[CONF_ID] self.name = config_entry.data[CONF_NAME] self.climate_config = config_entry.options self.status = {} self.update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_update" self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_options_update" self.gateway = pyotgw.pyotgw() self.gw_version = None async def cleanup(self, event=None): """Reset overrides on the gateway.""" await self.gateway.set_control_setpoint(0) await self.gateway.set_max_relative_mod("-") await self.gateway.disconnect() async def connect_and_subscribe(self): """Connect to serial device and subscribe report handler.""" self.status = await self.gateway.connect(self.hass.loop, self.device_path) _LOGGER.debug("Connected to OpenTherm Gateway at %s", self.device_path) self.gw_version = self.status.get(gw_vars.OTGW_BUILD) self.hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, self.cleanup) async def handle_report(status): """Handle reports from the OpenTherm Gateway.""" _LOGGER.debug("Received report: %s", status) self.status = status async_dispatcher_send(self.hass, self.update_signal, status) self.gateway.subscribe(handle_report)
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/opentherm_gw/__init__.py
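# --- Illustrative sketch (not part of the repository files above) ------------
# One pattern worth calling out from the OpenTherm Gateway service schemas:
# vol.Any over two complete schemas is how GPIO "B" gets an extra valid mode
# (0-7) compared to GPIO "A" (0-6). The same pattern in isolation:
import voluptuous as vol

gpio_schema = vol.Schema(
    vol.Any(
        vol.Schema(
            {"id": vol.Equal("A"), "mode": vol.All(int, vol.Range(min=0, max=6))}
        ),
        vol.Schema(
            {"id": vol.Equal("B"), "mode": vol.All(int, vol.Range(min=0, max=7))}
        ),
    )
)

print(gpio_schema({"id": "B", "mode": 7}))  # accepted: mode 7 exists only on "B"
# gpio_schema({"id": "A", "mode": 7}) would raise vol.MultipleInvalid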
"""The broadlink component.""" import asyncio from base64 import b64decode, b64encode from binascii import unhexlify import logging import re from broadlink.exceptions import BroadlinkException, ReadError, StorageError import voluptuous as vol from homeassistant.const import CONF_HOST import homeassistant.helpers.config_validation as cv from homeassistant.util.dt import utcnow from .const import CONF_PACKET, DOMAIN, LEARNING_TIMEOUT, SERVICE_LEARN, SERVICE_SEND _LOGGER = logging.getLogger(__name__) DEFAULT_RETRY = 3 def data_packet(value): """Decode a data packet given for broadlink.""" value = cv.string(value) extra = len(value) % 4 if extra > 0: value = value + ("=" * (4 - extra)) return b64decode(value) def hostname(value): """Validate a hostname.""" host = str(value) if len(host) > 253: raise ValueError if host[-1] == ".": host = host[:-1] allowed = re.compile(r"(?![_-])[a-z\d_-]{1,63}(?<![_-])$", flags=re.IGNORECASE) if not all(allowed.match(elem) for elem in host.split(".")): raise ValueError return host def mac_address(value): """Validate and coerce a 48-bit MAC address.""" mac = str(value).lower() if len(mac) == 17: mac = mac[0:2] + mac[3:5] + mac[6:8] + mac[9:11] + mac[12:14] + mac[15:17] elif len(mac) == 14: mac = mac[0:2] + mac[2:4] + mac[5:7] + mac[7:9] + mac[10:12] + mac[12:14] elif len(mac) != 12: raise ValueError return unhexlify(mac) SERVICE_SEND_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PACKET): vol.All(cv.ensure_list, [data_packet]), } ) SERVICE_LEARN_SCHEMA = vol.Schema({vol.Required(CONF_HOST): cv.string}) async def async_setup_service(hass, host, device): """Register a device for given host for use in services.""" hass.data.setdefault(DOMAIN, {})[host] = device if hass.services.has_service(DOMAIN, SERVICE_LEARN): return async def async_learn_command(call): """Learn a packet from remote.""" device = hass.data[DOMAIN][call.data[CONF_HOST]] try: await device.async_request(device.api.enter_learning) except BroadlinkException as err_msg: _LOGGER.error("Failed to enter learning mode: %s", err_msg) return _LOGGER.info("Press the key you want Home Assistant to learn") start_time = utcnow() while (utcnow() - start_time) < LEARNING_TIMEOUT: await asyncio.sleep(1) try: packet = await device.async_request(device.api.check_data) except (ReadError, StorageError): continue except BroadlinkException as err_msg: _LOGGER.error("Failed to learn: %s", err_msg) return else: data = b64encode(packet).decode("utf8") log_msg = f"Received packet is: {data}" _LOGGER.info(log_msg) hass.components.persistent_notification.async_create( log_msg, title="Broadlink switch" ) return _LOGGER.error("Failed to learn: No signal received") hass.components.persistent_notification.async_create( "No signal was received", title="Broadlink switch" ) hass.services.async_register( DOMAIN, SERVICE_LEARN, async_learn_command, schema=SERVICE_LEARN_SCHEMA ) async def async_send_packet(call): """Send a packet.""" device = hass.data[DOMAIN][call.data[CONF_HOST]] packets = call.data[CONF_PACKET] for packet in packets: try: await device.async_request(device.api.send_data, packet) except BroadlinkException as err_msg: _LOGGER.error("Failed to send packet: %s", err_msg) return hass.services.async_register( DOMAIN, SERVICE_SEND, async_send_packet, schema=SERVICE_SEND_SCHEMA )
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
    hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON)
    await hass.async_block_till_done()

    # Device state should not be unavailable
    assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE

    # When the availability template returns false
    hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF)
    await hass.async_block_till_done()

    # Device state should be unavailable
    assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE


async def test_templates_with_valid_values(hass, calls):
    """Test templates with valid values."""
    with assert_setup_component(1, "fan"):
        assert await setup.async_setup_component(
            hass,
            "fan",
            {
                "fan": {
                    "platform": "template",
                    "fans": {
                        "test_fan": {
                            "value_template": "{{ 'on' }}",
                            "speed_template": "{{ 'medium' }}",
                            "oscillating_template": "{{ 1 == 1 }}",
                            "direction_template": "{{ 'forward' }}",
                            "turn_on": {"service": "script.fan_on"},
                            "turn_off": {"service": "script.fan_off"},
                        }
                    },
                }
            },
        )

    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD)


async def test_templates_invalid_values(hass, calls):
    """Test templates with invalid values."""
    with assert_setup_component(1, "fan"):
        assert await setup.async_setup_component(
            hass,
            "fan",
            {
                "fan": {
                    "platform": "template",
                    "fans": {
                        "test_fan": {
                            "value_template": "{{ 'abc' }}",
                            "speed_template": "{{ '0' }}",
                            "oscillating_template": "{{ 'xyz' }}",
                            "direction_template": "{{ 'right' }}",
                            "turn_on": {"service": "script.fan_on"},
                            "turn_off": {"service": "script.fan_off"},
                        }
                    },
                }
            },
        )

    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    _verify(hass, STATE_OFF, None, None, None)


async def test_invalid_availability_template_keeps_component_available(hass, caplog):
    """Test that an invalid availability template keeps the device available."""
    with assert_setup_component(1, "fan"):
        assert await setup.async_setup_component(
            hass,
            "fan",
            {
                "fan": {
                    "platform": "template",
                    "fans": {
                        "test_fan": {
                            "value_template": "{{ 'on' }}",
                            "availability_template": "{{ x - 12 }}",
                            "speed_template": "{{ states('input_select.speed') }}",
                            "oscillating_template": "{{ states('input_select.osc') }}",
                            "direction_template": "{{ states('input_select.direction') }}",
                            "turn_on": {"service": "script.fan_on"},
                            "turn_off": {"service": "script.fan_off"},
                        }
                    },
                }
            },
        )

    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE
    assert "Could not render availability_template template" in caplog.text
    assert "UndefinedError: 'x' is undefined" in caplog.text


# End of template tests #


# Function tests #
async def test_on_off(hass, calls):
    """Test turn on and turn off."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # verify
    assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON
    _verify(hass, STATE_ON, None, None, None)

    # Turn off fan
    await common.async_turn_off(hass, _TEST_FAN)

    # verify
    assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF
    _verify(hass, STATE_OFF, None, None, None)


async def test_on_with_speed(hass, calls):
    """Test turn on with speed."""
    await _register_components(hass)

    # Turn on fan with high speed
    await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH)

    # verify
    assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)


async def test_set_speed(hass, calls):
    """Test set valid speed."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's speed to high
    await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH)

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)

    # Set fan's speed to medium
    await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM)

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM
    _verify(hass, STATE_ON, SPEED_MEDIUM, None, None)


async def test_set_invalid_speed_from_initial_state(hass, calls):
    """Test set invalid speed when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's speed to 'invalid'
    await common.async_set_speed(hass, _TEST_FAN, "invalid")

    # verify speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == ""
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_speed(hass, calls):
    """Test set invalid speed when fan has valid speed."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's speed to high
    await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH)

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)

    # Set fan's speed to 'invalid'
    await common.async_set_speed(hass, _TEST_FAN, "invalid")

    # verify speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH
    _verify(hass, STATE_ON, SPEED_HIGH, None, None)


async def test_custom_speed_list(hass, calls):
    """Test set custom speed list."""
    await _register_components(hass, ["1", "2", "3"])

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's speed to '1'
    await common.async_set_speed(hass, _TEST_FAN, "1")

    # verify
    assert hass.states.get(_SPEED_INPUT_SELECT).state == "1"
    _verify(hass, STATE_ON, "1", None, None)

    # Set fan's speed to 'medium' which is invalid
    await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM)

    # verify that speed is unchanged
    assert hass.states.get(_SPEED_INPUT_SELECT).state == "1"
    _verify(hass, STATE_ON, "1", None, None)


async def test_set_osc(hass, calls):
    """Test set oscillating."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's osc to True
    await common.async_oscillate(hass, _TEST_FAN, True)

    # verify
    assert hass.states.get(_OSC_INPUT).state == "True"
    _verify(hass, STATE_ON, None, True, None)

    # Set fan's osc to False
    await common.async_oscillate(hass, _TEST_FAN, False)

    # verify
    assert hass.states.get(_OSC_INPUT).state == "False"
    _verify(hass, STATE_ON, None, False, None)


async def test_set_invalid_osc_from_initial_state(hass, calls):
    """Test set invalid oscillating when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's osc to 'invalid'
    with pytest.raises(vol.Invalid):
        await common.async_oscillate(hass, _TEST_FAN, "invalid")

    # verify
    assert hass.states.get(_OSC_INPUT).state == ""
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_osc(hass, calls):
    """Test set invalid oscillating when fan has valid osc."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's osc to True
    await common.async_oscillate(hass, _TEST_FAN, True)

    # verify
    assert hass.states.get(_OSC_INPUT).state == "True"
    _verify(hass, STATE_ON, None, True, None)

    # Set fan's osc to None
    with pytest.raises(vol.Invalid):
        await common.async_oscillate(hass, _TEST_FAN, None)

    # verify osc is unchanged
    assert hass.states.get(_OSC_INPUT).state == "True"
    _verify(hass, STATE_ON, None, True, None)


async def test_set_direction(hass, calls):
    """Test set valid direction."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's direction to forward
    await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD)

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)

    # Set fan's direction to reverse
    await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE)

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE
    _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE)


async def test_set_invalid_direction_from_initial_state(hass, calls):
    """Test set invalid direction when fan is in initial state."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's direction to 'invalid'
    await common.async_set_direction(hass, _TEST_FAN, "invalid")

    # verify direction is unchanged
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == ""
    _verify(hass, STATE_ON, None, None, None)


async def test_set_invalid_direction(hass, calls):
    """Test set invalid direction when fan has valid direction."""
    await _register_components(hass)

    # Turn on fan
    await common.async_turn_on(hass, _TEST_FAN)

    # Set fan's direction to forward
    await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD)

    # verify
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)

    # Set fan's direction to 'invalid'
    await common.async_set_direction(hass, _TEST_FAN, "invalid")

    # verify direction is unchanged
    assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD
    _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD)


def _verify(
    hass, expected_state, expected_speed, expected_oscillating, expected_direction
):
    """Verify the fan's state, speed, oscillating and direction attributes."""
    state = hass.states.get(_TEST_FAN)
    attributes = state.attributes
    assert state.state == expected_state
    assert attributes.get(ATTR_SPEED) == expected_speed
    assert attributes.get(ATTR_OSCILLATING) == expected_oscillating
    assert attributes.get(ATTR_DIRECTION) == expected_direction


async def _register_components(hass, speed_list=None):
    """Register basic components for testing."""
    with assert_setup_component(1, "input_boolean"):
        assert await setup.async_setup_component(
            hass, "input_boolean", {"input_boolean": {"state": None}}
        )

    with assert_setup_component(3, "input_select"):
        assert await setup.async_setup_component(
            hass,
            "input_select",
            {
                "input_select": {
                    "speed": {
                        "name": "Speed",
                        "options": [
                            "",
                            SPEED_LOW,
                            SPEED_MEDIUM,
                            SPEED_HIGH,
                            "1",
                            "2",
                            "3",
                        ],
                    },
                    "osc": {"name": "oscillating", "options": ["", "True", "False"]},
                    "direction": {
                        "name": "Direction",
                        "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE],
                    },
                }
            },
        )

    with assert_setup_component(1, "fan"):
        value_template = """
        {% if is_state('input_boolean.state', 'on') %}
            {{ 'on' }}
        {% else %}
            {{ 'off' }}
        {% endif %}
        """

        test_fan_config = {
            "value_template": value_template,
            "speed_template": "{{ states('input_select.speed') }}",
            "oscillating_template": "{{ states('input_select.osc') }}",
            "direction_template": "{{ states('input_select.direction') }}",
            "turn_on": {
                "service": "input_boolean.turn_on",
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/broadlink/__init__.py
"""Support for a Genius Hub system.""" from datetime import timedelta import logging from typing import Any, Dict, Optional import aiohttp from geniushubclient import GeniusHub import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME, TEMP_CELSIUS, ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.service import verify_domain_control from homeassistant.helpers.typing import ConfigType, HomeAssistantType import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) DOMAIN = "geniushub" # temperature is repeated here, as it gives access to high-precision temps GH_ZONE_ATTRS = ["mode", "temperature", "type", "occupied", "override"] GH_DEVICE_ATTRS = { "luminance": "luminance", "measuredTemperature": "measured_temperature", "occupancyTrigger": "occupancy_trigger", "setback": "setback", "setTemperature": "set_temperature", "wakeupInterval": "wakeup_interval", } SCAN_INTERVAL = timedelta(seconds=60) MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$" V1_API_SCHEMA = vol.Schema( { vol.Required(CONF_TOKEN): cv.string, vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), } ) V3_API_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), } ) CONFIG_SCHEMA = vol.Schema( {DOMAIN: vol.Any(V3_API_SCHEMA, V1_API_SCHEMA)}, extra=vol.ALLOW_EXTRA ) ATTR_ZONE_MODE = "mode" ATTR_DURATION = "duration" SVC_SET_ZONE_MODE = "set_zone_mode" SVC_SET_ZONE_OVERRIDE = "set_zone_override" SET_ZONE_MODE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_ZONE_MODE): vol.In(["off", "timer", "footprint"]), } ) SET_ZONE_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=4, max=28) ), vol.Optional(ATTR_DURATION): vol.All( cv.time_period, vol.Range(min=timedelta(minutes=5), max=timedelta(days=1)), ), } ) async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Create a Genius Hub system.""" hass.data[DOMAIN] = {} kwargs = dict(config[DOMAIN]) if CONF_HOST in kwargs: args = (kwargs.pop(CONF_HOST),) else: args = (kwargs.pop(CONF_TOKEN),) hub_uid = kwargs.pop(CONF_MAC, None) client = GeniusHub(*args, **kwargs, session=async_get_clientsession(hass)) broker = hass.data[DOMAIN]["broker"] = GeniusBroker(hass, client, hub_uid) try: await client.update() except aiohttp.ClientResponseError as err: _LOGGER.error("Setup failed, check your configuration, %s", err) return False broker.make_debug_log_entries() async_track_time_interval(hass, broker.async_update, SCAN_INTERVAL) for platform in ["climate", "water_heater", "sensor", "binary_sensor", "switch"]: hass.async_create_task(async_load_platform(hass, platform, DOMAIN, {}, config)) setup_service_functions(hass, broker) return True @callback def setup_service_functions(hass: HomeAssistantType, broker): """Set up the service functions.""" 
    @verify_domain_control(hass, DOMAIN)
    async def set_zone_mode(call) -> None:
        """Set the mode of a zone."""
        entity_id = call.data[ATTR_ENTITY_ID]

        registry = await hass.helpers.entity_registry.async_get_registry()
        registry_entry = registry.async_get(entity_id)

        if registry_entry is None or registry_entry.platform != DOMAIN:
            raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity")

        if registry_entry.domain != "climate":
            raise ValueError(f"'{entity_id}' is not a {DOMAIN} zone")

        payload = {
            "unique_id": registry_entry.unique_id,
            "service": call.service,
            "data": call.data,
        }

        async_dispatcher_send(hass, DOMAIN, payload)

    hass.services.async_register(
        DOMAIN, SVC_SET_ZONE_MODE, set_zone_mode, schema=SET_ZONE_MODE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_mode, schema=SET_ZONE_OVERRIDE_SCHEMA
    )


class GeniusBroker:
    """Container for geniushub client and data."""

    def __init__(self, hass, client, hub_uid) -> None:
        """Initialize the geniushub client."""
        self.hass = hass
        self.client = client
        self._hub_uid = hub_uid
        self._connect_error = False

    @property
    def hub_uid(self) -> int:
        """Return the Hub UID (MAC address)."""
        # pylint: disable=no-member
        return self._hub_uid if self._hub_uid is not None else self.client.uid

    async def async_update(self, now, **kwargs) -> None:
        """Update the geniushub client's data."""
        try:
            await self.client.update()
            if self._connect_error:
                self._connect_error = False
                _LOGGER.warning("Connection to geniushub re-established")
        except (
            aiohttp.ClientResponseError,
            aiohttp.client_exceptions.ClientConnectorError,
        ) as err:
            if not self._connect_error:
                self._connect_error = True
                _LOGGER.warning(
                    "Connection to geniushub failed (unable to update), message is: %s",
                    err,
                )
            return
        self.make_debug_log_entries()

        async_dispatcher_send(self.hass, DOMAIN)

    def make_debug_log_entries(self) -> None:
        """Make any useful debug log entries."""
        # pylint: disable=protected-access
        _LOGGER.debug(
            "Raw JSON: \n\nclient._zones = %s \n\nclient._devices = %s",
            self.client._zones,
            self.client._devices,
        )


class GeniusEntity(Entity):
    """Base for all Genius Hub entities."""

    def __init__(self) -> None:
        """Initialize the entity."""
        self._unique_id = self._name = None

    async def async_added_to_hass(self) -> None:
        """Set up a listener when this entity is added to HA."""
        self.async_on_remove(async_dispatcher_connect(self.hass, DOMAIN, self._refresh))

    async def _refresh(self, payload: Optional[dict] = None) -> None:
        """Process any signals."""
        self.async_schedule_update_ha_state(force_refresh=True)

    @property
    def unique_id(self) -> Optional[str]:
        """Return a unique ID."""
        return self._unique_id

    @property
    def name(self) -> str:
        """Return the name of the geniushub entity."""
        return self._name

    @property
    def should_poll(self) -> bool:
        """Return False as geniushub entities should not be polled."""
        return False


class GeniusDevice(GeniusEntity):
    """Base for all Genius Hub devices."""

    def __init__(self, broker, device) -> None:
        """Initialize the Device."""
        super().__init__()

        self._device = device
        self._unique_id = f"{broker.hub_uid}_device_{device.id}"
        self._last_comms = self._state_attr = None

    @property
    def device_state_attributes(self) -> Dict[str, Any]:
        """Return the device state attributes."""
        attrs = {}
        attrs["assigned_zone"] = self._device.data["assignedZones"][0]["name"]
        if self._last_comms:
            attrs["last_comms"] = self._last_comms.isoformat()

        state = dict(self._device.data["state"])
        if "_state" in self._device.data:  # only via v3 API
            state.update(self._device.data["_state"])
attrs["state"] = { GH_DEVICE_ATTRS[k]: v for k, v in state.items() if k in GH_DEVICE_ATTRS } return attrs async def async_update(self) -> None: """Update an entity's state data.""" if "_state" in self._device.data: # only via v3 API self._last_comms = dt_util.utc_from_timestamp( self._device.data["_state"]["lastComms"] ) class GeniusZone(GeniusEntity): """Base for all Genius Hub zones.""" def __init__(self, broker, zone) -> None: """Initialize the Zone.""" super().__init__() self._zone = zone self._unique_id = f"{broker.hub_uid}_zone_{zone.id}" async def _refresh(self, payload: Optional[dict] = None) -> None: """Process any signals.""" if payload is None: self.async_schedule_update_ha_state(force_refresh=True) return if payload["unique_id"] != self._unique_id: return if payload["service"] == SVC_SET_ZONE_OVERRIDE: temperature = round(payload["data"][ATTR_TEMPERATURE] * 10) / 10 duration = payload["data"].get(ATTR_DURATION, timedelta(hours=1)) await self._zone.set_override(temperature, int(duration.total_seconds())) return mode = payload["data"][ATTR_ZONE_MODE] # pylint: disable=protected-access if mode == "footprint" and not self._zone._has_pir: raise TypeError( f"'{self.entity_id}' can not support footprint mode (it has no PIR)" ) await self._zone.set_mode(mode) @property def name(self) -> str: """Return the name of the climate device.""" return self._zone.name @property def device_state_attributes(self) -> Dict[str, Any]: """Return the device state attributes.""" status = {k: v for k, v in self._zone.data.items() if k in GH_ZONE_ATTRS} return {"status": status} class GeniusHeatingZone(GeniusZone): """Base for Genius Heating Zones.""" def __init__(self, broker, zone) -> None: """Initialize the Zone.""" super().__init__(broker, zone) self._max_temp = self._min_temp = self._supported_features = None @property def current_temperature(self) -> Optional[float]: """Return the current temperature.""" return self._zone.data.get("temperature") @property def target_temperature(self) -> float: """Return the temperature we try to reach.""" return self._zone.data["setpoint"] @property def min_temp(self) -> float: """Return max valid temperature that can be set.""" return self._min_temp @property def max_temp(self) -> float: """Return max valid temperature that can be set.""" return self._max_temp @property def temperature_unit(self) -> str: """Return the unit of measurement.""" return TEMP_CELSIUS @property def supported_features(self) -> int: """Return the bitmask of supported features.""" return self._supported_features async def async_set_temperature(self, **kwargs) -> None: """Set a new target temperature for this zone.""" await self._zone.set_override( kwargs[ATTR_TEMPERATURE], kwargs.get(ATTR_DURATION, 3600) )
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/geniushub/__init__.py
"""Offer webhook triggered automation rules.""" from functools import partial import logging from aiohttp import hdrs import voluptuous as vol from homeassistant.const import CONF_PLATFORM, CONF_WEBHOOK_ID from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from . import DOMAIN as AUTOMATION_DOMAIN # mypy: allow-untyped-defs DEPENDENCIES = ("webhook",) _LOGGER = logging.getLogger(__name__) TRIGGER_SCHEMA = vol.Schema( {vol.Required(CONF_PLATFORM): "webhook", vol.Required(CONF_WEBHOOK_ID): cv.string} ) async def _handle_webhook(action, hass, webhook_id, request): """Handle incoming webhook.""" result = {"platform": "webhook", "webhook_id": webhook_id} if "json" in request.headers.get(hdrs.CONTENT_TYPE, ""): result["json"] = await request.json() else: result["data"] = await request.post() result["query"] = request.query hass.async_run_job(action, {"trigger": result}) async def async_attach_trigger(hass, config, action, automation_info): """Trigger based on incoming webhooks.""" webhook_id = config.get(CONF_WEBHOOK_ID) hass.components.webhook.async_register( AUTOMATION_DOMAIN, automation_info["name"], webhook_id, partial(_handle_webhook, action), ) @callback def unregister(): """Unregister webhook.""" hass.components.webhook.async_unregister(webhook_id) return unregister
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/automation/webhook.py
"""Support for the light on the Sisyphus Kinetic Art Table.""" import logging import aiohttp from homeassistant.components.light import SUPPORT_BRIGHTNESS, LightEntity from homeassistant.const import CONF_HOST from homeassistant.exceptions import PlatformNotReady from . import DATA_SISYPHUS _LOGGER = logging.getLogger(__name__) SUPPORTED_FEATURES = SUPPORT_BRIGHTNESS async def async_setup_platform(hass, config, add_entities, discovery_info=None): """Set up a single Sisyphus table.""" host = discovery_info[CONF_HOST] try: table_holder = hass.data[DATA_SISYPHUS][host] table = await table_holder.get_table() except aiohttp.ClientError: raise PlatformNotReady() add_entities([SisyphusLight(table_holder.name, table)], update_before_add=True) class SisyphusLight(LightEntity): """Representation of a Sisyphus table as a light.""" def __init__(self, name, table): """Initialize the Sisyphus table.""" self._name = name self._table = table async def async_added_to_hass(self): """Add listeners after this object has been initialized.""" self._table.add_listener(self.async_write_ha_state) @property def available(self): """Return true if the table is responding to heartbeats.""" return self._table.is_connected @property def unique_id(self): """Return the UUID of the table.""" return self._table.id @property def name(self): """Return the ame of the table.""" return self._name @property def is_on(self): """Return True if the table is on.""" return not self._table.is_sleeping @property def brightness(self): """Return the current brightness of the table's ring light.""" return self._table.brightness * 255 @property def supported_features(self): """Return the features supported by the table; i.e. brightness.""" return SUPPORTED_FEATURES async def async_turn_off(self, **kwargs): """Put the table to sleep.""" await self._table.sleep() _LOGGER.debug("Sisyphus table %s: sleep") async def async_turn_on(self, **kwargs): """Wake up the table if necessary, optionally changes brightness.""" if not self.is_on: await self._table.wakeup() _LOGGER.debug("Sisyphus table %s: wakeup") if "brightness" in kwargs: await self._table.set_brightness(kwargs["brightness"] / 255.0)
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/sisyphus/light.py
"""Support for system log.""" from collections import OrderedDict, deque import logging import re import traceback import voluptuous as vol from homeassistant import __path__ as HOMEASSISTANT_PATH from homeassistant.components.http import HomeAssistantView from homeassistant.const import EVENT_HOMEASSISTANT_STOP import homeassistant.helpers.config_validation as cv CONF_MAX_ENTRIES = "max_entries" CONF_FIRE_EVENT = "fire_event" CONF_MESSAGE = "message" CONF_LEVEL = "level" CONF_LOGGER = "logger" DATA_SYSTEM_LOG = "system_log" DEFAULT_MAX_ENTRIES = 50 DEFAULT_FIRE_EVENT = False DOMAIN = "system_log" EVENT_SYSTEM_LOG = "system_log_event" SERVICE_CLEAR = "clear" SERVICE_WRITE = "write" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional( CONF_MAX_ENTRIES, default=DEFAULT_MAX_ENTRIES ): cv.positive_int, vol.Optional(CONF_FIRE_EVENT, default=DEFAULT_FIRE_EVENT): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) SERVICE_CLEAR_SCHEMA = vol.Schema({}) SERVICE_WRITE_SCHEMA = vol.Schema( { vol.Required(CONF_MESSAGE): cv.string, vol.Optional(CONF_LEVEL, default="error"): vol.In( ["debug", "info", "warning", "error", "critical"] ), vol.Optional(CONF_LOGGER): cv.string, } ) def _figure_out_source(record, call_stack, hass): paths = [HOMEASSISTANT_PATH[0], hass.config.config_dir] # If a stack trace exists, extract file names from the entire call stack. # The other case is when a regular "log" is made (without an attached # exception). In that case, just use the file where the log was made from. if record.exc_info: stack = [(x[0], x[1]) for x in traceback.extract_tb(record.exc_info[2])] else: index = -1 for i, frame in enumerate(call_stack): if frame[0] == record.pathname: index = i break if index == -1: # For some reason we couldn't find pathname in the stack. stack = [(record.pathname, record.lineno)] else: stack = call_stack[0 : index + 1] # Iterate through the stack call (in reverse) and find the last call from # a file in Home Assistant. Try to figure out where error happened. 
paths_re = r"(?:{})/(.*)".format("|".join([re.escape(x) for x in paths])) for pathname in reversed(stack): # Try to match with a file within Home Assistant match = re.match(paths_re, pathname[0]) if match: return [match.group(1), pathname[1]] # Ok, we don't know what this is return (record.pathname, record.lineno) class LogEntry: """Store HA log entries.""" def __init__(self, record, stack, source): """Initialize a log entry.""" self.first_occurred = self.timestamp = record.created self.name = record.name self.level = record.levelname self.message = deque([record.getMessage()], maxlen=5) self.exception = "" self.root_cause = None if record.exc_info: self.exception = "".join(traceback.format_exception(*record.exc_info)) _, _, tb = record.exc_info # pylint: disable=invalid-name # Last line of traceback contains the root cause of the exception if traceback.extract_tb(tb): self.root_cause = str(traceback.extract_tb(tb)[-1]) self.source = source self.count = 1 self.hash = str([self.name, *self.source, self.root_cause]) def to_dict(self): """Convert object into dict to maintain backward compatibility.""" return { "name": self.name, "message": list(self.message), "level": self.level, "source": self.source, "timestamp": self.timestamp, "exception": self.exception, "count": self.count, "first_occurred": self.first_occurred, } class DedupStore(OrderedDict): """Data store to hold max amount of deduped entries.""" def __init__(self, maxlen=50): """Initialize a new DedupStore.""" super().__init__() self.maxlen = maxlen def add_entry(self, entry): """Add a new entry.""" key = entry.hash if key in self: # Update stored entry existing = self[key] existing.count += 1 existing.timestamp = entry.timestamp if entry.message[0] not in existing.message: existing.message.append(entry.message[0]) self.move_to_end(key) else: self[key] = entry if len(self) > self.maxlen: # Removes the first record which should also be the oldest self.popitem(last=False) def to_list(self): """Return reversed list of log entries - LIFO.""" return [value.to_dict() for value in reversed(self.values())] class LogErrorHandler(logging.Handler): """Log handler for error messages.""" def __init__(self, hass, maxlen, fire_event): """Initialize a new LogErrorHandler.""" super().__init__() self.hass = hass self.records = DedupStore(maxlen=maxlen) self.fire_event = fire_event def emit(self, record): """Save error and warning logs. Everything logged with error or warning is saved in local buffer. A default upper limit is set to 50 (older entries are discarded) but can be changed if needed. 
""" if record.levelno >= logging.WARN: stack = [] if not record.exc_info: stack = [(f[0], f[1]) for f in traceback.extract_stack()] entry = LogEntry( record, stack, _figure_out_source(record, stack, self.hass) ) self.records.add_entry(entry) if self.fire_event: self.hass.bus.fire(EVENT_SYSTEM_LOG, entry.to_dict()) async def async_setup(hass, config): """Set up the logger component.""" conf = config.get(DOMAIN) if conf is None: conf = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN] handler = LogErrorHandler(hass, conf[CONF_MAX_ENTRIES], conf[CONF_FIRE_EVENT]) logging.getLogger().addHandler(handler) hass.http.register_view(AllErrorsView(handler)) async def async_service_handler(service): """Handle logger services.""" if service.service == "clear": handler.records.clear() return if service.service == "write": logger = logging.getLogger( service.data.get(CONF_LOGGER, f"{__name__}.external") ) level = service.data[CONF_LEVEL] getattr(logger, level)(service.data[CONF_MESSAGE]) async def async_shutdown_handler(event): """Remove logging handler when Home Assistant is shutdown.""" # This is needed as older logger instances will remain logging.getLogger().removeHandler(handler) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown_handler) hass.services.async_register( DOMAIN, SERVICE_CLEAR, async_service_handler, schema=SERVICE_CLEAR_SCHEMA ) hass.services.async_register( DOMAIN, SERVICE_WRITE, async_service_handler, schema=SERVICE_WRITE_SCHEMA ) return True class AllErrorsView(HomeAssistantView): """Get all logged errors and warnings.""" url = "/api/error/all" name = "api:error:all" def __init__(self, handler): """Initialize a new AllErrorsView.""" self.handler = handler async def get(self, request): """Get all errors and warnings.""" return self.json(self.handler.records.to_list())
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/system_log/__init__.py
"""Component for the Somfy MyLink device supporting the Synergy API.""" import logging from somfy_mylink_synergy import SomfyMyLinkSynergy import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.helpers import config_validation as cv from homeassistant.helpers.discovery import async_load_platform _LOGGER = logging.getLogger(__name__) CONF_ENTITY_CONFIG = "entity_config" CONF_SYSTEM_ID = "system_id" CONF_REVERSE = "reverse" CONF_DEFAULT_REVERSE = "default_reverse" DATA_SOMFY_MYLINK = "somfy_mylink_data" DOMAIN = "somfy_mylink" SOMFY_MYLINK_COMPONENTS = ["cover"] def validate_entity_config(values): """Validate config entry for CONF_ENTITY.""" entity_config_schema = vol.Schema({vol.Optional(CONF_REVERSE): cv.boolean}) if not isinstance(values, dict): raise vol.Invalid("expected a dictionary") entities = {} for entity_id, config in values.items(): entity = cv.entity_id(entity_id) config = entity_config_schema(config) entities[entity] = config return entities CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_SYSTEM_ID): cv.string, vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=44100): cv.port, vol.Optional(CONF_DEFAULT_REVERSE, default=False): cv.boolean, vol.Optional(CONF_ENTITY_CONFIG, default={}): validate_entity_config, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the MyLink platform.""" host = config[DOMAIN][CONF_HOST] port = config[DOMAIN][CONF_PORT] system_id = config[DOMAIN][CONF_SYSTEM_ID] entity_config = config[DOMAIN][CONF_ENTITY_CONFIG] entity_config[CONF_DEFAULT_REVERSE] = config[DOMAIN][CONF_DEFAULT_REVERSE] somfy_mylink = SomfyMyLinkSynergy(system_id, host, port) hass.data[DATA_SOMFY_MYLINK] = somfy_mylink for component in SOMFY_MYLINK_COMPONENTS: hass.async_create_task( async_load_platform(hass, component, DOMAIN, entity_config, config) ) return True
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
repo_name: robbiet480/home-assistant
test_path: tests/components/template/test_fan.py
code_path: homeassistant/components/somfy_mylink/__init__.py
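The speed tests in test_fan.py above all repeat one shape: call a fan service, assert the backing input_select changed, then call _verify on the fan entity. A minimal sketch (not part of the original suite) of how those repeated cases could be condensed with pytest.mark.parametrize follows; it assumes the module's existing _TEST_FAN, _SPEED_INPUT_SELECT, _register_components and _verify helpers.

# Sketch only: a parametrized variant of the speed-setting tests above.
# Assumes the helpers defined in this test module are in scope.
import pytest
from homeassistant.components.fan import SPEED_HIGH, SPEED_MEDIUM
from homeassistant.const import STATE_ON
from tests.components.fan import common

@pytest.mark.parametrize("speed", [SPEED_HIGH, SPEED_MEDIUM])
async def test_set_speed_parametrized(hass, calls, speed):
    """Set each valid speed; check both the helper entity and the fan state."""
    await _register_components(hass)
    await common.async_turn_on(hass, _TEST_FAN)
    await common.async_set_speed(hass, _TEST_FAN, speed)
    # The template fan mirrors the input_select it is backed by.
    assert hass.states.get(_SPEED_INPUT_SELECT).state == speed
    _verify(hass, STATE_ON, speed, None, None)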
"""Support for the Rainforest Eagle-200 energy monitor.""" from datetime import timedelta import logging from eagle200_reader import EagleReader from requests.exceptions import ConnectionError as ConnectError, HTTPError, Timeout from uEagle import Eagle as LegacyReader import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_IP_ADDRESS, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle CONF_CLOUD_ID = "cloud_id" CONF_INSTALL_CODE = "install_code" POWER_KILO_WATT = "kW" _LOGGER = logging.getLogger(__name__) MIN_SCAN_INTERVAL = timedelta(seconds=30) SENSORS = { "instantanous_demand": ("Eagle-200 Meter Power Demand", POWER_KILO_WATT), "summation_delivered": ( "Eagle-200 Total Meter Energy Delivered", ENERGY_KILO_WATT_HOUR, ), "summation_received": ( "Eagle-200 Total Meter Energy Received", ENERGY_KILO_WATT_HOUR, ), "summation_total": ( "Eagle-200 Net Meter Energy (Delivered minus Received)", ENERGY_KILO_WATT_HOUR, ), } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Required(CONF_CLOUD_ID): cv.string, vol.Required(CONF_INSTALL_CODE): cv.string, } ) def hwtest(cloud_id, install_code, ip_address): """Try API call 'get_network_info' to see if target device is Legacy or Eagle-200.""" reader = LeagleReader(cloud_id, install_code, ip_address) response = reader.get_network_info() # Branch to test if target is Legacy Model if "NetworkInfo" in response: if response["NetworkInfo"].get("ModelId", None) == "Z109-EAGLE": return reader # Branch to test if target is Eagle-200 Model if "Response" in response: if response["Response"].get("Command", None) == "get_network_info": return EagleReader(ip_address, cloud_id, install_code) # Catch-all if hardware ID tests fail raise ValueError("Couldn't determine device model.") def setup_platform(hass, config, add_entities, discovery_info=None): """Create the Eagle-200 sensor.""" ip_address = config[CONF_IP_ADDRESS] cloud_id = config[CONF_CLOUD_ID] install_code = config[CONF_INSTALL_CODE] try: eagle_reader = hwtest(cloud_id, install_code, ip_address) except (ConnectError, HTTPError, Timeout, ValueError) as error: _LOGGER.error("Failed to connect during setup: %s", error) return eagle_data = EagleData(eagle_reader) eagle_data.update() monitored_conditions = list(SENSORS) sensors = [] for condition in monitored_conditions: sensors.append( EagleSensor( eagle_data, condition, SENSORS[condition][0], SENSORS[condition][1] ) ) add_entities(sensors) class EagleSensor(Entity): """Implementation of the Rainforest Eagle-200 sensor.""" def __init__(self, eagle_data, sensor_type, name, unit): """Initialize the sensor.""" self.eagle_data = eagle_data self._type = sensor_type self._name = name self._unit_of_measurement = unit self._state = None @property def device_class(self): """Return the power device class for the instantanous_demand sensor.""" if self._type == "instantanous_demand": return DEVICE_CLASS_POWER return None @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement.""" return self._unit_of_measurement def update(self): """Get the energy information from the Rainforest Eagle.""" self.eagle_data.update() self._state = 
self.eagle_data.get_state(self._type) class EagleData: """Get the latest data from the Eagle-200 device.""" def __init__(self, eagle_reader): """Initialize the data object.""" self._eagle_reader = eagle_reader self.data = {} @Throttle(MIN_SCAN_INTERVAL) def update(self): """Get the latest data from the Eagle-200 device.""" try: self.data = self._eagle_reader.update() _LOGGER.debug("API data: %s", self.data) except (ConnectError, HTTPError, Timeout, ValueError) as error: _LOGGER.error("Unable to connect during update: %s", error) self.data = {} def get_state(self, sensor_type): """Get the sensor value from the dictionary.""" state = self.data.get(sensor_type) _LOGGER.debug("Updating: %s - %s", sensor_type, state) return state class LeagleReader(LegacyReader): """Wraps uEagle to make it behave like eagle_reader, offering update().""" def update(self): """Fetch and return the four sensor values in a dict.""" out = {} resp = self.get_instantaneous_demand()["InstantaneousDemand"] out["instantanous_demand"] = resp["Demand"] resp = self.get_current_summation()["CurrentSummation"] out["summation_delivered"] = resp["SummationDelivered"] out["summation_received"] = resp["SummationReceived"] out["summation_total"] = out["summation_delivered"] - out["summation_received"] return out
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
repo_name: robbiet480/home-assistant
test_path: tests/components/template/test_fan.py
code_path: homeassistant/components/rainforest_eagle/sensor.py
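EagleData above combines two things: it caches the reader's last result in self.data (so every sensor's update() can read get_state without another network round trip), and it rate-limits refreshes with @Throttle(MIN_SCAN_INTERVAL). Below is an illustrative, self-contained sketch of that throttle-and-cache pattern; Home Assistant's real Throttle decorator is more featureful, and the 30-second interval here is just the value used above.

# Sketch only: the throttle-and-cache pattern EagleData relies on,
# isolated from Home Assistant. Not the actual Throttle implementation.
from datetime import datetime, timedelta

class ThrottledCache:
    """Cache reader output, refreshing at most once per interval."""

    def __init__(self, reader, interval=timedelta(seconds=30)):
        self._reader = reader          # any object exposing update() -> dict
        self._interval = interval
        self._last_refresh = None
        self.data = {}

    def update(self):
        """Refresh the cache unless the previous refresh was too recent."""
        now = datetime.utcnow()
        if self._last_refresh and now - self._last_refresh < self._interval:
            return  # throttled: keep serving the cached data
        self._last_refresh = now
        self.data = self._reader.update()

    def get_state(self, sensor_type):
        """Return the cached value for one sensor key (None if missing)."""
        return self.data.get(sensor_type)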
"""Handle MySensors devices.""" from functools import partial import logging from homeassistant.const import ATTR_BATTERY_LEVEL, STATE_OFF, STATE_ON from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from .const import CHILD_CALLBACK, NODE_CALLBACK, UPDATE_DELAY _LOGGER = logging.getLogger(__name__) ATTR_CHILD_ID = "child_id" ATTR_DESCRIPTION = "description" ATTR_DEVICE = "device" ATTR_NODE_ID = "node_id" ATTR_HEARTBEAT = "heartbeat" MYSENSORS_PLATFORM_DEVICES = "mysensors_devices_{}" def get_mysensors_devices(hass, domain): """Return MySensors devices for a platform.""" if MYSENSORS_PLATFORM_DEVICES.format(domain) not in hass.data: hass.data[MYSENSORS_PLATFORM_DEVICES.format(domain)] = {} return hass.data[MYSENSORS_PLATFORM_DEVICES.format(domain)] class MySensorsDevice: """Representation of a MySensors device.""" def __init__(self, gateway, node_id, child_id, name, value_type): """Set up the MySensors device.""" self.gateway = gateway self.node_id = node_id self.child_id = child_id self._name = name self.value_type = value_type child = gateway.sensors[node_id].children[child_id] self.child_type = child.type self._values = {} self._update_scheduled = False self.hass = None @property def name(self): """Return the name of this entity.""" return self._name @property def device_state_attributes(self): """Return device specific state attributes.""" node = self.gateway.sensors[self.node_id] child = node.children[self.child_id] attr = { ATTR_BATTERY_LEVEL: node.battery_level, ATTR_HEARTBEAT: node.heartbeat, ATTR_CHILD_ID: self.child_id, ATTR_DESCRIPTION: child.description, ATTR_DEVICE: self.gateway.device, ATTR_NODE_ID: self.node_id, } set_req = self.gateway.const.SetReq for value_type, value in self._values.items(): attr[set_req(value_type).name] = value return attr async def async_update(self): """Update the controller with the latest value from a sensor.""" node = self.gateway.sensors[self.node_id] child = node.children[self.child_id] set_req = self.gateway.const.SetReq for value_type, value in child.values.items(): _LOGGER.debug( "Entity update: %s: value_type %s, value = %s", self._name, value_type, value, ) if value_type in ( set_req.V_ARMED, set_req.V_LIGHT, set_req.V_LOCK_STATUS, set_req.V_TRIPPED, ): self._values[value_type] = STATE_ON if int(value) == 1 else STATE_OFF elif value_type == set_req.V_DIMMER: self._values[value_type] = int(value) else: self._values[value_type] = value async def _async_update_callback(self): """Update the device.""" raise NotImplementedError @callback def async_update_callback(self): """Update the device after delay.""" if self._update_scheduled: return async def update(): """Perform update.""" try: await self._async_update_callback() except Exception: # pylint: disable=broad-except _LOGGER.exception("Error updating %s", self.name) finally: self._update_scheduled = False self._update_scheduled = True delayed_update = partial(self.hass.async_create_task, update()) self.hass.loop.call_later(UPDATE_DELAY, delayed_update) class MySensorsEntity(MySensorsDevice, Entity): """Representation of a MySensors entity.""" @property def should_poll(self): """Return the polling state. 
The gateway pushes its states.""" return False @property def available(self): """Return true if entity is available.""" return self.value_type in self._values async def _async_update_callback(self): """Update the entity.""" await self.async_update_ha_state(True) async def async_added_to_hass(self): """Register update callback.""" gateway_id = id(self.gateway) dev_id = gateway_id, self.node_id, self.child_id, self.value_type self.async_on_remove( async_dispatcher_connect( self.hass, CHILD_CALLBACK.format(*dev_id), self.async_update_callback ) ) self.async_on_remove( async_dispatcher_connect( self.hass, NODE_CALLBACK.format(gateway_id, self.node_id), self.async_update_callback, ) )
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
repo_name: robbiet480/home-assistant
test_path: tests/components/template/test_fan.py
code_path: homeassistant/components/mysensors/device.py
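MySensorsDevice.async_update_callback above coalesces bursts of gateway callbacks: a _update_scheduled flag suppresses rescheduling, and hass.loop.call_later defers the actual update by UPDATE_DELAY. The sketch below isolates that debounce pattern with plain asyncio; UPDATE_DELAY here is a stand-in value, not the integration's real constant.

# Sketch only: the coalesced-update pattern from async_update_callback,
# reduced to plain asyncio.
import asyncio

UPDATE_DELAY = 0.1  # stand-in for the real constant from .const

class CoalescedUpdater:
    """Collapse bursts of update requests into one delayed update."""

    def __init__(self):
        self._scheduled = False
        self.update_count = 0

    async def _do_update(self):
        """Perform the update, then allow a new one to be scheduled."""
        self.update_count += 1
        self._scheduled = False

    def request_update(self):
        """Schedule a delayed update unless one is already pending."""
        if self._scheduled:
            return
        self._scheduled = True
        loop = asyncio.get_running_loop()
        loop.call_later(
            UPDATE_DELAY, lambda: asyncio.ensure_future(self._do_update())
        )

async def main():
    updater = CoalescedUpdater()
    for _ in range(10):            # ten rapid-fire requests...
        updater.request_update()
    await asyncio.sleep(0.2)
    print(updater.update_count)    # ...coalesce into a single update: 1

asyncio.run(main())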
"""ONVIF device abstraction.""" import asyncio import datetime as dt import os from typing import List from aiohttp.client_exceptions import ClientConnectionError, ServerDisconnectedError import onvif from onvif import ONVIFCamera from onvif.exceptions import ONVIFError from zeep.exceptions import Fault from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util from .const import ( ABSOLUTE_MOVE, CONTINUOUS_MOVE, GOTOPRESET_MOVE, LOGGER, PAN_FACTOR, RELATIVE_MOVE, TILT_FACTOR, ZOOM_FACTOR, ) from .event import EventManager from .models import PTZ, Capabilities, DeviceInfo, Profile, Resolution, Video class ONVIFDevice: """Manages an ONVIF device.""" def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry = None): """Initialize the device.""" self.hass: HomeAssistant = hass self.config_entry: ConfigEntry = config_entry self.available: bool = True self.device: ONVIFCamera = None self.events: EventManager = None self.info: DeviceInfo = DeviceInfo() self.capabilities: Capabilities = Capabilities() self.profiles: List[Profile] = [] self.max_resolution: int = 0 self._dt_diff_seconds: int = 0 @property def name(self) -> str: """Return the name of this device.""" return self.config_entry.data[CONF_NAME] @property def host(self) -> str: """Return the host of this device.""" return self.config_entry.data[CONF_HOST] @property def port(self) -> int: """Return the port of this device.""" return self.config_entry.data[CONF_PORT] @property def username(self) -> int: """Return the username of this device.""" return self.config_entry.data[CONF_USERNAME] @property def password(self) -> int: """Return the password of this device.""" return self.config_entry.data[CONF_PASSWORD] async def async_setup(self) -> bool: """Set up the device.""" self.device = get_device( self.hass, host=self.config_entry.data[CONF_HOST], port=self.config_entry.data[CONF_PORT], username=self.config_entry.data[CONF_USERNAME], password=self.config_entry.data[CONF_PASSWORD], ) # Get all device info try: await self.device.update_xaddrs() await self.async_check_date_and_time() self.info = await self.async_get_device_info() self.capabilities = await self.async_get_capabilities() self.profiles = await self.async_get_profiles() if self.capabilities.ptz: self.device.create_ptz_service() if self.capabilities.events: self.events = EventManager( self.hass, self.device, self.config_entry.unique_id ) # Determine max resolution from profiles self.max_resolution = max( profile.video.resolution.width for profile in self.profiles if profile.video.encoding == "H264" ) except ClientConnectionError as err: LOGGER.warning( "Couldn't connect to camera '%s', but will retry later. Error: %s", self.name, err, ) self.available = False except Fault as err: LOGGER.error( "Couldn't connect to camera '%s', please verify " "that the credentials are correct. 
Error: %s", self.name, err, ) return False return True async def async_stop(self, event=None): """Shut it all down.""" if self.events: await self.events.async_stop() await self.device.close() async def async_check_date_and_time(self) -> None: """Warns if device and system date not synced.""" LOGGER.debug("Setting up the ONVIF device management service") device_mgmt = self.device.create_devicemgmt_service() LOGGER.debug("Retrieving current device date/time") try: system_date = dt_util.utcnow() device_time = await device_mgmt.GetSystemDateAndTime() if not device_time: LOGGER.debug( """Couldn't get device '%s' date/time. GetSystemDateAndTime() return null/empty""", self.name, ) return if device_time.UTCDateTime: tzone = dt_util.UTC cdate = device_time.UTCDateTime else: tzone = ( dt_util.get_time_zone(device_time.TimeZone) or dt_util.DEFAULT_TIME_ZONE ) cdate = device_time.LocalDateTime if cdate is None: LOGGER.warning("Could not retrieve date/time on this camera") else: cam_date = dt.datetime( cdate.Date.Year, cdate.Date.Month, cdate.Date.Day, cdate.Time.Hour, cdate.Time.Minute, cdate.Time.Second, 0, tzone, ) cam_date_utc = cam_date.astimezone(dt_util.UTC) LOGGER.debug( "Device date/time: %s | System date/time: %s", cam_date_utc, system_date, ) dt_diff = cam_date - system_date self._dt_diff_seconds = dt_diff.total_seconds() if self._dt_diff_seconds > 5: LOGGER.warning( "The date/time on the device (UTC) is '%s', " "which is different from the system '%s', " "this could lead to authentication issues", cam_date_utc, system_date, ) except ServerDisconnectedError as err: LOGGER.warning( "Couldn't get device '%s' date/time. Error: %s", self.name, err ) async def async_get_device_info(self) -> DeviceInfo: """Obtain information about this device.""" device_mgmt = self.device.create_devicemgmt_service() device_info = await device_mgmt.GetDeviceInformation() # Grab the last MAC address for backwards compatibility mac = None try: network_interfaces = await device_mgmt.GetNetworkInterfaces() for interface in network_interfaces: if interface.Enabled: mac = interface.Info.HwAddress except Fault as fault: if "not implemented" not in fault.message: raise fault LOGGER.debug( "Couldn't get network interfaces from ONVIF deivice '%s'. 
Error: %s", self.name, fault, ) return DeviceInfo( device_info.Manufacturer, device_info.Model, device_info.FirmwareVersion, device_info.SerialNumber, mac, ) async def async_get_capabilities(self): """Obtain information about the available services on the device.""" snapshot = False try: media_service = self.device.create_media_service() media_capabilities = await media_service.GetServiceCapabilities() snapshot = media_capabilities and media_capabilities.SnapshotUri except (ONVIFError, Fault, ServerDisconnectedError): pass pullpoint = False try: event_service = self.device.create_events_service() event_capabilities = await event_service.GetServiceCapabilities() pullpoint = event_capabilities and event_capabilities.WSPullPointSupport except (ONVIFError, Fault): pass ptz = False try: self.device.get_definition("ptz") ptz = True except ONVIFError: pass return Capabilities(snapshot, pullpoint, ptz) async def async_get_profiles(self) -> List[Profile]: """Obtain media profiles for this device.""" media_service = self.device.create_media_service() result = await media_service.GetProfiles() profiles = [] for key, onvif_profile in enumerate(result): # Only add H264 profiles if ( not onvif_profile.VideoEncoderConfiguration or onvif_profile.VideoEncoderConfiguration.Encoding != "H264" ): continue profile = Profile( key, onvif_profile.token, onvif_profile.Name, Video( onvif_profile.VideoEncoderConfiguration.Encoding, Resolution( onvif_profile.VideoEncoderConfiguration.Resolution.Width, onvif_profile.VideoEncoderConfiguration.Resolution.Height, ), ), ) # Configure PTZ options if onvif_profile.PTZConfiguration: profile.ptz = PTZ( onvif_profile.PTZConfiguration.DefaultContinuousPanTiltVelocitySpace is not None, onvif_profile.PTZConfiguration.DefaultRelativePanTiltTranslationSpace is not None, onvif_profile.PTZConfiguration.DefaultAbsolutePantTiltPositionSpace is not None, ) try: ptz_service = self.device.create_ptz_service() presets = await ptz_service.GetPresets(profile.token) profile.ptz.presets = [preset.token for preset in presets if preset] except (Fault, ServerDisconnectedError): # It's OK if Presets aren't supported profile.ptz.presets = [] profiles.append(profile) return profiles async def async_get_snapshot_uri(self, profile: Profile) -> str: """Get the snapshot URI for a specified profile.""" if not self.capabilities.snapshot: return None media_service = self.device.create_media_service() req = media_service.create_type("GetSnapshotUri") req.ProfileToken = profile.token result = await media_service.GetSnapshotUri(req) return result.Uri async def async_get_stream_uri(self, profile: Profile) -> str: """Get the stream URI for a specified profile.""" media_service = self.device.create_media_service() req = media_service.create_type("GetStreamUri") req.ProfileToken = profile.token req.StreamSetup = { "Stream": "RTP-Unicast", "Transport": {"Protocol": "RTSP"}, } result = await media_service.GetStreamUri(req) return result.Uri async def async_perform_ptz( self, profile: Profile, distance, speed, move_mode, continuous_duration, preset, pan=None, tilt=None, zoom=None, ): """Perform a PTZ action on the camera.""" if not self.capabilities.ptz: LOGGER.warning("PTZ actions are not supported on device '%s'", self.name) return ptz_service = self.device.create_ptz_service() pan_val = distance * PAN_FACTOR.get(pan, 0) tilt_val = distance * TILT_FACTOR.get(tilt, 0) zoom_val = distance * ZOOM_FACTOR.get(zoom, 0) speed_val = speed preset_val = preset LOGGER.debug( "Calling %s PTZ | Pan = %4.2f | Tilt = %4.2f | 
Zoom = %4.2f | Speed = %4.2f | Preset = %s", move_mode, pan_val, tilt_val, zoom_val, speed_val, preset_val, ) try: req = ptz_service.create_type(move_mode) req.ProfileToken = profile.token if move_mode == CONTINUOUS_MOVE: # Guard against unsupported operation if not profile.ptz.continuous: LOGGER.warning( "ContinuousMove not supported on device '%s'", self.name ) return req.Velocity = { "PanTilt": {"x": pan_val, "y": tilt_val}, "Zoom": {"x": zoom_val}, } await ptz_service.ContinuousMove(req) await asyncio.sleep(continuous_duration) req = ptz_service.create_type("Stop") req.ProfileToken = profile.token await ptz_service.Stop({"ProfileToken": req.ProfileToken}) elif move_mode == RELATIVE_MOVE: # Guard against unsupported operation if not profile.ptz.relative: LOGGER.warning( "ContinuousMove not supported on device '%s'", self.name ) return req.Translation = { "PanTilt": {"x": pan_val, "y": tilt_val}, "Zoom": {"x": zoom_val}, } req.Speed = { "PanTilt": {"x": speed_val, "y": speed_val}, "Zoom": {"x": speed_val}, } await ptz_service.RelativeMove(req) elif move_mode == ABSOLUTE_MOVE: # Guard against unsupported operation if not profile.ptz.absolute: LOGGER.warning( "ContinuousMove not supported on device '%s'", self.name ) return req.Position = { "PanTilt": {"x": pan_val, "y": tilt_val}, "Zoom": {"x": zoom_val}, } req.Speed = { "PanTilt": {"x": speed_val, "y": speed_val}, "Zoom": {"x": speed_val}, } await ptz_service.AbsoluteMove(req) elif move_mode == GOTOPRESET_MOVE: # Guard against unsupported operation if preset_val not in profile.ptz.presets: LOGGER.warning( "PTZ preset '%s' does not exist on device '%s'. Available Presets: %s", preset_val, self.name, ", ".join(profile.ptz.presets), ) return req.PresetToken = preset_val req.Speed = { "PanTilt": {"x": speed_val, "y": speed_val}, "Zoom": {"x": speed_val}, } await ptz_service.GotoPreset(req) except ONVIFError as err: if "Bad Request" in err.reason: LOGGER.warning("Device '%s' doesn't support PTZ.", self.name) else: LOGGER.error("Error trying to perform PTZ action: %s", err) def get_device(hass, host, port, username, password) -> ONVIFCamera: """Get ONVIFCamera instance.""" return ONVIFCamera( host, port, username, password, f"{os.path.dirname(onvif.__file__)}/wsdl/", no_cache=True, )
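async_perform_ptz above builds its velocity vector the same way for every move mode: each axis is distance multiplied by a signed factor looked up from a direction map, with dict.get(key, 0) zeroing out unknown or omitted directions. The sketch below factors that computation into a pure function; the factor maps mirror the shape of PAN_FACTOR / TILT_FACTOR / ZOOM_FACTOR from the integration's const module, but the exact keys and values used here are assumptions.

# Sketch only: the PTZ velocity computation as a standalone function.
# Factor maps are illustrative stand-ins for the integration's constants.
PAN_FACTOR = {"RIGHT": 1, "LEFT": -1}
TILT_FACTOR = {"UP": 1, "DOWN": -1}
ZOOM_FACTOR = {"ZOOM_IN": 1, "ZOOM_OUT": -1}

def ptz_vector(distance, pan=None, tilt=None, zoom=None):
    """Map direction names to signed velocities scaled by distance.

    Unknown or omitted directions contribute 0, exactly like the
    dict.get(key, 0) lookups in async_perform_ptz.
    """
    return {
        "PanTilt": {
            "x": distance * PAN_FACTOR.get(pan, 0),
            "y": distance * TILT_FACTOR.get(tilt, 0),
        },
        "Zoom": {"x": distance * ZOOM_FACTOR.get(zoom, 0)},
    }

# e.g. ptz_vector(0.5, pan="LEFT", zoom="ZOOM_IN")
# -> {'PanTilt': {'x': -0.5, 'y': 0.0}, 'Zoom': {'x': 0.5}}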
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/onvif/device.py
"""Support for Bbox Bouygues Modem Router.""" from datetime import timedelta import logging import pybbox import requests import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_MONITORED_VARIABLES, CONF_NAME, DATA_RATE_MEGABITS_PER_SECOND, DEVICE_CLASS_TIMESTAMP, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle from homeassistant.util.dt import utcnow _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Powered by Bouygues Telecom" DEFAULT_NAME = "Bbox" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) # Sensor types are defined like so: Name, unit, icon SENSOR_TYPES = { "down_max_bandwidth": [ "Maximum Download Bandwidth", DATA_RATE_MEGABITS_PER_SECOND, "mdi:download", ], "up_max_bandwidth": [ "Maximum Upload Bandwidth", DATA_RATE_MEGABITS_PER_SECOND, "mdi:upload", ], "current_down_bandwidth": [ "Currently Used Download Bandwidth", DATA_RATE_MEGABITS_PER_SECOND, "mdi:download", ], "current_up_bandwidth": [ "Currently Used Upload Bandwidth", DATA_RATE_MEGABITS_PER_SECOND, "mdi:upload", ], "uptime": ["Uptime", None, "mdi:clock"], "number_of_reboots": ["Number of reboot", None, "mdi:restart"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_MONITORED_VARIABLES): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Bbox sensor.""" # Create a data fetcher to support all of the configured sensors. Then make # the first call to init the data. try: bbox_data = BboxData() bbox_data.update() except requests.exceptions.HTTPError as error: _LOGGER.error(error) return False name = config[CONF_NAME] sensors = [] for variable in config[CONF_MONITORED_VARIABLES]: if variable == "uptime": sensors.append(BboxUptimeSensor(bbox_data, variable, name)) else: sensors.append(BboxSensor(bbox_data, variable, name)) add_entities(sensors, True) class BboxUptimeSensor(Entity): """Bbox uptime sensor.""" def __init__(self, bbox_data, sensor_type, name): """Initialize the sensor.""" self.client_name = name self.type = sensor_type self._name = SENSOR_TYPES[sensor_type][0] self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self._icon = SENSOR_TYPES[sensor_type][2] self.bbox_data = bbox_data self._state = None @property def name(self): """Return the name of the sensor.""" return f"{self.client_name} {self._name}" @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def device_state_attributes(self): """Return the state attributes.""" return {ATTR_ATTRIBUTION: ATTRIBUTION} @property def device_class(self): """Return the class of this sensor.""" return DEVICE_CLASS_TIMESTAMP def update(self): """Get the latest data from Bbox and update the state.""" self.bbox_data.update() uptime = utcnow() - timedelta( seconds=self.bbox_data.router_infos["device"]["uptime"] ) self._state = uptime.replace(microsecond=0).isoformat() class BboxSensor(Entity): """Implementation of a Bbox sensor.""" def __init__(self, bbox_data, sensor_type, name): """Initialize the sensor.""" self.client_name = name self.type = sensor_type self._name = SENSOR_TYPES[sensor_type][0] self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self._icon = SENSOR_TYPES[sensor_type][2] self.bbox_data = 
bbox_data self._state = None @property def name(self): """Return the name of the sensor.""" return f"{self.client_name} {self._name}" @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def device_state_attributes(self): """Return the state attributes.""" return {ATTR_ATTRIBUTION: ATTRIBUTION} def update(self): """Get the latest data from Bbox and update the state.""" self.bbox_data.update() if self.type == "down_max_bandwidth": self._state = round(self.bbox_data.data["rx"]["maxBandwidth"] / 1000, 2) elif self.type == "up_max_bandwidth": self._state = round(self.bbox_data.data["tx"]["maxBandwidth"] / 1000, 2) elif self.type == "current_down_bandwidth": self._state = round(self.bbox_data.data["rx"]["bandwidth"] / 1000, 2) elif self.type == "current_up_bandwidth": self._state = round(self.bbox_data.data["tx"]["bandwidth"] / 1000, 2) elif self.type == "number_of_reboots": self._state = self.bbox_data.router_infos["device"]["numberofboots"] class BboxData: """Get data from the Bbox.""" def __init__(self): """Initialize the data object.""" self.data = None self.router_infos = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from the Bbox.""" try: box = pybbox.Bbox() self.data = box.get_ip_stats() self.router_infos = box.get_bbox_info() except requests.exceptions.HTTPError as error: _LOGGER.error(error) self.data = None self.router_infos = None return False
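# Editor's usage sketch (hedged; mirrors PLATFORM_SCHEMA above): a
# configuration dict this platform would accept, written the way the test
# suites elsewhere in this document pass config to async_setup_component().
# The monitored_variables values are keys of SENSOR_TYPES.
BBOX_SENSOR_CONFIG = {
    "sensor": {
        "platform": "bbox",
        "name": "Bbox",
        "monitored_variables": [
            "down_max_bandwidth",
            "current_down_bandwidth",
            "uptime",
        ],
    }
}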
"""The tests for the Template fan platform.""" import logging import pytest import voluptuous as vol from homeassistant import setup from homeassistant.components.fan import ( ATTR_DIRECTION, ATTR_OSCILLATING, ATTR_SPEED, DIRECTION_FORWARD, DIRECTION_REVERSE, SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from tests.common import assert_setup_component, async_mock_service from tests.components.fan import common _LOGGER = logging.getLogger(__name__) _TEST_FAN = "fan.test_fan" # Represent for fan's state _STATE_INPUT_BOOLEAN = "input_boolean.state" # Represent for fan's state _STATE_AVAILABILITY_BOOLEAN = "availability_boolean.state" # Represent for fan's speed _SPEED_INPUT_SELECT = "input_select.speed" # Represent for fan's oscillating _OSC_INPUT = "input_select.osc" # Represent for fan's direction _DIRECTION_INPUT_SELECT = "input_select.direction" @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") # Configuration tests # async def test_missing_optional_config(hass, calls): """Test: missing optional template is ok.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, None, None, None) async def test_missing_value_template_config(hass, calls): """Test: missing 'value_template' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_on_config(hass, calls): """Test: missing 'turn_on' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_missing_turn_off_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] async def test_invalid_config(hass, calls): """Test: missing 'turn_off' will fail.""" with assert_setup_component(0, "fan"): assert await setup.async_setup_component( hass, "fan", { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "turn_on": {"service": "script.fan_on"}, } }, }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.async_all() == [] # End of configuration tests # # Template tests # async def test_templates_with_entities(hass, calls): 
"""Test tempalates with values from other entities.""" value_template = """ {% if is_state('input_boolean.state', 'True') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) hass.states.async_set(_STATE_INPUT_BOOLEAN, True) hass.states.async_set(_SPEED_INPUT_SELECT, SPEED_MEDIUM) hass.states.async_set(_OSC_INPUT, "True") hass.states.async_set(_DIRECTION_INPUT_SELECT, DIRECTION_FORWARD) await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_availability_template_with_entities(hass, calls): """Test availability tempalates with values from other entities.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "availability_template": "{{ is_state('availability_boolean.state', 'on') }}", "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() # When template returns true.. 
hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_ON) await hass.async_block_till_done() # Device State should not be unavailable assert hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE # When Availability template returns false hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, STATE_OFF) await hass.async_block_till_done() # device state should be unavailable assert hass.states.get(_TEST_FAN).state == STATE_UNAVAILABLE async def test_templates_with_valid_values(hass, calls): """Test templates with valid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "speed_template": "{{ 'medium' }}", "oscillating_template": "{{ 1 == 1 }}", "direction_template": "{{ 'forward' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_ON, SPEED_MEDIUM, True, DIRECTION_FORWARD) async def test_templates_invalid_values(hass, calls): """Test templates with invalid values.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'abc' }}", "speed_template": "{{ '0' }}", "oscillating_template": "{{ 'xyz' }}", "direction_template": "{{ 'right' }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() _verify(hass, STATE_OFF, None, None, None) async def test_invalid_availability_template_keeps_component_available(hass, caplog): """Test that an invalid availability keeps the device available.""" with assert_setup_component(1, "fan"): assert await setup.async_setup_component( hass, "fan", { "fan": { "platform": "template", "fans": { "test_fan": { "value_template": "{{ 'on' }}", "availability_template": "{{ x - 12 }}", "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": {"service": "script.fan_on"}, "turn_off": {"service": "script.fan_off"}, } }, } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE assert ("Could not render availability_template template") in caplog.text assert ("UndefinedError: 'x' is undefined") in caplog.text # End of template tests # # Function tests # async def test_on_off(hass, calls): """Test turn on and turn off.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON _verify(hass, STATE_ON, None, None, None) # Turn off fan await common.async_turn_off(hass, _TEST_FAN) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_OFF _verify(hass, STATE_OFF, None, None, None) async def test_on_with_speed(hass, calls): """Test turn on with speed.""" await _register_components(hass) # Turn on fan with high speed await common.async_turn_on(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_STATE_INPUT_BOOLEAN).state == STATE_ON assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def 
test_set_speed(hass, calls): """Test set valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to medium await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_MEDIUM _verify(hass, STATE_ON, SPEED_MEDIUM, None, None) async def test_set_invalid_speed_from_initial_stage(hass, calls): """Test set invalid speed when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_speed(hass, calls): """Test set invalid speed when fan has valid speed.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to high await common.async_set_speed(hass, _TEST_FAN, SPEED_HIGH) # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) # Set fan's speed to 'invalid' await common.async_set_speed(hass, _TEST_FAN, "invalid") # verify speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == SPEED_HIGH _verify(hass, STATE_ON, SPEED_HIGH, None, None) async def test_custom_speed_list(hass, calls): """Test set custom speed list.""" await _register_components(hass, ["1", "2", "3"]) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's speed to '1' await common.async_set_speed(hass, _TEST_FAN, "1") # verify assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) # Set fan's speed to 'medium' which is invalid await common.async_set_speed(hass, _TEST_FAN, SPEED_MEDIUM) # verify that speed is unchanged assert hass.states.get(_SPEED_INPUT_SELECT).state == "1" _verify(hass, STATE_ON, "1", None, None) async def test_set_osc(hass, calls): """Test set oscillating.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) # Set fan's osc to False await common.async_oscillate(hass, _TEST_FAN, False) # verify assert hass.states.get(_OSC_INPUT).state == "False" _verify(hass, STATE_ON, None, False, None) async def test_set_invalid_osc_from_initial_state(hass, calls): """Test set invalid oscillating when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to 'invalid' with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, "invalid") # verify assert hass.states.get(_OSC_INPUT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_osc(hass, calls): """Test set invalid oscillating when fan has valid osc.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's osc to True await common.async_oscillate(hass, _TEST_FAN, True) # verify assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, 
None) # Set fan's osc to None with pytest.raises(vol.Invalid): await common.async_oscillate(hass, _TEST_FAN, None) # verify osc is unchanged assert hass.states.get(_OSC_INPUT).state == "True" _verify(hass, STATE_ON, None, True, None) async def test_set_direction(hass, calls): """Test set valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to reverse await common.async_set_direction(hass, _TEST_FAN, DIRECTION_REVERSE) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_REVERSE _verify(hass, STATE_ON, None, None, DIRECTION_REVERSE) async def test_set_invalid_direction_from_initial_stage(hass, calls): """Test set invalid direction when fan is in initial state.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == "" _verify(hass, STATE_ON, None, None, None) async def test_set_invalid_direction(hass, calls): """Test set invalid direction when fan has valid direction.""" await _register_components(hass) # Turn on fan await common.async_turn_on(hass, _TEST_FAN) # Set fan's direction to forward await common.async_set_direction(hass, _TEST_FAN, DIRECTION_FORWARD) # verify assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) # Set fan's direction to 'invalid' await common.async_set_direction(hass, _TEST_FAN, "invalid") # verify direction is unchanged assert hass.states.get(_DIRECTION_INPUT_SELECT).state == DIRECTION_FORWARD _verify(hass, STATE_ON, None, None, DIRECTION_FORWARD) def _verify( hass, expected_state, expected_speed, expected_oscillating, expected_direction ): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes assert state.state == expected_state assert attributes.get(ATTR_SPEED) == expected_speed assert attributes.get(ATTR_OSCILLATING) == expected_oscillating assert attributes.get(ATTR_DIRECTION) == expected_direction async def _register_components(hass, speed_list=None): """Register basic components for testing.""" with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} ) with assert_setup_component(3, "input_select"): assert await setup.async_setup_component( hass, "input_select", { "input_select": { "speed": { "name": "Speed", "options": [ "", SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, "1", "2", "3", ], }, "osc": {"name": "oscillating", "options": ["", "True", "False"]}, "direction": { "name": "Direction", "options": ["", DIRECTION_FORWARD, DIRECTION_REVERSE], }, } }, ) with assert_setup_component(1, "fan"): value_template = """ {% if is_state('input_boolean.state', 'on') %} {{ 'on' }} {% else %} {{ 'off' }} {% endif %} """ test_fan_config = { "value_template": value_template, "speed_template": "{{ states('input_select.speed') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", "turn_on": { "service": "input_boolean.turn_on", 
"entity_id": _STATE_INPUT_BOOLEAN, }, "turn_off": { "service": "input_boolean.turn_off", "entity_id": _STATE_INPUT_BOOLEAN, }, "set_speed": { "service": "input_select.select_option", "data_template": { "entity_id": _SPEED_INPUT_SELECT, "option": "{{ speed }}", }, }, "set_oscillating": { "service": "input_select.select_option", "data_template": { "entity_id": _OSC_INPUT, "option": "{{ oscillating }}", }, }, "set_direction": { "service": "input_select.select_option", "data_template": { "entity_id": _DIRECTION_INPUT_SELECT, "option": "{{ direction }}", }, }, } if speed_list: test_fan_config["speeds"] = speed_list assert await setup.async_setup_component( hass, "fan", {"fan": {"platform": "template", "fans": {"test_fan": test_fan_config}}}, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done()
robbiet480/home-assistant
tests/components/template/test_fan.py
homeassistant/components/bbox/sensor.py
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import os from asv.graph import Graph, RESAMPLED_POINTS, make_summary_graph def test_graph_single(): vals = [ (1, 1, None), (2, 2, 1), (3, 3, None), (4, 4, 7), (5, 5, None), (6, None, None), (7, float('nan'), None), ] # Should give same data back, excluding missing values at edges g = Graph('foo', {}) for k, v, dv in vals: g.add_data_point(k, v, dv) data = g.get_data() assert data == vals[:-2] # Should average duplicate values g = Graph('foo', {}) g.add_data_point(4, 3) for k, v, dv in vals: g.add_data_point(k, v, dv) g.add_data_point(4, 5) data = g.get_data() assert data[3][0] == 4 assert abs(data[3][1] - (3 + 4 + 5)/3.) < 1e-10 # Summary graph should be the same as the main graph g = Graph('foo', {}) for k, v, dv in vals: g.add_data_point(k, v, dv) g = make_summary_graph([g]) data = g.get_data() assert len(data) == len(vals) - 2 for v, d in zip(vals, data): kv, xv, dv = v kd, xd, dd = d assert kv == kd assert dd is None assert abs(xv - xd) < 1e-10 def test_graph_multi(): vals = [ (0, [None, None, None], [None]*3), (1, [1, None, float('nan')], [None]*3), (2, [2, 5, 4], [1]*3), (3, [3, 4, -60], [None]*3), (4, [4, 3, 2], [None]*3), (5, [None, 2, None], [None]*3), (6, [6, 1, None], [None]*3) ] filled_vals = [ (1, [1, 5, 4]), (2, [2, 5, 4]), (3, [3, 4, -60]), (4, [4, 3, 2]), (5, [4, 2, 2]), (6, [6, 1, 2]) ] # Should give same data back, with missing data at edges removed g = Graph('foo', {}) for k, v, dv in vals: g.add_data_point(k, v, dv) data = g.get_data() assert data[0] == (1, [1, None, None], [None, None, None]) assert data[1:] == vals[2:] # Should average duplicate values g = Graph('foo', {}) g.add_data_point(4, [1, 2, 3]) for k, v, dv in vals: g.add_data_point(k, v, dv) g.add_data_point(4, [3, 2, 1]) data = g.get_data() assert data[3][0] == 4 assert abs(data[3][1][0] - (1 + 4 + 3)/3.) < 1e-10 assert abs(data[3][1][1] - (2 + 3 + 2)/3.) < 1e-10 assert abs(data[3][1][2] - (3 + 2 + 1)/3.) 
< 1e-10 # The summary graph is obtained by geometric mean of filled data g = Graph('foo', {}) for k, v, dv in vals: g.add_data_point(k, v, dv) g = make_summary_graph([g]) data = g.get_data() for v, d in zip(filled_vals, data): kv, xvs = v kd, xd, dd = d assert kv == kd # geom mean, with some sign convention expected = _sgn(sum(xvs)) * (abs(xvs[0]*xvs[1]*xvs[2]))**(1./3) assert abs(xd - expected) < 1e-10 # Test summary over separate graphs -- should behave as if the # data was in a single graph g0 = Graph('foo', {}) g1 = Graph('foo', {}) g2 = Graph('foo', {}) for k, v, dv in vals: g0.add_data_point(k, v, dv) g1.add_data_point(k, v[0], dv[0]) g2.add_data_point(k, v[1:], dv[1:]) data0 = make_summary_graph([g0]).get_data() data = make_summary_graph([g1, g2]).get_data() assert data == data0 # Check the above is true regardless if some x-values are missing g0.add_data_point(7, [None, 1, None]) g2.add_data_point(7, [1, None]) g0.add_data_point(4.5, [9, None, None]) g1.add_data_point(4.5, 9) data0 = make_summary_graph([g0]).get_data() data = make_summary_graph([g1, g2]).get_data() assert data == data0 def test_empty_graph(): g = Graph('foo', {}) g.add_data_point(1, None) g.add_data_point(2, None) g.add_data_point(3, None) data = g.get_data() assert data == [] g = Graph('foo', {}) g.add_data_point(1, None) g.add_data_point(1, [None, None]) g.add_data_point(2, [None, None]) g.add_data_point(3, None) g.add_data_point(4, [None, None]) data = g.get_data() assert data == [] def test_nan(): g = Graph('foo', {}) g.add_data_point(1, 1) g.add_data_point(2, 2) g.add_data_point(2, float('nan')) g.add_data_point(3, 3) g.add_data_point(4, float('nan')) data = g.get_data() assert data == [(1, 1, None), (2, 2, None), (3, 3, None)] g = Graph('foo', {}) g.add_data_point(1, None) g.add_data_point(1, [1, float('nan')]) g.add_data_point(2, [2, 2]) g.add_data_point(3, [float('nan'), float('nan')]) g.add_data_point(4, [None, float('nan')]) data = g.get_data() assert data == [(1, [1, None], [None, None]), (2, [2, 2], [None, None])] def test_summary_graph(): n = 2 * int(RESAMPLED_POINTS) g = Graph('foo', {}) for i in range(n): g.add_data_point(i, 0.1) g.add_data_point(n + i, 0.2) g = make_summary_graph([g]) data = g.get_data() assert len(data) == 512 for i in range(256): assert abs(data[i][1] - 0.1) < 1e-7 assert abs(data[256 + i][1] - 0.2) < 1e-7 def test_summary_graph_loop(): n = int(RESAMPLED_POINTS) # Resampling shouldn't get stuck in an infinite loop g = Graph('foo', {}) for j in range(n): g.add_data_point(j, 0.1) g = make_summary_graph([g]) data = g.get_data() assert len(data) == 1 assert data[0][0] == n assert abs(data[0][1] - 0.1) < 1e-7 def test_graph_steps(): vals = [(1, 1), (5, 1), (6, 1), (7, 1), (8, 1), (11, 2), (15, 2), (16, 2 + 1e-5), (17, 2), (18, 2)] g = Graph('foo', {}) for x, y in vals: g.add_data_point(x, y) steps = g.get_steps() lastval = steps[1][4] assert abs(lastval - 1e-5/5.0) < 1e-10 assert steps == [(1, 8+1, 1.0, 1.0, 0), (11, 18+1, 2.0, 2.0, lastval)] multi_g = Graph('foo', {}) for x, y in vals: multi_g.add_data_point(x, [y, y, y]) for s in multi_g.get_steps(): assert s == steps def test_graph_filename_sanitization(): g = Graph('hello:world', {'a/a': 'b>b', 'c*c': 'd\0\0d'}) assert g.path == os.path.join('graphs', 'a_a-b_b', 'c_c-d__d', 'hello_world') def _sgn(x): return 1 if x >= 0 else -1
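# Editor's usage sketch (distilled from the tests above): the Graph API as
# these tests exercise it -- add (x, y[, dy]) points, read them back as
# (x, y, dy) tuples, and collapse one or more graphs into a summary graph
# (a geometric mean over value lists).
def _demo_graph_api():
    g = Graph('example', {})
    g.add_data_point(1, 1.0)
    g.add_data_point(2, 2.0)
    assert g.get_data() == [(1, 1.0, None), (2, 2.0, None)]
    return make_summary_graph([g])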
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import os from os.path import join import six import pytest import tempfile import shutil import datetime from asv import config from asv import repo from asv import util try: import hglib except ImportError: hglib = None from . import tools def _test_generic_repo(conf, tmpdir, hash_range, master, branch, is_remote=False): workcopy_dir = tempfile.mkdtemp(dir=tmpdir, prefix="workcopy") os.rmdir(workcopy_dir) # check mirroring fails early if *mirror_dir* exists but is not # a mirror if is_remote: if os.path.isdir(conf.project): shutil.rmtree(conf.project) os.makedirs(join(conf.project, 'hello')) with pytest.raises(util.UserError): r = repo.get_repo(conf) shutil.rmtree(conf.project) # basic checkouts r = repo.get_repo(conf) # Subrepo creation r.checkout(workcopy_dir, master) assert os.path.exists(join(workcopy_dir, "setup.py")) for filename in ("README", "untracked"): with open(join(workcopy_dir, filename), "wb") as fd: fd.write(b"foo") # After checkout the subrepo has been cleaned r.checkout(workcopy_dir, branch) assert not os.path.exists(join(workcopy_dir, "untracked")) with open(join(workcopy_dir, "README"), "rb") as fd: data = fd.read(33) assert data == b"This is the asv_test_repo project" r.checkout(workcopy_dir, master) # check recovering from corruption for pth in ['.hg', '.git']: pth = os.path.join(workcopy_dir, pth) if os.path.isdir(pth): shutil.rmtree(pth) r.checkout(workcopy_dir, master) hashes = r.get_hashes_from_range(hash_range) assert len(hashes) == 4 dates = [r.get_date(hash) for hash in hashes] assert dates == sorted(dates)[::-1] tags = r.get_tags() for tag in tags: r.get_date_from_name(tag) def _test_branches(conf, branch_commits, require_describe=False): r = repo.get_repo(conf) assert len(conf.branches) == 2 for branch in conf.branches: commits = r.get_branch_commits(branch) for commit in branch_commits[branch]: assert commit in commits name = r.get_name_from_hash(commit) if require_describe: assert name is not None if name is not None: assert r.get_hash_from_name(name) == commit assert name in r.get_decorated_hash(commit) def test_repo_git(tmpdir): tmpdir = six.text_type(tmpdir) dvcs = tools.generate_test_repo(tmpdir, list(range(10)), dvcs_type='git', extra_branches=[('master~4', 'some-branch', [11, 12, 13])]) mirror_dir = join(tmpdir, "repo") def test_it(is_remote=False): conf = config.Config() conf.project = mirror_dir conf.repo = dvcs.path _test_generic_repo(conf, tmpdir, 'master~4..master', 'master', 'tag5', is_remote=is_remote) conf.branches = ['master', 'some-branch'] branch_commits = { 'master': [dvcs.get_hash('master'), dvcs.get_hash('master~6')], 'some-branch': [dvcs.get_hash('some-branch'), dvcs.get_hash('some-branch~6')] } _test_branches(conf, branch_commits, require_describe=True) test_it() # local repo, so it should not have cloned it assert not os.path.isdir(mirror_dir) # try again, pretending the repo is not local from asv.plugins.git import Git old_local_method = Git.is_local_repo old_url_match = Git.url_match try: Git.is_local_repo = classmethod(lambda cls, path: path != dvcs.path and old_local_method(path)) Git.url_match = classmethod(lambda cls, url: os.path.isdir(url)) test_it(is_remote=True) assert os.path.isdir(mirror_dir) finally: Git.is_local_repo = old_local_method Git.url_match = old_url_match def test_repo_git_annotated_tag_date(tmpdir): tmpdir = six.text_type(tmpdir) dvcs
= tools.generate_test_repo(tmpdir, list(range(5)), dvcs_type='git') conf = config.Config() conf.project = 'sometest' conf.repo = dvcs.path r = repo.get_repo(conf) d1 = r.get_date('tag1') d2 = r.get_date(r.get_hash_from_name('tag1')) assert d1 == d2 @pytest.mark.skipif(hglib is None, reason="needs hglib") def test_repo_hg(tmpdir): tmpdir = six.text_type(tmpdir) conf = config.Config() dvcs = tools.generate_test_repo(tmpdir, list(range(10)), dvcs_type='hg', extra_branches=[('default~4', 'somebranch', [11, 12, 13])]) mirror_dir = join(tmpdir, "repo") def test_it(is_remote=False): conf.project = mirror_dir conf.repo = dvcs.path _test_generic_repo(conf, tmpdir, hash_range="reverse(default~3::default)", master="default", branch="tag5", is_remote=is_remote) conf.branches = ['default', 'somebranch'] branch_commits = { 'default': [dvcs.get_hash('default'), dvcs.get_hash('default~6')], 'somebranch': [dvcs.get_hash('somebranch'), dvcs.get_hash('somebranch~6')] } _test_branches(conf, branch_commits) test_it() # local repo, so it should not have cloned it assert not os.path.isdir(mirror_dir) # try again, pretending the repo is not local from asv.plugins.mercurial import Hg old_local_method = Hg.is_local_repo old_url_match = Hg.url_match try: Hg.is_local_repo = classmethod(lambda cls, path: path != dvcs.path and old_local_method(path)) Hg.url_match = classmethod(lambda cls, url: os.path.isdir(url)) test_it(is_remote=True) assert os.path.isdir(mirror_dir) finally: Hg.is_local_repo = old_local_method Hg.url_match = old_url_match @pytest.fixture(params=[ "git", pytest.param("hg", marks=pytest.mark.skipif(hglib is None, reason="needs hglib")), ]) def two_branch_repo_case(request, tmpdir): r""" This fixture ensures we follow the first parent in case of merges The revision graph looks like this: @ Revision 6 (default) | | o Revision 5 (stable) | | | o Merge master |/| o | Revision 4 | | o | Merge stable |\| o | Revision 3 | | | o Revision 2 |/ o Revision 1 """ dvcs_type = request.param tmpdir = six.text_type(tmpdir) if dvcs_type == "git": master = "master" elif dvcs_type == "hg": master = "default" dvcs = tools.generate_repo_from_ops(tmpdir, dvcs_type, [ ("commit", 1), ("checkout", "stable", master), ("commit", 2), ("checkout", master), ("commit", 3), ("merge", "stable"), ("commit", 4), ("checkout", "stable"), ("merge", master, "Merge master"), ("commit", 5), ("checkout", master), ("commit", 6), ]) conf = config.Config() conf.branches = [master, "stable"] conf.repo = dvcs.path conf.project = join(tmpdir, "repo") r = repo.get_repo(conf) return dvcs, master, r, conf def test_get_branch_commits(two_branch_repo_case): # Test that get_branch_commits() returns an ordered list of commits (last # first) and follows the first parent in case of merge dvcs, master, r, conf = two_branch_repo_case expected = { master: [ "Revision 6", "Revision 4", "Merge stable", "Revision 3", "Revision 1", ], "stable": [ "Revision 5", "Merge master", "Revision 2", "Revision 1", ], } for branch in conf.branches: commits = [ dvcs.get_commit_message(commit_hash) for commit_hash in r.get_branch_commits(branch) ] assert commits == expected[branch] @pytest.mark.parametrize("existing, expected", [ # No existing commit, we expect all commits in commit order, # master branch first ([], ["Revision 6", "Revision 4", "Merge stable", "Revision 3", "Revision 1", "Revision 5", "Merge master", "Revision 2"]), # New commits on each branch (["Revision 4", "Merge master"], ["Revision 6", "Revision 5"]), # No new commits (["Revision 6", "Revision 5"], []), #
Missing all commits on one branch (case of new branch added in config) (["Revision 6"], ["Revision 5", "Merge master", "Revision 2", "Revision 1"]), ], ids=["all", "new", "no-new", "new-branch-added-in-config"]) def test_get_new_branch_commits(two_branch_repo_case, existing, expected): dvcs, master, r, conf = two_branch_repo_case existing_commits = set() for branch in conf.branches: for commit in r.get_branch_commits(branch): message = dvcs.get_commit_message(commit) if message in existing: existing_commits.add(commit) assert len(existing_commits) == len(existing) new_commits = r.get_new_branch_commits(conf.branches, existing_commits) commits = [dvcs.get_commit_message(commit) for commit in new_commits] assert commits == expected def test_git_submodule(tmpdir): tmpdir = six.text_type(tmpdir) # State 0 (no submodule) dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git') sub_dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git') ssub_dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type='git') commit_hash_0 = dvcs.get_hash("master") # State 1 (one submodule) dvcs.run_git(['submodule', 'add', sub_dvcs.path, 'sub1']) dvcs.commit('Add sub1') commit_hash_1 = dvcs.get_hash("master") # State 2 (one submodule with sub-submodule) dvcs.run_git(['submodule', 'update', '--init']) sub1_dvcs = tools.Git(join(dvcs.path, 'sub1')) sub_dvcs.run_git(['submodule', 'add', ssub_dvcs.path, 'ssub1']) sub_dvcs.commit('Add sub1') sub1_dvcs.run_git(['pull']) dvcs.run_git(['add', 'sub1']) dvcs.commit('Update sub1') sub1_hash_2 = sub1_dvcs.get_hash("master") commit_hash_2 = dvcs.get_hash("master") # State 3 (one submodule; sub-submodule removed) sub_dvcs.run_git(['rm', '-f', 'ssub1']) sub_dvcs.commit('Remove ssub1') sub1_dvcs.run_git(['pull']) dvcs.run_git(['add', 'sub1']) dvcs.commit('Update sub1 again') commit_hash_3 = dvcs.get_hash("master") # State 4 (back to one submodule with sub-submodule) sub1_dvcs.run_git(['checkout', sub1_hash_2]) dvcs.run_git(['add', 'sub1']) dvcs.commit('Update sub1 3rd time') commit_hash_4 = dvcs.get_hash("master") # State 5 (remove final submodule) dvcs.run_git(['rm', '-f', 'sub1']) dvcs.commit('Remove sub1') commit_hash_5 = dvcs.get_hash("master") # Verify clean operation conf = config.Config() conf.branches = [None] conf.repo = dvcs.path conf.project = join(tmpdir, "repo") r = repo.get_repo(conf) checkout_dir = join(tmpdir, "checkout") # State 0 r.checkout(checkout_dir, commit_hash_0) assert os.path.isfile(join(checkout_dir, 'README')) assert not os.path.exists(join(checkout_dir, 'sub1')) # State 1 r.checkout(checkout_dir, commit_hash_1) assert os.path.isfile(join(checkout_dir, 'sub1', 'README')) assert not os.path.exists(join(checkout_dir, 'sub1', 'ssub1')) # State 2 r.checkout(checkout_dir, commit_hash_2) assert os.path.isfile(join(checkout_dir, 'sub1', 'ssub1', 'README')) # State 3 r.checkout(checkout_dir, commit_hash_3) assert os.path.isfile(join(checkout_dir, 'sub1', 'README')) assert not os.path.exists(join(checkout_dir, 'sub1', 'ssub1')) # State 4 r.checkout(checkout_dir, commit_hash_4) assert os.path.isfile(join(checkout_dir, 'sub1', 'ssub1', 'README')) # State 4 (check clean -fdx runs in sub-sub modules) garbage_filename = join(checkout_dir, 'sub1', 'ssub1', '.garbage') util.write_json(garbage_filename, {}) assert os.path.isfile(garbage_filename) r.checkout(checkout_dir, commit_hash_4) assert not os.path.isfile(garbage_filename) # State 5 r.checkout(checkout_dir, commit_hash_5) assert os.path.isfile(join(checkout_dir, 'README')) assert not 
os.path.isdir(join(checkout_dir, 'sub1')) @pytest.mark.parametrize('dvcs_type', [ "git", pytest.param("hg", marks=pytest.mark.skipif(hglib is None, reason="needs hglib")) ]) def test_root_ceiling(dvcs_type, tmpdir): # Check that git/hg does not try to look for repository in parent # directories. tmpdir = six.text_type(tmpdir) dvcs1 = tools.generate_repo_from_ops(tmpdir, dvcs_type, [("commit", 1)]) dvcs2 = tools.generate_repo_from_ops(tmpdir, dvcs_type, [("commit", 2)]) commit1 = dvcs1.get_branch_hashes()[0] commit2 = dvcs2.get_branch_hashes()[0] conf = config.Config() conf.branches = [] conf.dvcs = dvcs_type conf.project = join(tmpdir, "repo") conf.repo = dvcs1.path r = repo.get_repo(conf) # Checkout into a subdir inside another repository workcopy_dir = join(dvcs2.path, "workcopy") r.checkout(workcopy_dir, commit1) # Corrupt the checkout for pth in ['.hg', '.git']: pth = os.path.join(workcopy_dir, pth) if os.path.isdir(pth): shutil.rmtree(pth) # Operation must fail (commit2 is not in dvcs1), not use the # parent repository with pytest.raises(Exception): r.checkout(workcopy_dir, commit2) @pytest.mark.parametrize('dvcs_type', [ "git", pytest.param("hg", marks=pytest.mark.skipif(hglib is None, reason="needs hglib")) ]) def test_no_such_name_error(dvcs_type, tmpdir): tmpdir = six.text_type(tmpdir) dvcs = tools.generate_test_repo(tmpdir, values=[0], dvcs_type=dvcs_type) conf = config.Config() conf.branches = [] conf.dvcs = dvcs_type conf.project = "project" conf.repo = dvcs.path r = repo.get_repo(conf) # Check that NoSuchNameError error gets raised correctly assert r.get_hash_from_name(None) == dvcs.get_hash(r._default_branch) with pytest.raises(repo.NoSuchNameError): r.get_hash_from_name("badbranch") if dvcs_type == "git": # Corrupted repository/etc should not give NoSuchNameError util.long_path_rmtree(join(dvcs.path, ".git")) with pytest.raises(Exception) as excinfo: r.get_hash_from_name(None) assert excinfo.type not in (AssertionError, repo.NoSuchNameError) elif dvcs_type == "hg": # hglib seems to do some caching, so this doesn't work pass @pytest.mark.parametrize('dvcs_type', [ "git", pytest.param("hg", marks=pytest.mark.skipif(hglib is None, reason="needs hglib")) ]) def test_filter_date_period(tmpdir, dvcs_type): tmpdir = six.text_type(tmpdir) dates = [ datetime.datetime(2001, 1, 1), datetime.datetime(2001, 1, 2), datetime.datetime(2001, 1, 8) ] dvcs = tools.generate_repo_from_ops( tmpdir, dvcs_type, [("commit", j, dates[j]) for j in range(len(dates))]) commits = dvcs.get_branch_hashes()[::-1] assert len(commits) == len(dates) conf = config.Config() conf.dvcs = dvcs_type conf.repo = dvcs.path r = repo.get_repo(conf) # Basic filtering weekly_commits = r.filter_date_period(commits, 60*60*24*7) assert weekly_commits == [commits[0], commits[2]] daily_commits = r.filter_date_period(commits, 60*60*24) assert daily_commits == commits # Test with old_commits specified monthly_commits = r.filter_date_period(commits[1:], 60*60*24*30, commits[:1]) assert monthly_commits == []
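# Editor's usage sketch (hedged; repository paths are placeholders): the
# repo API as the tests above use it -- point a Config at a repository,
# obtain a repo object, then walk a branch's commits and dates.
def _demo_repo(tmpdir):
    conf = config.Config()
    conf.dvcs = "git"
    conf.repo = "/path/to/project"           # placeholder repository path
    conf.project = join(tmpdir, "mirror")    # where asv may mirror the repo
    r = repo.get_repo(conf)
    commits = r.get_branch_commits("master")
    return [(h, r.get_date(h)) for h in commits]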
qwhelan/asv
test/test_repo.py
test/test_graph.py
from rtcclient.base import FieldBase from rtcclient import urlunquote, OrderedDict import logging import xmltodict import re import os class Role(FieldBase): """The role in the project area or team area""" log = logging.getLogger("models.Role") def __str__(self): return self.label class Member(FieldBase): """The member in the project area""" log = logging.getLogger("models.Member") def __init__(self, url, rtc_obj, raw_data=None): FieldBase.__init__(self, url, rtc_obj, raw_data=raw_data) # add a new attribute mainly for the un-recorded member use self.email = urlunquote(self.url.split("/")[-1]) def __str__(self): if hasattr(self, "title"): return self.title return self.email def _initialize(self): pass def __initialize(self): pass class Administrator(Member): """The administrator of the project area""" log = logging.getLogger("models.Administrator") class ItemType(FieldBase): """The workitem type""" log = logging.getLogger("models.ItemType") def __str__(self): return self.title class TeamArea(FieldBase): """The team area""" log = logging.getLogger("models.TeamArea") def __str__(self): return self.title class PlannedFor(FieldBase): """The project plannedfor defines a start and end date along with an iteration breakdown """ log = logging.getLogger("models.PlannedFor") def __str__(self): return self.title class FiledAgainst(FieldBase): """Category that identifies the component or functional area that the work item belongs to. """ log = logging.getLogger("models.FiledAgainst") def __str__(self): return self.title class FoundIn(FieldBase): """Release in which the issue described in the work item was identified. """ log = logging.getLogger("models.FoundIn") def __str__(self): return self.title class Severity(FieldBase): """Indication of the impact of the work item""" log = logging.getLogger("models.Severity") def __str__(self): return self.title class Priority(FieldBase): """Ranked importance of a work item""" log = logging.getLogger("models.Priority") def __str__(self): return self.title class Action(FieldBase): """The action to change the state of the workitem""" log = logging.getLogger("models.Action") def __str__(self): return self.title class State(FieldBase): """Status of the work item. 
For example, New, In Progress, or Resolved.""" log = logging.getLogger("models.State") def __str__(self): return self.title class Comment(FieldBase): """Comment about the work item""" log = logging.getLogger("models.Comment") def __init__(self, url, rtc_obj, raw_data=None): self.id = url.split("/")[-1] FieldBase.__init__(self, url, rtc_obj, raw_data) def __str__(self): return self.id class SavedQuery(FieldBase): """User saved query""" log = logging.getLogger("models.SavedQuery") def __init__(self, url, rtc_obj, raw_data=None): self.id = url.split("/")[-1] FieldBase.__init__(self, url, rtc_obj, raw_data) def __str__(self): return self.title class IncludedInBuild(FieldBase): """Which build includes the certain workitem""" log = logging.getLogger("models.IncludedInBuild") def __str__(self): return self.label class ChangeSet(FieldBase): """ChangeSet""" log = logging.getLogger("models.ChangeSet") def __str__(self): return self.label def getChanges(self): """Get all :class:`rtcclient.models.Change` objects in this changeset :return: a :class:`list` contains all the :class:`rtcclient.models.Change` objects :rtype: list """ identifier = self.url.split("/")[-1] resource_url = "/".join(["%s" % self.rtc_obj.url, "resource/itemOid", "com.ibm.team.scm.ChangeSet", "%s?_mediaType=text/xml" % identifier]) resp = self.get(resource_url, verify=False, proxies=self.rtc_obj.proxies, headers=self.rtc_obj.headers) raw_data = xmltodict.parse(resp.content).get("scm:ChangeSet") common_changes = dict() changes = raw_data.get("changes") for (key, value) in raw_data.items(): if key.startswith("@"): continue if "changes" != key: common_changes[key] = value return self._handle_changes(changes, common_changes) def _handle_changes(self, changes, common_changes): change_objs = list() if isinstance(changes, OrderedDict): # only one single change changes.update(common_changes) change_objs.append(Change(None, self.rtc_obj, raw_data=changes)) elif isinstance(changes, list): # multiple changes for change in changes: change.update(common_changes) change_objs.append(Change(None, self.rtc_obj, raw_data=change)) return change_objs class Change(FieldBase): """Change""" log = logging.getLogger("models.Change") def __init__(self, url, rtc_obj, raw_data=None): FieldBase.__init__(self, url, rtc_obj, raw_data) def __str__(self): return self.internalId def fetchBeforeStateFile(self, file_folder): """Fetch the initial file (before the change) to a folder If the file is newly added, then `None` will be returned. :param file_folder: the folder to store the file :return: the :class:`string` object :rtype: string """ if u"true" == self.before: self.log.info("This file is newly added. No previous file") else: self.log.info("Fetching initial file of this Change<%s>:" % self) return self._fetchFile(self.before, file_folder, override=False) def fetchAfterStateFile(self, file_folder): """Fetch the final file (after the change) to a folder If the file has been deleted, then `None` will be returned. :param file_folder: the folder to store the file :return: the :class:`string` object :rtype: string """ if u"true" == self.after: self.log.info("This file has been deleted successfully.") else: self.log.info("Fetching final file of this Change<%s>:" % self) return self._fetchFile(self.after, file_folder) def fetchCurrentFile(self, file_folder): """Fetch the current/final file (after the change) to a folder If the file has been deleted, then `None` will be returned. 
:param file_folder: the folder to store the file :return: the :class:`string` object :rtype: string """ return self.fetchAfterStateFile(file_folder) def _fetchFile(self, state_id, file_folder, override=True): if self.raw_data['item']['@xsi:type'] == 'scm:FolderHandle': return file_url = "/".join(["{0}/service", ("com.ibm.team.filesystem.service.internal." "rest.IFilesystemContentService"), "-", ("{1}?itemId={2}&stateId={3}" "&platformLineDelimiter=CRLF")]) file_url = file_url.format(self.rtc_obj.url, self.component, self.item, state_id) self.log.debug("Start fetching file from %s ..." % file_url) resp = self.get(file_url, verify=False, headers=self.rtc_obj.headers) file_name = re.findall(r".+filename\*=UTF-8''(.+)", resp.headers["content-disposition"])[0] file_path = os.path.join(file_folder, file_name) if not override and os.path.exists(file_path): return with open(file_path, "wb") as file_content: file_content.write(resp.content) self.log.info("Successfully fetched '%s' to '%s'" % (file_name, file_path)) return file_path class Attachment(FieldBase): """Attachment of the work item""" log = logging.getLogger("models.Attachment") def __init__(self, url, rtc_obj, raw_data=None): FieldBase.__init__(self, url, rtc_obj, raw_data) def __str__(self): return self.identifier + ": " + self.title
import requests import pytest import utils_test from rtcclient.exception import BadValue, NotFound from rtcclient.workitem import Workitem from rtcclient.models import Comment, Action, State, IncludedInBuild from rtcclient.models import ChangeSet, Attachment class TestWorkitem: @pytest.fixture(autouse=True) def myrtcclient(self, rtcclient): myclient = rtcclient return myclient @pytest.fixture(autouse=True) def workitem1(self, myrtcclient): return Workitem("http://test.url:9443/jazz/oslc/workitems/161", myrtcclient, workitem_id=161, raw_data=utils_test.workitem1) @pytest.fixture def mock_get_comments(self, mocker): mocked_get = mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 mock_resp.content = utils_test.read_fixture("comments.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_comments(self, myrtcclient, mock_get_comments, workitem1): # Comment1 comment1_url = "/".join(["http://test.url:9443/jazz/oslc", "workitems/161/rtc_cm:comments/0"]) comment1 = Comment(comment1_url, myrtcclient, raw_data=utils_test.comment1) assert comment1.id == "0" assert str(comment1) == "0" assert comment1.created == "2015-07-27T02:35:47.391Z" assert comment1.creator == "tester1@email.com" assert comment1.description == "comment test" # Comment2 comment2_url = "/".join(["http://test.url:9443/jazz/oslc", "workitems/161/rtc_cm:comments/1"]) comment2 = Comment(comment2_url, myrtcclient, raw_data=utils_test.comment2) assert comment2.id == "1" assert str(comment2) == "1" assert comment2.created == "2015-07-27T10:48:55.197Z" assert comment2.creator == "tester2@email.com" assert comment2.description == "add comment test2" comments = workitem1.getComments() assert comments == [comment1, comment2] def test_get_comment(self, myrtcclient, mock_get_comments, workitem1): # test for invalid comment id invalid_comment_ids = ["", None, True, False, "test"] for invalid_comment_id in invalid_comment_ids: with pytest.raises(BadValue): workitem1.getCommentByID(invalid_comment_id) # test for valid comment id # Comment1 comment1_url = "/".join(["http://test.url:9443/jazz/oslc", "workitems/161/rtc_cm:comments/0"]) comment1 = Comment(comment1_url, myrtcclient, raw_data=utils_test.comment1) # Comment2 comment2_url = "/".join(["http://test.url:9443/jazz/oslc", "workitems/161/rtc_cm:comments/1"]) comment2 = Comment(comment2_url, myrtcclient, raw_data=utils_test.comment2) comment_valid_ids = [0, "0", u"0"] for comment_id in comment_valid_ids: comment = workitem1.getCommentByID(comment_id) assert comment == comment1 comment_valid_ids = [1, "1", u"1"] for comment_id in comment_valid_ids: comment = workitem1.getCommentByID(comment_id) assert comment == comment2 def test_add_comment(self, myrtcclient, mocker, workitem1): # TODO: add comment test pass @pytest.fixture def mock_get_subscribers(self, mocker): mocked_get = mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 # mock_resp.content = utils_test.read_fixture("subscribers.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_subscribers(self, myrtcclient, mock_get_subscribers): # TODO: add subscribers.xml pass @pytest.fixture def mock_get_actions(self, mocker): mocked_get = mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 mock_resp.content = utils_test.read_fixture("actions.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_actions(self, 
myrtcclient, mock_get_actions, workitem1): # Action1 action1_url = "/".join(["http://test.url:9443/jazz/oslc/workflows", "_CuZu0HUwEeKicpXBddtqNA/actions", "default_workflow/default_workflow.action.a1"]) action1 = Action(action1_url, myrtcclient, raw_data=utils_test.action1) assert str(action1) == "Close" assert action1.title == "Close" assert action1.identifier == "default_workflow.action.a1" # fake data: please ignore these values assert action1.resultState == ["Close", "Start Working"] # Action2 action2_url = "/".join(["http://test.url:9443/jazz/oslc/workflows", "_CuZu0HUwEeKicpXBddtqNA/actions", "default_workflow/default_workflow.action.a2"]) action2 = Action(action2_url, myrtcclient, raw_data=utils_test.action2) assert str(action2) == "Start Working" assert action2.title == "Start Working" assert action2.identifier == "default_workflow.action.a2" # fake data: please ignore these values assert action2.resultState == ["Close", "Start Working"] actions = workitem1.getActions() assert actions == [action1, action2] def test_get_action(self, myrtcclient, mock_get_actions, workitem1): # test for invalid name invalid_action_names = ["", u"", None, True, False] for invalid_action_name in invalid_action_names: with pytest.raises(BadValue): workitem1.getAction(invalid_action_name) # Action1 action1_url = "/".join(["http://test.url:9443/jazz/oslc/workflows", "_CuZu0HUwEeKicpXBddtqNA/actions", "default_workflow/default_workflow.action.a1"]) action1 = Action(action1_url, myrtcclient, raw_data=utils_test.action1) # Action2 action2_url = "/".join(["http://test.url:9443/jazz/oslc/workflows", "_CuZu0HUwEeKicpXBddtqNA/actions", "default_workflow/default_workflow.action.a2"]) action2 = Action(action2_url, myrtcclient, raw_data=utils_test.action2) # test for valid name action_valid_names = ["Close", u"Close"] for action_name in action_valid_names: action = workitem1.getAction(action_name) assert action == action1 # test for valid name action_valid_names = ["Start Working", u"Start Working"] for action_name in action_valid_names: action = workitem1.getAction(action_name) assert action == action2 # test for fake name action_fake_names = ["Fake_Action", u"Fake_Action"] for action_name in action_fake_names: with pytest.raises(NotFound): workitem1.getAction(action_name) @pytest.fixture def mock_get_states(self, mocker): mocked_get = mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 mock_resp.content = utils_test.read_fixture("states.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_states(self, myrtcclient, mock_get_states, workitem1): # State1 state1_url = "/".join(["http://test.url:9443/jazz/oslc/workflows", "_CuZu0HUwEeKicpXBddtqNA/states", "default_workflow/default_workflow.state.s1"]) state1 = State(state1_url, myrtcclient, raw_data=utils_test.state1) assert str(state1) == "Closed" assert state1.title == "Closed" assert state1.identifier == "default_workflow.state.s1" assert state1.group == "inprogress" # State2 state2_url = "/".join(["http://test.url:9443/jazz/oslc/workflows", "_CuZu0HUwEeKicpXBddtqNA/states", "default_workflow/default_workflow.state.s2"]) state2 = State(state2_url, myrtcclient, raw_data=utils_test.state2) assert str(state2) == "In Progress" assert state2.title == "In Progress" assert state2.identifier == "default_workflow.state.s2" assert state2.group == "closed" states = workitem1.getStates() assert states == [state1, state2] @pytest.fixture def mock_get_iib(self, mocker): mocked_get = 
mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 mock_resp.content = utils_test.read_fixture("includedinbuilds.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_includedinbuilds(self, myrtcclient, mock_get_iib, workitem1): # IncludedInBuild1 iib1_url = ("http://test.url:9443/jazz/resource/itemOid/" "com.ibm.team.build.BuildResult/_2NXr8Fx3EeWfxsy-c6nRWw") iib1 = IncludedInBuild(iib1_url, myrtcclient, raw_data=utils_test.includedinbuild1) assert iib1.url == iib1_url assert iib1.identifier == "_2NXr8Fx3EeWfxsy-c6nRWw" assert iib1.name == "20150916-0836" assert iib1.created == "2015-09-16T13:35:51.342Z" assert iib1.started == "2015-09-16T13:36:01.122Z" assert iib1.ended == "2015-09-16T13:43:20.183Z" assert iib1.reason == "MANUAL" assert iib1.state == "COMPLETED" assert iib1.verdict == "OK" assert iib1.subject is None # fake data assert iib1.plan == ("http://test.url:9443/jazz/oslc/automation/" "plans/_-xFK4AH0EeSgb7B1Epikyg") assert iib1.creator == ("http://test.url:9443/jazz/oslc/automation/" "persons/_Ult00OjfEd6dKb6PaBIgvQ") assert iib1.contributions == ("http://test.url:9443/jazz/oslc/" "automation/results/" "_2NXr8Fx3EeWfxsy-c6nRWw/contributions") # IncludedInBuild2 iib2_url = ("http://test.url:9443/jazz/resource/itemOid/" "com.ibm.team.build.BuildResult/_b0KuAFuPEeWfxsy-c6nRWw") iib2 = IncludedInBuild(iib2_url, myrtcclient, raw_data=utils_test.includedinbuild2) assert iib2.url == iib2_url assert iib2.identifier == "_b0KuAFuPEeWfxsy-c6nRWw" assert iib2.name == "20150915-0452" assert iib2.created == "2015-09-15T09:52:10.975Z" assert iib2.started == "2015-09-15T09:52:19.544Z" assert iib2.ended == "2015-09-15T10:03:05.743Z" assert iib2.reason == "MANUAL" assert iib2.state == "COMPLETED" assert iib2.verdict == "OK" assert iib2.subject is None # fake data assert iib2.plan == ("http://test.url:9443/jazz/oslc/automation/" "plans/_0-o5QJ4DEeOwXZhplBr4Rw") assert iib2.creator == ("http://test.url:9443/jazz/oslc/automation/" "persons/_mYwwQGA5EeKE8fx0mWPe-A") assert iib2.contributions == ("http://test.url:9443/jazz/oslc/" "automation/results/" "_b0KuAFuPEeWfxsy-c6nRWw/contributions") iibs = workitem1.getIncludedInBuilds() assert iibs == [iib1, iib2] @pytest.fixture def mock_get_children(self, mocker): mocked_get = mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 mock_resp.content = utils_test.read_fixture("children.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_children(self, myrtcclient, mock_get_children, workitem1): # chidlren1 children1 = Workitem("http://test.url:9443/jazz/oslc/workitems/142990", myrtcclient, workitem_id=142990, raw_data=utils_test.children1) # chidlren2 children2 = Workitem("http://test.url:9443/jazz/oslc/workitems/142989", myrtcclient, workitem_id=142989, raw_data=utils_test.children1) children = workitem1.getChildren() assert children == [children1, children2] @pytest.fixture def mock_get_parent(self, mocker): mocked_get = mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 mock_resp.content = utils_test.read_fixture("parent.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_parent(self, myrtcclient, mock_get_parent, workitem1): # parent parent = Workitem("http://test.url:9443/jazz/oslc/workitems/141872", myrtcclient, workitem_id=141872, raw_data=utils_test.parent) assert workitem1.getParent() == parent @pytest.fixture def 
mock_get_changesets(self, mocker): mocked_get = mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 mock_resp.content = utils_test.read_fixture("changesets.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_changesets(self, myrtcclient, mock_get_changesets, workitem1): # changeset1 changeset1_url = ("http://test.url:9443/jazz/resource/itemOid/" "com.ibm.team.scm.ChangeSet/" "_VAjiUGHIEeWDLNtG9052Dw") changeset1 = ChangeSet(changeset1_url, myrtcclient, raw_data=utils_test.changeset1) assert changeset1.url == changeset1_url assert str(changeset1) == ("Changes 1 - Comment 1 - User1 - " "Sep 23, 2015 2:54 AM") # changeset2 changeset2_url = ("http://test.url:9443/jazz/resource/itemOid/" "com.ibm.team.scm.ChangeSet/" "_aVKuMGHWEeWDLNtG9052Dw") changeset2 = ChangeSet(changeset2_url, myrtcclient, raw_data=utils_test.changeset2) # changeset3 changeset3_url = ("http://test.url:9443/jazz/resource/itemOid/" "com.ibm.team.scm.ChangeSet/" "_nBUMsF0gEeWfxsy-c6nRWw") changeset3 = ChangeSet(changeset3_url, myrtcclient, raw_data=utils_test.changeset3) changesets = workitem1.getChangeSets() assert changesets == [changeset1, changeset2, changeset3] def test_add_attachment(self, myrtcclient, mocker, workitem1): # TODO: add attachment test pass @pytest.fixture def mock_get_attachments(self, mocker): mocked_get = mocker.patch("requests.get") mock_resp = mocker.MagicMock(spec=requests.Response) mock_resp.status_code = 200 mock_resp.content = utils_test.read_fixture("attachment.xml") mocked_get.return_value = mock_resp return mocked_get def test_get_attachments(self, myrtcclient, mock_get_attachments, workitem1): # Attachment1 attachment1_url = ("http://test.url:9443/ccm/resource/itemOid/" "com.ibm.team.workitem.Attachment/" "_bsU_gTk1EeeUpchvxQKZYg") attachment1 = Attachment(attachment1_url, myrtcclient, raw_data=utils_test.attachment1) assert attachment1.identifier == "22" assert str(attachment1) == "22: cgobench1.go" assert attachment1.title == "cgobench1.go" assert attachment1.description == "cgobench1.go" assert attachment1.contentLength == "351" assert attachment1.created == "2017-05-15T06:12:11.264Z" assert attachment1.creator == "tester1" assert attachment1.modified == "2017-05-15T06:12:11.440Z" assert attachment1.content == ("http://test.url:9443/ccm/resource/" "content/_bsRVIDk1EeeUpchvxQKZYg") # Attachment2 attachment2_url = ("http://test.url:9443/ccm/resource/itemOid/" "com.ibm.team.workitem.Attachment/" "_yUgkwTkeEeeUpchvxQKZYg") attachment2 = Attachment(attachment2_url, myrtcclient, raw_data=utils_test.attachment2) assert attachment2.identifier == "21" assert str(attachment2) == "21: cgobench2.go" assert attachment2.title == "cgobench2.go" assert attachment2.description == "cgobench2.go" assert attachment2.contentLength == "351" assert attachment2.created == "2017-05-15T03:30:04.686Z" assert attachment2.creator == "tester2" assert attachment2.modified == "2017-05-15T03:30:04.690Z" assert attachment2.content == ("http://test.url:9443/ccm/resource/" "content/_yUfWoDkeEeeUpchvxQKZYg") attachments = workitem1.getAttachments() assert attachments == [attachment1, attachment2]
dixudx/rtcclient
tests/test_workitem.py
rtcclient/models.py
# # @BEGIN LICENSE # # Psi4: an open-source quantum chemistry software package # # Copyright (c) 2007-2019 The Psi4 Developers. # # The copyrights for code used from other parties are included in # the corresponding files. # # This file is part of Psi4. # # Psi4 is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, version 3. # # Psi4 is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along # with Psi4; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # @END LICENSE # """ | Database (Truhlar) of hydrogen-transfer barrier height reactions. | Geometries from Truhlar and coworkers at site http://t1.chem.umn.edu/misc/database_group/database_therm_bh/raw_geom.cgi . | Reference energies from Zhao et al. JPCA, 109 2012-2018 (2005) doi: 10.1021/jp045141s [in supporting information]. - **cp** ``'off'`` - **rlxd** ``'off'`` - **subset** - ``'small'`` - ``'large'`` """ import re import qcdb # <<< HTBH Database Module >>> dbse = 'HTBH' isOS = 'true' # <<< Database Members >>> HRXN = range(1, 39) HRXN_SM = ['5', '6', '9', '10', '23', '24'] HRXN_LG = ['13', '14', '33', '34', '37', '38'] # <<< Chemical Systems Involved >>> RXNM = {} # reaction matrix of reagent contributions per reaction ACTV = {} # order of active reagents per reaction ACTV['%s-%s' % (dbse, 1)] = ['%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'HCl' ), '%s-%s-reagent' % (dbse, 'HHClts') ] RXNM['%s-%s' % (dbse, 1)] = dict(zip(ACTV['%s-%s' % (dbse, 1)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 2)] = ['%s-%s-reagent' % (dbse, 'H2' ), '%s-%s-reagent' % (dbse, 'Cl' ), '%s-%s-reagent' % (dbse, 'HHClts') ] RXNM['%s-%s' % (dbse, 2)] = dict(zip(ACTV['%s-%s' % (dbse, 2)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 3)] = ['%s-%s-reagent' % (dbse, 'OH' ), '%s-%s-reagent' % (dbse, 'H2' ), '%s-%s-reagent' % (dbse, 'OHH2ts') ] RXNM['%s-%s' % (dbse, 3)] = dict(zip(ACTV['%s-%s' % (dbse, 3)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 4)] = ['%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'H2O' ), '%s-%s-reagent' % (dbse, 'OHH2ts') ] RXNM['%s-%s' % (dbse, 4)] = dict(zip(ACTV['%s-%s' % (dbse, 4)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 5)] = ['%s-%s-reagent' % (dbse, 'CH3' ), '%s-%s-reagent' % (dbse, 'H2' ), '%s-%s-reagent' % (dbse, 'CH3H2ts') ] RXNM['%s-%s' % (dbse, 5)] = dict(zip(ACTV['%s-%s' % (dbse, 5)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 6)] = ['%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'CH4' ), '%s-%s-reagent' % (dbse, 'CH3H2ts') ] RXNM['%s-%s' % (dbse, 6)] = dict(zip(ACTV['%s-%s' % (dbse, 6)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 7)] = ['%s-%s-reagent' % (dbse, 'OH' ), '%s-%s-reagent' % (dbse, 'CH4' ), '%s-%s-reagent' % (dbse, 'OHCH4ts') ] RXNM['%s-%s' % (dbse, 7)] = dict(zip(ACTV['%s-%s' % (dbse, 7)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 8)] = ['%s-%s-reagent' % (dbse, 'CH3' ), '%s-%s-reagent' % (dbse, 'H2O' ), '%s-%s-reagent' % (dbse, 'OHCH4ts') ] RXNM['%s-%s' % (dbse, 8)] = dict(zip(ACTV['%s-%s' % (dbse, 8)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 9)] = ['%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'H2' ), '%s-%s-reagent' % (dbse, 'HH2ts') ] RXNM['%s-%s' % (dbse, 9)] = 
dict(zip(ACTV['%s-%s' % (dbse, 9)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 10)] = ['%s-%s-reagent' % (dbse, 'H2' ), '%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'HH2ts') ] RXNM['%s-%s' % (dbse, 10)] = dict(zip(ACTV['%s-%s' % (dbse, 10)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 11)] = ['%s-%s-reagent' % (dbse, 'OH' ), '%s-%s-reagent' % (dbse, 'NH3' ), '%s-%s-reagent' % (dbse, 'OHNH3ts') ] RXNM['%s-%s' % (dbse, 11)] = dict(zip(ACTV['%s-%s' % (dbse, 11)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 12)] = ['%s-%s-reagent' % (dbse, 'H2O' ), '%s-%s-reagent' % (dbse, 'NH2' ), '%s-%s-reagent' % (dbse, 'OHNH3ts') ] RXNM['%s-%s' % (dbse, 12)] = dict(zip(ACTV['%s-%s' % (dbse, 12)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 13)] = ['%s-%s-reagent' % (dbse, 'HCl' ), '%s-%s-reagent' % (dbse, 'CH3' ), '%s-%s-reagent' % (dbse, 'HClCH3ts') ] RXNM['%s-%s' % (dbse, 13)] = dict(zip(ACTV['%s-%s' % (dbse, 13)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 14)] = ['%s-%s-reagent' % (dbse, 'Cl' ), '%s-%s-reagent' % (dbse, 'CH4' ), '%s-%s-reagent' % (dbse, 'HClCH3ts') ] RXNM['%s-%s' % (dbse, 14)] = dict(zip(ACTV['%s-%s' % (dbse, 14)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 15)] = ['%s-%s-reagent' % (dbse, 'OH' ), '%s-%s-reagent' % (dbse, 'C2H6' ), '%s-%s-reagent' % (dbse, 'OHC2H6ts') ] RXNM['%s-%s' % (dbse, 15)] = dict(zip(ACTV['%s-%s' % (dbse, 15)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 16)] = ['%s-%s-reagent' % (dbse, 'H2O' ), '%s-%s-reagent' % (dbse, 'C2H5' ), '%s-%s-reagent' % (dbse, 'OHC2H6ts') ] RXNM['%s-%s' % (dbse, 16)] = dict(zip(ACTV['%s-%s' % (dbse, 16)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 17)] = ['%s-%s-reagent' % (dbse, 'F' ), '%s-%s-reagent' % (dbse, 'H2' ), '%s-%s-reagent' % (dbse, 'FH2ts') ] RXNM['%s-%s' % (dbse, 17)] = dict(zip(ACTV['%s-%s' % (dbse, 17)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 18)] = ['%s-%s-reagent' % (dbse, 'HF' ), '%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'FH2ts') ] RXNM['%s-%s' % (dbse, 18)] = dict(zip(ACTV['%s-%s' % (dbse, 18)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 19)] = ['%s-%s-reagent' % (dbse, 'O' ), '%s-%s-reagent' % (dbse, 'CH4' ), '%s-%s-reagent' % (dbse, 'OHCH3ts') ] RXNM['%s-%s' % (dbse, 19)] = dict(zip(ACTV['%s-%s' % (dbse, 19)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 20)] = ['%s-%s-reagent' % (dbse, 'OH' ), '%s-%s-reagent' % (dbse, 'CH3' ), '%s-%s-reagent' % (dbse, 'OHCH3ts') ] RXNM['%s-%s' % (dbse, 20)] = dict(zip(ACTV['%s-%s' % (dbse, 20)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 21)] = ['%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'PH3' ), '%s-%s-reagent' % (dbse, 'HPH3ts') ] RXNM['%s-%s' % (dbse, 21)] = dict(zip(ACTV['%s-%s' % (dbse, 21)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 22)] = ['%s-%s-reagent' % (dbse, 'PH2' ), '%s-%s-reagent' % (dbse, 'H2' ), '%s-%s-reagent' % (dbse, 'HPH3ts') ] RXNM['%s-%s' % (dbse, 22)] = dict(zip(ACTV['%s-%s' % (dbse, 22)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 23)] = ['%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'OH' ), '%s-%s-reagent' % (dbse, 'OHHts') ] RXNM['%s-%s' % (dbse, 23)] = dict(zip(ACTV['%s-%s' % (dbse, 23)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 24)] = ['%s-%s-reagent' % (dbse, 'H2' ), '%s-%s-reagent' % (dbse, 'O' ), '%s-%s-reagent' % (dbse, 'OHHts') ] RXNM['%s-%s' % (dbse, 24)] = dict(zip(ACTV['%s-%s' % (dbse, 24)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 25)] = ['%s-%s-reagent' % (dbse, 'H' ), '%s-%s-reagent' % (dbse, 'H2S' ), '%s-%s-reagent' % (dbse, 'HH2Sts') ] RXNM['%s-%s' % (dbse, 25)] = dict(zip(ACTV['%s-%s' % (dbse, 25)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 26)] = ['%s-%s-reagent' % (dbse, 'H2' ), 
'%s-%s-reagent' % (dbse, 'HS' ), '%s-%s-reagent' % (dbse, 'HH2Sts') ] RXNM['%s-%s' % (dbse, 26)] = dict(zip(ACTV['%s-%s' % (dbse, 26)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 27)] = ['%s-%s-reagent' % (dbse, 'O' ), '%s-%s-reagent' % (dbse, 'HCl' ), '%s-%s-reagent' % (dbse, 'OHClts') ] RXNM['%s-%s' % (dbse, 27)] = dict(zip(ACTV['%s-%s' % (dbse, 27)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 28)] = ['%s-%s-reagent' % (dbse, 'OH' ), '%s-%s-reagent' % (dbse, 'Cl' ), '%s-%s-reagent' % (dbse, 'OHClts') ] RXNM['%s-%s' % (dbse, 28)] = dict(zip(ACTV['%s-%s' % (dbse, 28)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 29)] = ['%s-%s-reagent' % (dbse, 'NH2' ), '%s-%s-reagent' % (dbse, 'CH3' ), '%s-%s-reagent' % (dbse, 'CH3NH2ts') ] RXNM['%s-%s' % (dbse, 29)] = dict(zip(ACTV['%s-%s' % (dbse, 29)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 30)] = ['%s-%s-reagent' % (dbse, 'CH4' ), '%s-%s-reagent' % (dbse, 'NH' ), '%s-%s-reagent' % (dbse, 'CH3NH2ts') ] RXNM['%s-%s' % (dbse, 30)] = dict(zip(ACTV['%s-%s' % (dbse, 30)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 31)] = ['%s-%s-reagent' % (dbse, 'NH2' ), '%s-%s-reagent' % (dbse, 'C2H5' ), '%s-%s-reagent' % (dbse, 'NH2C2H5ts') ] RXNM['%s-%s' % (dbse, 31)] = dict(zip(ACTV['%s-%s' % (dbse, 31)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 32)] = ['%s-%s-reagent' % (dbse, 'C2H6' ), '%s-%s-reagent' % (dbse, 'NH' ), '%s-%s-reagent' % (dbse, 'NH2C2H5ts') ] RXNM['%s-%s' % (dbse, 32)] = dict(zip(ACTV['%s-%s' % (dbse, 32)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 33)] = ['%s-%s-reagent' % (dbse, 'C2H6' ), '%s-%s-reagent' % (dbse, 'NH2' ), '%s-%s-reagent' % (dbse, 'C2H6NH2ts') ] RXNM['%s-%s' % (dbse, 33)] = dict(zip(ACTV['%s-%s' % (dbse, 33)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 34)] = ['%s-%s-reagent' % (dbse, 'NH3' ), '%s-%s-reagent' % (dbse, 'C2H5' ), '%s-%s-reagent' % (dbse, 'C2H6NH2ts') ] RXNM['%s-%s' % (dbse, 34)] = dict(zip(ACTV['%s-%s' % (dbse, 34)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 35)] = ['%s-%s-reagent' % (dbse, 'NH2' ), '%s-%s-reagent' % (dbse, 'CH4' ), '%s-%s-reagent' % (dbse, 'NH2CH4ts') ] RXNM['%s-%s' % (dbse, 35)] = dict(zip(ACTV['%s-%s' % (dbse, 35)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 36)] = ['%s-%s-reagent' % (dbse, 'CH3' ), '%s-%s-reagent' % (dbse, 'NH3' ), '%s-%s-reagent' % (dbse, 'NH2CH4ts') ] RXNM['%s-%s' % (dbse, 36)] = dict(zip(ACTV['%s-%s' % (dbse, 36)], [-1, -1, +1])) ACTV['%s-%s' % (dbse, 37)] = ['%s-%s-reagent' % (dbse, 'C5H8' ), '%s-%s-reagent' % (dbse, 'C5H8ts') ] RXNM['%s-%s' % (dbse, 37)] = dict(zip(ACTV['%s-%s' % (dbse, 37)], [-1, +1])) ACTV['%s-%s' % (dbse, 38)] = ['%s-%s-reagent' % (dbse, 'C5H8' ), '%s-%s-reagent' % (dbse, 'C5H8ts') ] RXNM['%s-%s' % (dbse, 38)] = dict(zip(ACTV['%s-%s' % (dbse, 38)], [-1, +1])) # <<< Reference Values [kcal/mol] >>> BIND = {} BIND['%s-%s' % (dbse, 1)] = 5.7 BIND['%s-%s' % (dbse, 2)] = 8.7 BIND['%s-%s' % (dbse, 3)] = 5.1 BIND['%s-%s' % (dbse, 4)] = 21.2 BIND['%s-%s' % (dbse, 5)] = 12.1 BIND['%s-%s' % (dbse, 6)] = 15.3 BIND['%s-%s' % (dbse, 7)] = 6.7 BIND['%s-%s' % (dbse, 8)] = 19.6 BIND['%s-%s' % (dbse, 9)] = 9.6 BIND['%s-%s' % (dbse, 10)] = 9.6 BIND['%s-%s' % (dbse, 11)] = 3.2 BIND['%s-%s' % (dbse, 12)] = 12.7 BIND['%s-%s' % (dbse, 13)] = 1.7 BIND['%s-%s' % (dbse, 14)] = 7.9 BIND['%s-%s' % (dbse, 15)] = 3.4 BIND['%s-%s' % (dbse, 16)] = 19.9 BIND['%s-%s' % (dbse, 17)] = 1.8 BIND['%s-%s' % (dbse, 18)] = 33.4 BIND['%s-%s' % (dbse, 19)] = 13.7 BIND['%s-%s' % (dbse, 20)] = 8.1 BIND['%s-%s' % (dbse, 21)] = 3.1 BIND['%s-%s' % (dbse, 22)] = 23.2 BIND['%s-%s' % (dbse, 23)] = 10.7 BIND['%s-%s' % (dbse, 24)] = 13.1 BIND['%s-%s' % (dbse, 
25)] = 3.5 BIND['%s-%s' % (dbse, 26)] = 17.3 BIND['%s-%s' % (dbse, 27)] = 9.8 BIND['%s-%s' % (dbse, 28)] = 10.4 BIND['%s-%s' % (dbse, 29)] = 8.0 BIND['%s-%s' % (dbse, 30)] = 22.4 BIND['%s-%s' % (dbse, 31)] = 7.5 BIND['%s-%s' % (dbse, 32)] = 18.3 BIND['%s-%s' % (dbse, 33)] = 10.4 BIND['%s-%s' % (dbse, 34)] = 17.4 BIND['%s-%s' % (dbse, 35)] = 14.5 BIND['%s-%s' % (dbse, 36)] = 17.8 BIND['%s-%s' % (dbse, 37)] = 38.4 BIND['%s-%s' % (dbse, 38)] = 38.4 # <<< Comment Lines >>> TAGL = {} TAGL['%s-%s' % (dbse, 1)] = '{ H + HCl <-- [HHCl] } --> H2 + Cl' TAGL['%s-%s' % (dbse, 2)] = 'H + HCl <-- { [HHCl] --> H2 + Cl }' TAGL['%s-%s' % (dbse, 3)] = '{ OH + H2 <-- [OHH2] } --> H + H2O' TAGL['%s-%s' % (dbse, 4)] = 'OH + H2 <-- { [OHH2] --> H + H2O }' TAGL['%s-%s' % (dbse, 5)] = '{ CH3 + H2 <-- [CH3H2] } --> H + CH4' TAGL['%s-%s' % (dbse, 6)] = 'CH3 + H2 <-- { [CH3H2] --> H + CH4 }' TAGL['%s-%s' % (dbse, 7)] = '{ OH + CH4 <-- [OHCH4] } --> CH3 + H2O' TAGL['%s-%s' % (dbse, 8)] = 'OH + CH4 <-- { [OHCH4] --> CH3 + H2O }' TAGL['%s-%s' % (dbse, 9)] = '{ H + H2 <-- [HH2] } --> H2 + H' TAGL['%s-%s' % (dbse, 10)] = 'H + H2 <-- { [HH2] --> H2 + H }' TAGL['%s-%s' % (dbse, 11)] = '{ OH + NH3 <-- [OHNH3] } --> H2O + NH2' TAGL['%s-%s' % (dbse, 12)] = 'OH + NH3 <-- { [OHNH3] --> H2O + NH2 }' TAGL['%s-%s' % (dbse, 13)] = '{ HCl + CH3 <-- [HClCH3] } --> Cl + CH4' TAGL['%s-%s' % (dbse, 14)] = 'HCl + CH3 <-- { [HClCH3] --> Cl + CH4 }' TAGL['%s-%s' % (dbse, 15)] = '{ OH + C2H6 <-- [OHC2H6] } --> H2O + C2H5' TAGL['%s-%s' % (dbse, 16)] = 'OH + C2H6 <-- { [OHC2H6] --> H2O + C2H5 }' TAGL['%s-%s' % (dbse, 17)] = '{ F + H2 <-- [FH2] } --> HF + H' TAGL['%s-%s' % (dbse, 18)] = 'F + H2 <-- { [FH2] --> HF + H }' TAGL['%s-%s' % (dbse, 19)] = '{ O + CH4 <-- [OHCH3] } --> OH + CH3' TAGL['%s-%s' % (dbse, 20)] = 'O + CH4 <-- { [OHCH3] --> OH + CH3 }' TAGL['%s-%s' % (dbse, 21)] = '{ H + PH3 <-- [HPH3] } --> PH2 + H2' TAGL['%s-%s' % (dbse, 22)] = 'H + PH3 <-- { [HPH3] --> PH2 + H2 }' TAGL['%s-%s' % (dbse, 23)] = '{ H + OH <-- [OHH] } --> H2 + O' TAGL['%s-%s' % (dbse, 24)] = 'H + OH <-- { [OHH] --> H2 + O }' TAGL['%s-%s' % (dbse, 25)] = '{ H + H2S <-- [HH2S] } --> H2 + HS' TAGL['%s-%s' % (dbse, 26)] = 'H + H2S <-- { [HH2S] --> H2 + HS }' TAGL['%s-%s' % (dbse, 27)] = '{ O + HCl <-- [OHCl] } --> OH + Cl' TAGL['%s-%s' % (dbse, 28)] = 'O + HCl <-- { [OHCl] --> OH + Cl }' TAGL['%s-%s' % (dbse, 29)] = '{ NH2 + CH3 <-- [CH3NH2] } --> CH4 + NH' TAGL['%s-%s' % (dbse, 30)] = 'NH2 + CH3 <-- { [CH3NH2] --> CH4 + NH }' TAGL['%s-%s' % (dbse, 31)] = '{ NH2 + C2H5 <-- [NH2C2H5] } --> C2H6 + NH' TAGL['%s-%s' % (dbse, 32)] = 'NH2 + C2H5 <-- { [NH2C2H5] --> C2H6 + NH }' TAGL['%s-%s' % (dbse, 33)] = '{ C2H6 + NH2 <-- [C2H6NH2] } --> NH3 + C2H5' TAGL['%s-%s' % (dbse, 34)] = 'C2H6 + NH2 <-- { [C2H6NH2] --> NH3 + C2H5 }' TAGL['%s-%s' % (dbse, 35)] = '{ NH2 + CH4 <-- [NH2CH4] } --> CH3 + NH3' TAGL['%s-%s' % (dbse, 36)] = 'NH2 + CH4 <-- { [NH2CH4] --> CH3 + NH3 }' TAGL['%s-%s' % (dbse, 37)] = '{ C5H8 <-- [C5H8] } --> C5H8' TAGL['%s-%s' % (dbse, 38)] = 'C5H8 <-- { [C5H8] --> C5H8 }' TAGL['%s-%s-reagent' % (dbse, 'C2H5' )] = 'C2H5' TAGL['%s-%s-reagent' % (dbse, 'C2H6' )] = 'Ethane' TAGL['%s-%s-reagent' % (dbse, 'C2H6NH2ts' )] = 'Transition state of C2H6 + NH2 <--> NH3 + C2H5' TAGL['%s-%s-reagent' % (dbse, 'C5H8' )] = 's-trans cis-C5H8' TAGL['%s-%s-reagent' % (dbse, 'C5H8ts' )] = 'Transition state of s-trans cis-C5H8 <--> s-trans cis-C5H8' TAGL['%s-%s-reagent' % (dbse, 'CH3' )] = 'CH3' TAGL['%s-%s-reagent' % (dbse, 'CH3H2ts' )] = 'Transition state of CH3 + H2 <--> H + CH4' 
TAGL['%s-%s-reagent' % (dbse, 'CH3NH2ts' )] = 'Transition state of CH3 + NH2 <--> CH4 + NH' TAGL['%s-%s-reagent' % (dbse, 'CH4' )] = 'Methane' TAGL['%s-%s-reagent' % (dbse, 'Cl' )] = 'Chlorine atom' TAGL['%s-%s-reagent' % (dbse, 'F' )] = 'Fluorine atom' TAGL['%s-%s-reagent' % (dbse, 'FH2ts' )] = 'Transition state of F + H2 <--> HF + H' TAGL['%s-%s-reagent' % (dbse, 'H' )] = 'Hydrogen atom' TAGL['%s-%s-reagent' % (dbse, 'H2' )] = 'Hydrogen molecule' TAGL['%s-%s-reagent' % (dbse, 'H2O' )] = 'Water' TAGL['%s-%s-reagent' % (dbse, 'H2S' )] = 'Hydrogen Sulfide' TAGL['%s-%s-reagent' % (dbse, 'HCl' )] = 'Hydrogen Chloride' TAGL['%s-%s-reagent' % (dbse, 'HClCH3ts' )] = 'Transition state of HCl + CH3 <--> Cl + CH4' TAGL['%s-%s-reagent' % (dbse, 'HHClts' )] = 'Transition state of H + HCl <--> H2 + Cl' TAGL['%s-%s-reagent' % (dbse, 'HF' )] = 'Hydrogen Fluoride' TAGL['%s-%s-reagent' % (dbse, 'HH2Sts' )] = 'Transition state of H + H2S <--> H2 + HS' TAGL['%s-%s-reagent' % (dbse, 'HH2ts' )] = 'Transition state of H + H2 <--> H2 + H' TAGL['%s-%s-reagent' % (dbse, 'NH' )] = 'NH' TAGL['%s-%s-reagent' % (dbse, 'HPH3ts' )] = 'Transition state of H + PH3 <--> PH2 + H2' TAGL['%s-%s-reagent' % (dbse, 'NH2' )] = 'NH2' TAGL['%s-%s-reagent' % (dbse, 'NH2C2H5ts' )] = 'Transition state of C2H5 + NH2 <--> NH + C2H6' TAGL['%s-%s-reagent' % (dbse, 'NH2CH4ts' )] = 'Transition state of CH4 + NH2 <--> NH3 + CH3' TAGL['%s-%s-reagent' % (dbse, 'NH3' )] = 'Ammonia' TAGL['%s-%s-reagent' % (dbse, 'O' )] = 'Oxygen atom' TAGL['%s-%s-reagent' % (dbse, 'OH' )] = 'OH' TAGL['%s-%s-reagent' % (dbse, 'OHC2H6ts' )] = 'Transition state of C2H6 + OH <--> H2O + C2H5' TAGL['%s-%s-reagent' % (dbse, 'OHCH3ts' )] = 'Transition state of O + CH4 <--> OH + CH3' TAGL['%s-%s-reagent' % (dbse, 'OHCH4ts' )] = 'Transition state of OH + CH4 <--> CH3 + H2O' TAGL['%s-%s-reagent' % (dbse, 'OHClts' )] = 'Transition state of O + HCl <--> OH + Cl' TAGL['%s-%s-reagent' % (dbse, 'OHH2ts' )] = 'Transition state of OH + H2 <--> H + H2O' TAGL['%s-%s-reagent' % (dbse, 'OHHts' )] = 'Transition state of OH + H <--> H2 + O' TAGL['%s-%s-reagent' % (dbse, 'OHNH3ts' )] = 'Transition state of OH + NH3 <--> NH2 + H2O' TAGL['%s-%s-reagent' % (dbse, 'PH2' )] = 'PH2' TAGL['%s-%s-reagent' % (dbse, 'PH3' )] = 'Phosphine' TAGL['%s-%s-reagent' % (dbse, 'HS' )] = 'HS' # <<< Geometry Specification Strings >>> GEOS = {} GEOS['%s-%s-reagent' % (dbse, 'C2H5')] = qcdb.Molecule(""" 0 2 C 0.00550995 -0.00307714 -0.77443959 C 0.00550995 -0.00307714 0.71569982 H 0.00550995 -1.01684444 1.11670108 H 0.37964525 0.84547158 -1.32730429 H -0.88217468 0.49798042 1.12141209 H 0.87299475 0.52193057 1.11660682 H -0.50718726 -0.77526005 -1.32801142 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'C2H6')] = qcdb.Molecule(""" 0 1 C 0.00000020 -0.00000013 -0.76309187 C 0.00000020 -0.00000013 0.76309163 H 0.00000020 -1.01606691 1.15831231 H -0.87903844 -0.50959541 -1.15830943 H -0.87994508 0.50802887 1.15831013 H 0.87993813 0.50804049 1.15830883 H -0.00180313 1.01606605 -1.15830975 H 0.88084363 -0.50646996 -1.15830912 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'C2H6NH2ts')] = qcdb.Molecule(""" 0 2 C -1.48570000 -0.44815600 -0.00001900 C -0.50504200 0.70174000 0.00002900 N 1.86516100 -0.34016700 -0.00005700 H -1.35419300 -1.07650500 -0.88050300 H -1.35415900 -1.07661100 0.88038500 H -2.51702500 -0.08617300 0.00002500 H -0.52222400 1.31611800 -0.89721800 H -0.52220500 1.31602900 0.89733800 H 0.66504700 0.14796100 -0.00003400 H 2.24664400 0.15971700 -0.80480600 H 2.24643900 0.15913300 0.80515100 
units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'C5H8')] = qcdb.Molecule(""" 0 1 C -2.05563800 -0.61227200 0.00000700 C -1.23109600 0.64044800 0.00004900 C 0.10563400 0.73427300 0.00002600 C 1.05755500 -0.37440700 -0.00004400 C 2.38358300 -0.19893600 -0.00003600 H -2.70508500 -0.64159700 0.87713200 H -2.70512900 -0.64150800 -0.87708900 H -1.45133200 -1.51607900 -0.00005500 H -1.79366500 1.56758600 0.00010300 H 0.54575600 1.72564300 0.00006400 H 0.66526200 -1.38324200 -0.00010500 H 3.06468900 -1.03771900 -0.00008800 H 2.81927500 0.79228500 0.00002300 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'C5H8ts')] = qcdb.Molecule(""" 0 1 C -1.29962300 -0.90485300 -0.02015500 C -1.20594700 0.50581700 -0.01341400 C 0.00000000 1.18336100 0.15330100 C 1.20594800 0.50581400 -0.01342200 C 1.29962600 -0.90485100 -0.02014700 H 2.16879700 -1.32754900 -0.51569700 H 1.03204100 -1.45438500 0.87316600 H 2.03713000 1.08558300 -0.39850400 H 0.00000100 2.26291300 0.08590500 H -2.03713300 1.08558700 -0.39848100 H -2.16879600 -1.32754000 -0.51571600 H -0.00001100 -1.18194200 -0.52080800 H -1.03205900 -1.45439400 0.87315800 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'CH3')] = qcdb.Molecule(""" 0 2 C 0.00000000 0.00000000 -0.00000000 H 0.00000000 0.00000000 1.07731727 H -0.00000000 0.93298412 -0.53865863 H 0.00000000 -0.93298412 -0.53865863 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'CH3H2ts')] = qcdb.Molecule(""" 0 2 C 0.00000000 0.26481300 0.00000000 H 1.05342900 0.51666800 0.00000000 H -0.52662700 0.51702500 0.91225000 H -0.52662700 0.51702500 -0.91225000 H -0.00026000 -1.11777100 0.00000000 H 0.00008400 -2.02182500 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'CH3NH2ts')] = qcdb.Molecule(""" 0 3 C -1.19957700 -0.01112600 -0.00003000 N 1.40071500 0.12986200 0.00001500 H -1.42666000 -0.51293200 0.93305700 H -1.41990700 -0.59138200 -0.88814300 H -1.52023700 1.02280600 -0.04578300 H 0.18892600 0.12689600 0.00100100 H 1.57033800 -0.88766700 -0.00005300 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'CH4')] = qcdb.Molecule(""" 0 1 C 0.00000000 0.00000000 0.00000000 H 0.00000000 1.08744517 0.00000000 H -0.51262657 -0.36248173 0.88789526 H -0.51262657 -0.36248173 -0.88789526 H 1.02525314 -0.36248173 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'Cl')] = qcdb.Molecule(""" 0 2 Cl 0.00000000 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'F')] = qcdb.Molecule(""" 0 2 F 0.00000000 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'FH2ts')] = qcdb.Molecule(""" 0 2 H 0.14656800 -1.12839000 0.00000000 F 0.00000000 0.33042200 0.00000000 H -0.14656800 -1.84541000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'H')] = qcdb.Molecule(""" 0 2 H 0.00000000 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'H2')] = qcdb.Molecule(""" 0 1 H 0.00000000 0.00000000 0.00000000 H 0.74187646 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'H2O')] = qcdb.Molecule(""" 0 1 O 0.00000000 0.00000000 -0.06555155 H 0.00000000 -0.75670946 0.52017534 H 0.00000000 0.75670946 0.52017534 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'H2S')] = qcdb.Molecule(""" 0 1 S 0.00000000 0.00000000 0.10251900 H 0.00000000 0.96624900 -0.82015400 H 0.00000000 -0.96624900 -0.82015400 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'HCl')] = qcdb.Molecule(""" 0 1 Cl 0.00000000 0.00000000 0.00000000 H 1.27444789 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'HClCH3ts')] = 
qcdb.Molecule(""" 0 2 C 0.24411700 0.59991600 1.70242300 H -0.67559700 0.27848200 2.17293900 H 0.35191000 1.66378600 1.53767200 H 1.14068600 0.06578700 1.98782200 H 0.05716300 0.13997300 0.39711200 Cl -0.13758000 -0.33809000 -0.95941600 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'HHClts')] = qcdb.Molecule(""" 0 2 H 0.00048000 -1.34062700 0.00000000 Cl 0.00000000 0.20325200 0.00000000 H -0.00048000 -2.11465900 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'HF')] = qcdb.Molecule(""" 0 1 F 0.00000000 0.00000000 0.00000000 H 0.91538107 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'HH2Sts')] = qcdb.Molecule(""" 0 2 H 1.26209700 -0.22009700 0.00000000 S 0.00000000 0.22315300 0.00000000 H -0.50057600 -1.11544500 0.00000000 H -0.76152100 -2.23491300 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'HH2ts')] = qcdb.Molecule(""" 0 2 H 0.00000000 0.00000000 0.00000000 H 0.00000000 0.00000000 0.92947400 H 0.00000000 0.00000000 -0.92947400 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'NH')] = qcdb.Molecule(""" 0 3 N 0.00000000 0.00000000 0.00000000 H 1.03673136 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'HPH3ts')] = qcdb.Molecule(""" 0 2 P 0.21742900 0.00008800 -0.11124900 H 0.24660900 1.03466800 0.85216400 H 0.26266100 -1.02505800 0.86162300 H -1.26641800 -0.01095200 -0.15062600 H -2.50429000 0.00002800 0.10557500 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'NH2')] = qcdb.Molecule(""" 0 2 N 0.00000000 0.00000000 -0.08007491 H 0.00000000 -0.80231373 0.55629442 H 0.00000000 0.80231373 0.55629442 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'NH2C2H5ts')] = qcdb.Molecule(""" 0 3 C -1.39498400 -0.44966100 0.00070300 C -0.43574600 0.71406300 0.00202700 N 1.92757000 -0.37835200 0.00303600 H -1.20008700 -1.12095100 -0.83568700 H -1.32209500 -1.02788400 0.92177300 H -2.42871300 -0.10535200 -0.08933400 H -0.41768800 1.30848200 -0.90720100 H -0.44112700 1.32909500 0.89746700 H 0.82850100 0.18059300 -0.02856100 H 2.47259200 0.49807300 0.00391000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'NH2CH4ts')] = qcdb.Molecule(""" 0 2 C -1.26075000 -0.00000600 0.01229100 N 1.31325500 -0.00000500 -0.13678200 H -1.58398700 0.90853800 -0.48474400 H -1.46367200 -0.00457300 1.07730200 H -1.58474800 -0.90388000 -0.49270000 H 0.04310800 -0.00006400 -0.15169200 H 1.48045900 0.80557700 0.46775100 H 1.48055700 -0.80552400 0.46780800 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'NH3')] = qcdb.Molecule(""" 0 1 N 0.00000000 0.00000000 0.11289000 H 0.00000000 0.93802400 -0.26340900 H 0.81235300 -0.46901200 -0.26340900 H -0.81235300 -0.46901200 -0.26340900 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'O')] = qcdb.Molecule(""" 0 3 O 0.00000000 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'OH')] = qcdb.Molecule(""" 0 2 O 0.00000000 0.00000000 0.00000000 H 0.96889819 0.00000000 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'OHC2H6ts')] = qcdb.Molecule(""" 0 2 C 1.45833400 -0.44636500 0.02547800 C 0.46942300 0.69742200 -0.02749300 O -1.85303700 -0.31465900 -0.05305500 H 1.30176400 -1.06107900 0.91073700 H 1.36658500 -1.08618900 -0.85111800 H 2.48224500 -0.06687900 0.05715000 H 0.47106900 1.32544300 0.86103700 H 0.53352400 1.30349500 -0.92856000 H -0.63023200 0.20781600 -0.07846500 H -2.26720700 0.38832100 0.46575100 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'OHCH3ts')] = qcdb.Molecule(""" 0 3 C 0.00029000 -1.14228900 0.00000000 H -1.05595700 -1.38473500 0.00000000 H 
0.52016700 -1.40738900 0.91244700 H 0.52016700 -1.40738900 -0.91244700 H 0.01156000 0.16009900 0.00000000 O 0.00029000 1.36164300 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'OHCH4ts')] = qcdb.Molecule(""" 0 2 C -1.21148700 0.00796800 0.00040700 O 1.29396500 -0.10869400 0.00013300 H 0.00947600 -0.11802000 0.00279900 H -1.52552900 -0.23325000 1.01007000 H -1.43066500 1.03323300 -0.27808200 H -1.55271000 -0.71011400 -0.73770200 H 1.41663600 0.84989400 -0.00059100 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'OHClts')] = qcdb.Molecule(""" 0 3 Cl 0.01882000 -0.81730100 0.00000000 H -0.47048800 0.56948000 0.00000000 O 0.01882000 1.66557900 0.00000000 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'OHH2ts')] = qcdb.Molecule(""" 0 2 O -0.30106400 -0.10804900 -0.00000800 H -0.42794500 0.85156900 0.00001600 H 1.01548600 -0.10036700 0.00011900 H 1.82096800 0.11318700 -0.00007300 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'OHHts')] = qcdb.Molecule(""" 0 3 H 0.00000000 0.00000000 -0.86028700 O 0.00000000 0.00000000 0.32902400 H 0.00000000 0.00000000 -1.77190500 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'OHNH3ts')] = qcdb.Molecule(""" 0 2 N -1.15081600 -0.04393200 -0.10255900 O 1.17918600 -0.09269600 -0.01029000 H -1.30318500 -0.54763800 0.76657100 H -1.33891300 0.93580800 0.09185400 H -0.03068700 -0.15383400 -0.35318400 H 1.29500900 0.81475300 0.29499100 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'PH2')] = qcdb.Molecule(""" 0 2 P 0.00000000 0.00000000 -0.11565700 H 1.02013000 0.00000000 0.86742700 H -1.02013000 0.00000000 0.86742700 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'PH3')] = qcdb.Molecule(""" 0 1 P 0.00000000 0.00000000 0.12641100 H 1.19133900 0.00000000 -0.63205600 H -0.59566900 -1.03173000 -0.63205600 H -0.59566900 1.03173000 -0.63205600 units angstrom """) GEOS['%s-%s-reagent' % (dbse, 'HS')] = qcdb.Molecule(""" 0 2 S 0.00000000 0.00000000 0.00000000 H 1.34020229 0.00000000 0.00000000 units angstrom """) ######################################################################### # <<< Supplementary Quantum Chemical Results >>> DATA = {} DATA['NUCLEAR REPULSION ENERGY'] = {} DATA['NUCLEAR REPULSION ENERGY']['HTBH-H-reagent' ] = 0.00000000 DATA['NUCLEAR REPULSION ENERGY']['HTBH-HCl-reagent' ] = 7.05875275 DATA['NUCLEAR REPULSION ENERGY']['HTBH-HHClts-reagent' ] = 10.39163823 DATA['NUCLEAR REPULSION ENERGY']['HTBH-H2-reagent' ] = 0.71329559 DATA['NUCLEAR REPULSION ENERGY']['HTBH-Cl-reagent' ] = 0.00000000 DATA['NUCLEAR REPULSION ENERGY']['HTBH-OH-reagent' ] = 4.36931115 DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHH2ts-reagent' ] = 10.73785396 DATA['NUCLEAR REPULSION ENERGY']['HTBH-H2O-reagent' ] = 9.19771594 DATA['NUCLEAR REPULSION ENERGY']['HTBH-CH3-reagent' ] = 9.69236444 DATA['NUCLEAR REPULSION ENERGY']['HTBH-CH3H2ts-reagent' ] = 15.32861238 DATA['NUCLEAR REPULSION ENERGY']['HTBH-CH4-reagent' ] = 13.46695412 DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHCH4ts-reagent' ] = 37.11882096 DATA['NUCLEAR REPULSION ENERGY']['HTBH-HH2ts-reagent' ] = 1.42332440 DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH3-reagent' ] = 11.97232339 DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHNH3ts-reagent' ] = 37.13900482 DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH2-reagent' ] = 7.56429116 DATA['NUCLEAR REPULSION ENERGY']['HTBH-HClCH3ts-reagent' ] = 46.25151943 DATA['NUCLEAR REPULSION ENERGY']['HTBH-C2H6-reagent' ] = 42.29535986 DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHC2H6ts-reagent' ] = 76.62129511 DATA['NUCLEAR REPULSION ENERGY']['HTBH-C2H5-reagent' ] = 36.98165035 
DATA['NUCLEAR REPULSION ENERGY']['HTBH-F-reagent' ] = 0.00000000 DATA['NUCLEAR REPULSION ENERGY']['HTBH-FH2ts-reagent' ] = 6.11540453 DATA['NUCLEAR REPULSION ENERGY']['HTBH-HF-reagent' ] = 5.20285489 DATA['NUCLEAR REPULSION ENERGY']['HTBH-O-reagent' ] = 0.00000000 DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHCH3ts-reagent' ] = 30.91033235 DATA['NUCLEAR REPULSION ENERGY']['HTBH-PH3-reagent' ] = 17.63061432 DATA['NUCLEAR REPULSION ENERGY']['HTBH-HPH3ts-reagent' ] = 21.01063452 DATA['NUCLEAR REPULSION ENERGY']['HTBH-PH2-reagent' ] = 11.46498480 DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHHts-reagent' ] = 6.15505787 DATA['NUCLEAR REPULSION ENERGY']['HTBH-H2S-reagent' ] = 12.94849742 DATA['NUCLEAR REPULSION ENERGY']['HTBH-HH2Sts-reagent' ] = 16.45756641 DATA['NUCLEAR REPULSION ENERGY']['HTBH-HS-reagent' ] = 6.31758012 DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHClts-reagent' ] = 38.62988868 DATA['NUCLEAR REPULSION ENERGY']['HTBH-CH3NH2ts-reagent' ] = 33.45955425 DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH-reagent' ] = 3.57299934 DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH2C2H5ts-reagent' ] = 71.85720179 DATA['NUCLEAR REPULSION ENERGY']['HTBH-C2H6NH2ts-reagent' ] = 78.78495055 DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH2CH4ts-reagent' ] = 39.42842411 DATA['NUCLEAR REPULSION ENERGY']['HTBH-C5H8-reagent' ] = 155.81524012 DATA['NUCLEAR REPULSION ENERGY']['HTBH-C5H8ts-reagent' ] = 164.93671263
import os import pytest import psi4 pytestmark = pytest.mark.quick def test_fcidump_scf_energy(): """Compare FCIDUMP computed SCF energy against call to energy()""" Ne = psi4.geometry(""" Ne 0 0 0 """) psi4.set_options({'basis': 'cc-pVDZ', 'scf_type': 'pk', 'reference': 'uhf', 'd_convergence': 1e-8, 'e_convergence': 1e-8 }) scf_e, scf_wfn = psi4.energy('scf', return_wfn=True) psi4.fcidump(scf_wfn, fname='FCIDUMP_SCF', oe_ints=['EIGENVALUES']) intdump = psi4.fcidump_from_file('FCIDUMP_SCF') e_dict = psi4.energies_from_fcidump(intdump) fcidump_e = e_dict['SCF TOTAL ENERGY'] assert psi4.compare_values(scf_e, fcidump_e, 5, 'SCF energy') #TEST def test_fcidump_mp2_energy(): """Compare FCIDUMP computed MP2 energy against call to energy()""" Ne = psi4.geometry(""" Ne 0 0 0 """) psi4.set_options({'basis': 'cc-pVDZ', 'scf_type': 'pk', 'reference': 'uhf', 'd_convergence': 1e-8, 'e_convergence': 1e-8 }) mp2_e, mp2_wfn = psi4.energy('mp2', return_wfn=True) psi4.fcidump(mp2_wfn, fname='FCIDUMP_MP2', oe_ints=['EIGENVALUES']) intdump = psi4.fcidump_from_file('FCIDUMP_MP2') e_dict = psi4.energies_from_fcidump(intdump) fcidump_e = e_dict['SCF TOTAL ENERGY'] + e_dict['MP2 CORRELATION ENERGY'] assert psi4.compare_values(mp2_e, fcidump_e, 5, 'MP2 energy') #TEST
CDSherrill/psi4
tests/pytests/test_fcidump_energy.py
psi4/share/psi4/databases/HTBH.py
# # @BEGIN LICENSE # # Psi4: an open-source quantum chemistry software package # # Copyright (c) 2007-2019 The Psi4 Developers. # # The copyrights for code used from other parties are included in # the corresponding files. # # This file is part of Psi4. # # Psi4 is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, version 3. # # Psi4 is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along # with Psi4; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # @END LICENSE # """Module with commands building :py:class:`~basislist.BasisFamily` objects for Pople and other non-Dunning orbital basis sets. Some plausible fitting basis sets are supplied as defaults. """ from .basislist import * def load_basfam_other(): # Pople basis_sto3g = BasisFamily('STO-3G', zeta=1) basis_321g = BasisFamily('3-21G', zeta=1) basisfamily_list.append(basis_sto3g) basisfamily_list.append(basis_321g) basis_631g = BasisFamily('6-31G', zeta=2) basis_631g_d_ = BasisFamily('6-31G(d)', zeta=2) basis_631g_d_p_ = BasisFamily('6-31G(d,p)', zeta=2) basis_631gs = BasisFamily('6-31G*', '6-31g_d_', zeta=2) basis_631gss = BasisFamily('6-31G**', '6-31g_d_p_', zeta=2) basis_631pg = BasisFamily('6-31+G', zeta=2) basis_631pg_d_ = BasisFamily('6-31+G(d)', zeta=2) basis_631pg_d_p_ = BasisFamily('6-31+G(d,p)', zeta=2) basis_631pgs = BasisFamily('6-31+G*', '6-31pg_d_', zeta=2) basis_631pgss = BasisFamily('6-31+G**', '6-31pg_d_p_', zeta=2) basis_631ppg = BasisFamily('6-31++G', zeta=2) basis_631ppg_d_ = BasisFamily('6-31++G(d)', zeta=2) basis_631ppg_d_p_ = BasisFamily('6-31++G(d,p)', zeta=2) basis_631ppgs = BasisFamily('6-31++G*', '6-31ppg_d_', zeta=2) basis_631ppgss = BasisFamily('6-31++G**', '6-31ppg_d_p_', zeta=2) basisfamily_list.append(basis_631g) basisfamily_list.append(basis_631g_d_) basisfamily_list.append(basis_631g_d_p_) basisfamily_list.append(basis_631gs) basisfamily_list.append(basis_631gss) basisfamily_list.append(basis_631pg) basisfamily_list.append(basis_631pg_d_) basisfamily_list.append(basis_631pg_d_p_) basisfamily_list.append(basis_631pgs) basisfamily_list.append(basis_631pgss) basisfamily_list.append(basis_631ppg) basisfamily_list.append(basis_631ppg_d_) basisfamily_list.append(basis_631ppg_d_p_) basisfamily_list.append(basis_631ppgs) basisfamily_list.append(basis_631ppgss) basis_6311g = BasisFamily('6-311G', zeta=3) basis_6311g_d_ = BasisFamily('6-311G(d)', zeta=3) basis_6311g_d_p_ = BasisFamily('6-311G(d,p)', zeta=3) basis_6311gs = BasisFamily('6-311G*', '6-311g_d_', zeta=3) basis_6311gss = BasisFamily('6-311G**', '6-311g_d_p_', zeta=3) basis_6311g_2d_ = BasisFamily('6-311G(2d)', zeta=3) basis_6311g_2d_p_ = BasisFamily('6-311G(2d,p)', zeta=3) basis_6311g_2d_2p_ = BasisFamily('6-311G(2d,2p)', zeta=3) basis_6311g_2df_ = BasisFamily('6-311G(2df)', zeta=3) basis_6311g_2df_p_ = BasisFamily('6-311G(2df,p)', zeta=3) basis_6311g_2df_2p_ = BasisFamily('6-311G(2df,2p)', zeta=3) basis_6311g_2df_2pd_ = BasisFamily('6-311G(2df,2pd)', zeta=3) basis_6311g_3df_ = BasisFamily('6-311G(3df)', zeta=3) basis_6311g_3df_p_ = BasisFamily('6-311G(3df,p)', zeta=3) basis_6311g_3df_2p_ = BasisFamily('6-311G(3df,2p)', 
zeta=3) basis_6311g_3df_2pd_ = BasisFamily('6-311G(3df,2pd)', zeta=3) basis_6311g_3df_3pd_ = BasisFamily('6-311G(3df,3pd)', zeta=3) basisfamily_list.append(basis_6311g) basisfamily_list.append(basis_6311g_d_) basisfamily_list.append(basis_6311g_d_p_) basisfamily_list.append(basis_6311gs) basisfamily_list.append(basis_6311gss) basisfamily_list.append(basis_6311g_2d_) basisfamily_list.append(basis_6311g_2d_p_) basisfamily_list.append(basis_6311g_2d_2p_) basisfamily_list.append(basis_6311g_2df_) basisfamily_list.append(basis_6311g_2df_p_) basisfamily_list.append(basis_6311g_2df_2p_) basisfamily_list.append(basis_6311g_2df_2pd_) basisfamily_list.append(basis_6311g_3df_) basisfamily_list.append(basis_6311g_3df_p_) basisfamily_list.append(basis_6311g_3df_2p_) basisfamily_list.append(basis_6311g_3df_2pd_) basisfamily_list.append(basis_6311g_3df_3pd_) basis_6311pg = BasisFamily('6-311+G', zeta=3) basis_6311pg_d_ = BasisFamily('6-311+G(d)', zeta=3) basis_6311pg_d_p_ = BasisFamily('6-311+G(d,p)', zeta=3) basis_6311pgs = BasisFamily('6-311+G*', '6-311pg_d_', zeta=3) basis_6311pgss = BasisFamily('6-311+G**', '6-311pg_d_p_', zeta=3) basis_6311pg_2d_ = BasisFamily('6-311+G(2d)', zeta=3) basis_6311pg_2d_p_ = BasisFamily('6-311+G(2d,p)', zeta=3) basis_6311pg_2d_2p_ = BasisFamily('6-311+G(2d,2p)', zeta=3) basis_6311pg_2df_ = BasisFamily('6-311+G(2df)', zeta=3) basis_6311pg_2df_p_ = BasisFamily('6-311+G(2df,p)', zeta=3) basis_6311pg_2df_2p_ = BasisFamily('6-311+G(2df,2p)', zeta=3) basis_6311pg_2df_2pd_ = BasisFamily('6-311+G(2df,2pd)', zeta=3) basis_6311pg_3df_ = BasisFamily('6-311+G(3df)', zeta=3) basis_6311pg_3df_p_ = BasisFamily('6-311+G(3df,p)', zeta=3) basis_6311pg_3df_2p_ = BasisFamily('6-311+G(3df,2p)', zeta=3) basis_6311pg_3df_2pd_ = BasisFamily('6-311+G(3df,2pd)', zeta=3) basis_6311pg_3df_3pd_ = BasisFamily('6-311+G(3df,3pd)', zeta=3) basisfamily_list.append(basis_6311pg) basisfamily_list.append(basis_6311pg_d_) basisfamily_list.append(basis_6311pg_d_p_) basisfamily_list.append(basis_6311pgs) basisfamily_list.append(basis_6311pgss) basisfamily_list.append(basis_6311pg_2d_) basisfamily_list.append(basis_6311pg_2d_p_) basisfamily_list.append(basis_6311pg_2d_2p_) basisfamily_list.append(basis_6311pg_2df_) basisfamily_list.append(basis_6311pg_2df_p_) basisfamily_list.append(basis_6311pg_2df_2p_) basisfamily_list.append(basis_6311pg_2df_2pd_) basisfamily_list.append(basis_6311pg_3df_) basisfamily_list.append(basis_6311pg_3df_p_) basisfamily_list.append(basis_6311pg_3df_2p_) basisfamily_list.append(basis_6311pg_3df_2pd_) basisfamily_list.append(basis_6311pg_3df_3pd_) basis_6311ppg = BasisFamily('6-311++G', zeta=3) basis_6311ppg_d_ = BasisFamily('6-311++G(d)', zeta=3) basis_6311ppg_d_p_ = BasisFamily('6-311++G(d,p)', zeta=3) basis_6311ppgs = BasisFamily('6-311++G*', '6-311ppg_d_', zeta=3) basis_6311ppgss = BasisFamily('6-311++G**', '6-311ppg_d_p_', zeta=3) basis_6311ppg_2d_ = BasisFamily('6-311++G(2d)', zeta=3) basis_6311ppg_2d_p_ = BasisFamily('6-311++G(2d,p)', zeta=3) basis_6311ppg_2d_2p_ = BasisFamily('6-311++G(2d,2p)', zeta=3) basis_6311ppg_2df_ = BasisFamily('6-311++G(2df)', zeta=3) basis_6311ppg_2df_p_ = BasisFamily('6-311++G(2df,p)', zeta=3) basis_6311ppg_2df_2p_ = BasisFamily('6-311++G(2df,2p)', zeta=3) basis_6311ppg_2df_2pd_ = BasisFamily('6-311++G(2df,2pd)', zeta=3) basis_6311ppg_3df_ = BasisFamily('6-311++G(3df)', zeta=3) basis_6311ppg_3df_p_ = BasisFamily('6-311++G(3df,p)', zeta=3) basis_6311ppg_3df_2p_ = BasisFamily('6-311++G(3df,2p)', zeta=3) basis_6311ppg_3df_2pd_ = 
BasisFamily('6-311++G(3df,2pd)', zeta=3) basis_6311ppg_3df_3pd_ = BasisFamily('6-311++G(3df,3pd)', zeta=3) basisfamily_list.append(basis_6311ppg) basisfamily_list.append(basis_6311ppg_d_) basisfamily_list.append(basis_6311ppg_d_p_) basisfamily_list.append(basis_6311ppgs) basisfamily_list.append(basis_6311ppgss) basisfamily_list.append(basis_6311ppg_2d_) basisfamily_list.append(basis_6311ppg_2d_p_) basisfamily_list.append(basis_6311ppg_2d_2p_) basisfamily_list.append(basis_6311ppg_2df_) basisfamily_list.append(basis_6311ppg_2df_p_) basisfamily_list.append(basis_6311ppg_2df_2p_) basisfamily_list.append(basis_6311ppg_2df_2pd_) basisfamily_list.append(basis_6311ppg_3df_) basisfamily_list.append(basis_6311ppg_3df_p_) basisfamily_list.append(basis_6311ppg_3df_2p_) basisfamily_list.append(basis_6311ppg_3df_2pd_) basisfamily_list.append(basis_6311ppg_3df_3pd_) # Ahlrichs basis_def2sv_p_ = BasisFamily('def2-SV(P)', zeta=2) basis_def2msvp = BasisFamily('def2-mSVP', zeta=2) basis_def2svp = BasisFamily('def2-SVP', zeta=2) basis_def2svpd = BasisFamily('def2-SVPD', zeta=2) basis_def2tzvp = BasisFamily('def2-TZVP', zeta=3) basis_def2tzvpd = BasisFamily('def2-TZVPD', zeta=3) basis_def2tzvpp = BasisFamily('def2-TZVPP', zeta=3) basis_def2tzvppd = BasisFamily('def2-TZVPPD', zeta=3) basis_def2qzvp = BasisFamily('def2-QZVP', zeta=4) basis_def2qzvpd = BasisFamily('def2-QZVPD', zeta=4) basis_def2qzvpp = BasisFamily('def2-QZVPP', zeta=4) basis_def2qzvppd = BasisFamily('def2-QZVPPD', zeta=4) basis_def2sv_p_.add_jfit('def2-universal-JFIT') basis_def2msvp.add_jfit('def2-universal-JFIT') basis_def2svp.add_jfit('def2-universal-JFIT') basis_def2svpd.add_jfit('def2-universal-JFIT') basis_def2tzvp.add_jfit('def2-universal-JFIT') basis_def2tzvpd.add_jfit('def2-universal-JFIT') basis_def2tzvpp.add_jfit('def2-universal-JFIT') basis_def2tzvppd.add_jfit('def2-universal-JFIT') basis_def2qzvp.add_jfit('def2-universal-JFIT') basis_def2qzvpd.add_jfit('def2-universal-JFIT') basis_def2qzvpp.add_jfit('def2-universal-JFIT') basis_def2qzvppd.add_jfit('def2-universal-JFIT') basis_def2sv_p_.add_jkfit('def2-universal-JKFIT') basis_def2msvp.add_jkfit('def2-universal-JKFIT') basis_def2svp.add_jkfit('def2-universal-JKFIT') basis_def2svpd.add_jkfit('def2-universal-JKFIT') basis_def2tzvp.add_jkfit('def2-universal-JKFIT') basis_def2tzvpd.add_jkfit('def2-universal-JKFIT') basis_def2tzvpp.add_jkfit('def2-universal-JKFIT') basis_def2tzvppd.add_jkfit('def2-universal-JKFIT') basis_def2qzvp.add_jkfit('def2-universal-JKFIT') basis_def2qzvpd.add_jkfit('def2-universal-JKFIT') basis_def2qzvpp.add_jkfit('def2-universal-JKFIT') basis_def2qzvppd.add_jkfit('def2-universal-JKFIT') basis_def2sv_p_.add_rifit('def2-SV(P)-RI') basis_def2msvp.add_rifit('def2-SVP-RI') basis_def2svp.add_rifit('def2-SVP-RI') basis_def2svpd.add_rifit('def2-SVPD-RI') basis_def2tzvp.add_rifit('def2-TZVP-RI') basis_def2tzvpd.add_rifit('def2-TZVPD-RI') basis_def2tzvpp.add_rifit('def2-TZVPP-RI') basis_def2tzvppd.add_rifit('def2-TZVPPD-RI') basis_def2qzvp.add_rifit('def2-QZVP-RI') basis_def2qzvpp.add_rifit('def2-QZVPP-RI') basis_def2qzvppd.add_rifit('def2-QZVPPD-RI') basisfamily_list.append(basis_def2sv_p_) basisfamily_list.append(basis_def2msvp) basisfamily_list.append(basis_def2svp) basisfamily_list.append(basis_def2svpd) basisfamily_list.append(basis_def2tzvp) basisfamily_list.append(basis_def2tzvpd) basisfamily_list.append(basis_def2tzvpp) basisfamily_list.append(basis_def2tzvppd) basisfamily_list.append(basis_def2qzvp) basisfamily_list.append(basis_def2qzvpd) 
basisfamily_list.append(basis_def2qzvpp) basisfamily_list.append(basis_def2qzvppd) # Jensen basis_augpcseg0 = BasisFamily('aug-pcseg-0', zeta=1) basis_augpcseg1 = BasisFamily('aug-pcseg-1', zeta=2) basis_augpcseg2 = BasisFamily('aug-pcseg-2', zeta=3) basis_augpcseg3 = BasisFamily('aug-pcseg-3', zeta=4) basis_augpcseg4 = BasisFamily('aug-pcseg-4', zeta=5) basis_augpcsseg0 = BasisFamily('aug-pcSseg-0', zeta=1) basis_augpcsseg1 = BasisFamily('aug-pcSseg-1', zeta=2) basis_augpcsseg2 = BasisFamily('aug-pcSseg-2', zeta=3) basis_augpcsseg3 = BasisFamily('aug-pcSseg-3', zeta=4) basis_augpcsseg4 = BasisFamily('aug-pcSseg-4', zeta=5) basis_pcseg0 = BasisFamily('pcseg-0', zeta=1) basis_pcseg1 = BasisFamily('pcseg-1', zeta=2) basis_pcseg2 = BasisFamily('pcseg-2', zeta=3) basis_pcseg3 = BasisFamily('pcseg-3', zeta=4) basis_pcseg4 = BasisFamily('pcseg-4', zeta=5) basis_pcsseg0 = BasisFamily('pcSseg-0', zeta=1) basis_pcsseg1 = BasisFamily('pcSseg-1', zeta=2) basis_pcsseg2 = BasisFamily('pcSseg-2', zeta=3) basis_pcsseg3 = BasisFamily('pcSseg-3', zeta=4) basis_pcsseg4 = BasisFamily('pcSseg-4', zeta=5) # Here lie practical (non-validated) fitting bases for # Jensen orbital basis sets basis_augpcseg0.add_jfit('def2-universal-JFIT') basis_augpcseg1.add_jfit('def2-universal-JFIT') basis_augpcseg2.add_jfit('def2-universal-JFIT') basis_augpcseg3.add_jfit('def2-universal-JFIT') basis_augpcsseg0.add_jfit('def2-universal-JFIT') basis_augpcsseg1.add_jfit('def2-universal-JFIT') basis_augpcsseg2.add_jfit('def2-universal-JFIT') basis_augpcsseg3.add_jfit('def2-universal-JFIT') basis_pcseg0.add_jfit('def2-universal-JFIT') basis_pcseg1.add_jfit('def2-universal-JFIT') basis_pcseg2.add_jfit('def2-universal-JFIT') basis_pcseg3.add_jfit('def2-universal-JFIT') basis_pcsseg0.add_jfit('def2-universal-JFIT') basis_pcsseg1.add_jfit('def2-universal-JFIT') basis_pcsseg2.add_jfit('def2-universal-JFIT') basis_pcsseg3.add_jfit('def2-universal-JFIT') basis_augpcseg0.add_jkfit('def2-universal-JKFIT') basis_augpcseg1.add_jkfit('def2-universal-JKFIT') basis_augpcseg2.add_jkfit('def2-universal-JKFIT') basis_augpcseg3.add_jkfit('def2-universal-JKFIT') basis_augpcseg4.add_jkfit('aug-cc-pV5Z-JKFIT') basis_augpcsseg0.add_jkfit('def2-universal-JKFIT') basis_augpcsseg1.add_jkfit('def2-universal-JKFIT') basis_augpcsseg2.add_jkfit('def2-universal-JKFIT') basis_augpcsseg3.add_jkfit('def2-universal-JKFIT') basis_augpcsseg4.add_jkfit('aug-cc-pV5Z-JKFIT') basis_pcseg0.add_jkfit('def2-universal-JKFIT') basis_pcseg1.add_jkfit('def2-universal-JKFIT') basis_pcseg2.add_jkfit('def2-universal-JKFIT') basis_pcseg3.add_jkfit('def2-universal-JKFIT') basis_pcseg4.add_jkfit('cc-pV5Z-JKFIT') basis_pcsseg0.add_jkfit('def2-universal-JKFIT') basis_pcsseg1.add_jkfit('def2-universal-JKFIT') basis_pcsseg2.add_jkfit('def2-universal-JKFIT') basis_pcsseg3.add_jkfit('def2-universal-JKFIT') basis_pcsseg4.add_jkfit('cc-pV5Z-JKFIT') basis_augpcseg0.add_rifit('def2-SV(P)-RI') basis_augpcseg1.add_rifit('def2-SVPD-RI') basis_augpcseg2.add_rifit('def2-TZVPPD-RI') basis_augpcseg3.add_rifit('def2-QZVPPD-RI') basis_augpcseg4.add_rifit('aug-cc-pV5Z-RI') basis_augpcsseg0.add_rifit('def2-SV(P)-RI') basis_augpcsseg1.add_rifit('def2-SVPD-RI') basis_augpcsseg2.add_rifit('def2-TZVPPD-RI') basis_augpcsseg3.add_rifit('def2-QZVPPD-RI') basis_augpcsseg4.add_rifit('aug-cc-pwCV5Z-RI') basis_pcseg0.add_rifit('def2-SV(P)-RI') basis_pcseg1.add_rifit('def2-SVP-RI') basis_pcseg2.add_rifit('def2-TZVPP-RI') basis_pcseg3.add_rifit('def2-QZVPP-RI') basis_pcseg4.add_rifit('cc-pV5Z-RI') 
basis_pcsseg0.add_rifit('def2-SV(P)-RI') basis_pcsseg1.add_rifit('def2-SVP-RI') basis_pcsseg2.add_rifit('def2-TZVPP-RI') basis_pcsseg3.add_rifit('def2-QZVPP-RI') basis_pcsseg4.add_rifit('cc-pwCV5Z-RI') basisfamily_list.append(basis_augpcseg0) basisfamily_list.append(basis_augpcseg1) basisfamily_list.append(basis_augpcseg2) basisfamily_list.append(basis_augpcseg3) basisfamily_list.append(basis_augpcseg4) basisfamily_list.append(basis_augpcsseg0) basisfamily_list.append(basis_augpcsseg1) basisfamily_list.append(basis_augpcsseg2) basisfamily_list.append(basis_augpcsseg3) basisfamily_list.append(basis_augpcsseg4) basisfamily_list.append(basis_pcseg0) basisfamily_list.append(basis_pcseg1) basisfamily_list.append(basis_pcseg2) basisfamily_list.append(basis_pcseg3) basisfamily_list.append(basis_pcseg4) basisfamily_list.append(basis_pcsseg0) basisfamily_list.append(basis_pcsseg1) basisfamily_list.append(basis_pcsseg2) basisfamily_list.append(basis_pcsseg3) basisfamily_list.append(basis_pcsseg4) # Minix basis_minix = BasisFamily('minix', zeta=2) basis_minix.add_jfit('def2-universal-JFIT') basis_minix.add_jkfit('def2-universal-JKFIT') basis_minix.add_rifit('def2-SVP-RI') basisfamily_list.append(basis_minix) # Others basis_dz = BasisFamily('DZ') basis_dzp = BasisFamily('DZP') basis_dzvp = BasisFamily('DZVP') basis_psi3dzp = BasisFamily('psi3-DZP') basis_psi3tz2p = BasisFamily('psi3-TZ2P') basis_psi3tz2pf = BasisFamily('psi3-TZ2PF') basis_sadlejlpoldl = BasisFamily('sadlej-lpol-dl') basis_sadlejlpolds = BasisFamily('sadlej-lpol-ds') basis_sadlejlpolfl = BasisFamily('sadlej-lpol-fl') basis_sadlejlpolfs = BasisFamily('sadlej-lpol-fs') basisfamily_list.append(basis_dz) basisfamily_list.append(basis_dzp) basisfamily_list.append(basis_dzvp) basisfamily_list.append(basis_psi3dzp) basisfamily_list.append(basis_psi3tz2p) basisfamily_list.append(basis_psi3tz2pf) basisfamily_list.append(basis_sadlejlpoldl) basisfamily_list.append(basis_sadlejlpolds) basisfamily_list.append(basis_sadlejlpolfl) basisfamily_list.append(basis_sadlejlpolfs) # Here lie practical (non-validated) fitting bases for # Pople orbital basis sets basis_sto3g.add_jkfit('def2-universal-JKFIT') basis_sto3g.add_rifit('def2-SVP-RIFIT') basis_321g.add_jkfit('def2-universal-JKFIT') basis_321g.add_rifit('def2-SVP-RIFIT') basis_631g.add_jkfit('cc-pvdz-jkfit') basis_631g_d_.add_jkfit('cc-pvdz-jkfit') basis_631g_d_p_.add_jkfit('cc-pvdz-jkfit') basis_631gs.add_jkfit('cc-pvdz-jkfit') basis_631gss.add_jkfit('cc-pvdz-jkfit') basis_631g.add_rifit('cc-pvdz-ri') basis_631g_d_.add_rifit('cc-pvdz-ri') basis_631g_d_p_.add_rifit('cc-pvdz-ri') basis_631gs.add_rifit('cc-pvdz-ri') basis_631gss.add_rifit('cc-pvdz-ri') basis_631pg.add_jkfit('heavy-aug-cc-pvdz-jkfit') basis_631pg_d_.add_jkfit('heavy-aug-cc-pvdz-jkfit') basis_631pg_d_p_.add_jkfit('heavy-aug-cc-pvdz-jkfit') basis_631pgs.add_jkfit('heavy-aug-cc-pvdz-jkfit') basis_631pgss.add_jkfit('heavy-aug-cc-pvdz-jkfit') basis_631pg.add_rifit('heavy-aug-cc-pvdz-ri') basis_631pg_d_.add_rifit('heavy-aug-cc-pvdz-ri') basis_631pg_d_p_.add_rifit('heavy-aug-cc-pvdz-ri') basis_631pgs.add_rifit('heavy-aug-cc-pvdz-ri') basis_631pgss.add_rifit('heavy-aug-cc-pvdz-ri') basis_631ppg.add_jkfit('aug-cc-pvdz-jkfit') basis_631ppg_d_.add_jkfit('aug-cc-pvdz-jkfit') basis_631ppg_d_p_.add_jkfit('aug-cc-pvdz-jkfit') basis_631ppgs.add_jkfit('aug-cc-pvdz-jkfit') basis_631ppgss.add_jkfit('aug-cc-pvdz-jkfit') basis_631ppg.add_rifit('aug-cc-pvdz-ri') basis_631ppg_d_.add_rifit('aug-cc-pvdz-ri') basis_631ppg_d_p_.add_rifit('aug-cc-pvdz-ri') 
basis_631ppgs.add_rifit('aug-cc-pvdz-ri') basis_631ppgss.add_rifit('aug-cc-pvdz-ri') basis_6311g.add_jkfit('cc-pvtz-jkfit') basis_6311g_d_.add_jkfit('cc-pvtz-jkfit') basis_6311g_d_p_.add_jkfit('cc-pvtz-jkfit') basis_6311gs.add_jkfit('cc-pvtz-jkfit') basis_6311gss.add_jkfit('cc-pvtz-jkfit') basis_6311g_2d_.add_jkfit('cc-pvtz-jkfit') basis_6311g_2d_p_.add_jkfit('cc-pvtz-jkfit') basis_6311g_2d_2p_.add_jkfit('cc-pvtz-jkfit') basis_6311g_2df_.add_jkfit('cc-pvtz-jkfit') basis_6311g_2df_p_.add_jkfit('cc-pvtz-jkfit') basis_6311g_2df_2p_.add_jkfit('cc-pvtz-jkfit') basis_6311g_2df_2pd_.add_jkfit('cc-pvtz-jkfit') basis_6311g_3df_.add_jkfit('cc-pvtz-jkfit') basis_6311g_3df_p_.add_jkfit('cc-pvtz-jkfit') basis_6311g_3df_2p_.add_jkfit('cc-pvtz-jkfit') basis_6311g_3df_2pd_.add_jkfit('cc-pvtz-jkfit') basis_6311g_3df_3pd_.add_jkfit('cc-pvtz-jkfit') basis_6311g.add_rifit('cc-pvtz-ri') basis_6311g_d_.add_rifit('cc-pvtz-ri') basis_6311g_d_p_.add_rifit('cc-pvtz-ri') basis_6311gs.add_rifit('cc-pvtz-ri') basis_6311gss.add_rifit('cc-pvtz-ri') basis_6311g_2d_.add_rifit('cc-pvtz-ri') basis_6311g_2d_p_.add_rifit('cc-pvtz-ri') basis_6311g_2d_2p_.add_rifit('cc-pvtz-ri') basis_6311g_2df_.add_rifit('cc-pvtz-ri') basis_6311g_2df_p_.add_rifit('cc-pvtz-ri') basis_6311g_2df_2p_.add_rifit('cc-pvtz-ri') basis_6311g_2df_2pd_.add_rifit('cc-pvtz-ri') basis_6311g_3df_.add_rifit('cc-pvtz-ri') basis_6311g_3df_p_.add_rifit('cc-pvtz-ri') basis_6311g_3df_2p_.add_rifit('cc-pvtz-ri') basis_6311g_3df_2pd_.add_rifit('cc-pvtz-ri') basis_6311g_3df_3pd_.add_rifit('cc-pvtz-ri') basis_6311pg.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_d_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_d_p_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pgs.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pgss.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_2d_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_2d_p_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_2d_2p_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_2df_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_2df_p_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_2df_2p_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_2df_2pd_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_3df_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_3df_p_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_3df_2p_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_3df_2pd_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg_3df_3pd_.add_jkfit('heavy-aug-cc-pvtz-jkfit') basis_6311pg.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_d_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_d_p_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pgs.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pgss.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_2d_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_2d_p_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_2d_2p_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_2df_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_2df_p_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_2df_2p_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_2df_2pd_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_3df_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_3df_p_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_3df_2p_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_3df_2pd_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311pg_3df_3pd_.add_rifit('heavy-aug-cc-pvtz-ri') basis_6311ppg.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_d_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_d_p_.add_jkfit('aug-cc-pvtz-jkfit') 
basis_6311ppgs.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppgss.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2d_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2d_p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2d_2p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2df_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2df_p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2df_2p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2df_2pd_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_2p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_2pd_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_3pd_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_d_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_d_p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppgs.add_rifit('aug-cc-pvtz-ri') basis_6311ppgss.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2d_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2d_p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2d_2p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2df_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2df_p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2df_2p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2df_2pd_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_2p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_2pd_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_3pd_.add_rifit('aug-cc-pvtz-ri') # Petersson's nZaPa-NR basis sets basis_2zapa_nr = BasisFamily('2zapa-nr',zeta=2) basis_3zapa_nr = BasisFamily('3zapa-nr',zeta=3) basis_4zapa_nr = BasisFamily('4zapa-nr',zeta=4) basis_5zapa_nr = BasisFamily('5zapa-nr',zeta=5) basis_6zapa_nr = BasisFamily('6zapa-nr',zeta=6) basis_7zapa_nr = BasisFamily('7zapa-nr',zeta=7) # fitting sets for nZaPa-NR # Dunnings zeta+1 to be safe, tested on water dimer # the full aug-JKFIT is possibly too much #--------SCF-JKFIT error for nZaPa-NR # results for GS energies of water dimer: # delta_jk = E_conv - E_DFJK # ZaPa zeta 2 : delta_jk = -0.000009 # ZaPa zeta 3 : delta_jk = -0.000002 # ZaPa zeta 4 : delta_jk = -0.000002 # ZaPa zeta 5 : delta_jk = -0.000002 # ZaPa zeta 6 : delta_jk = 0.000000 # ZaPa zeta 7 : delta_jk = 0.000000 basis_2zapa_nr.add_jkfit('aug-cc-pvtz-jkfit') basis_3zapa_nr.add_jkfit('aug-cc-pvqz-jkfit') basis_4zapa_nr.add_jkfit('aug-cc-pv5z-jkfit') basis_5zapa_nr.add_jkfit('aug-cc-pv5z-jkfit') basis_6zapa_nr.add_jkfit('aug-cc-pv6z-ri') basis_7zapa_nr.add_jkfit('aug-cc-pv6z-ri') basis_2zapa_nr.add_rifit('aug-cc-pvtz-ri') basis_3zapa_nr.add_rifit('aug-cc-pvqz-ri') basis_4zapa_nr.add_rifit('aug-cc-pv5z-ri') basis_5zapa_nr.add_rifit('aug-cc-pv6z-ri') basis_6zapa_nr.add_rifit('aug-cc-pv6z-ri') basis_7zapa_nr.add_rifit('aug-cc-pv6z-ri') basisfamily_list.append(basis_2zapa_nr) basisfamily_list.append(basis_3zapa_nr) basisfamily_list.append(basis_4zapa_nr) basisfamily_list.append(basis_5zapa_nr) basisfamily_list.append(basis_6zapa_nr) basisfamily_list.append(basis_7zapa_nr) # F12 basis sets basis_cc_pvdz_f12 = BasisFamily('cc-pvdz-f12',zeta=2) basis_cc_pvtz_f12 = BasisFamily('cc-pvtz-f12',zeta=3) basis_cc_pvqz_f12 = BasisFamily('cc-pvqz-f12',zeta=4) # basis_cc_pv5z_f12 = BasisFamily('cc-pV5Z-F12') # ORCA manual suggests for F12 basis sets Dunning's zeta+1 basis_cc_pvdz_f12.add_jkfit('cc-pvtz-jkfit') basis_cc_pvtz_f12.add_jkfit('cc-pvqz-jkfit') basis_cc_pvqz_f12.add_jkfit('cc-pv5z-jkfit') basis_cc_pvdz_f12.add_rifit('cc-pvtz-ri') 
basis_cc_pvtz_f12.add_rifit('cc-pvqz-ri')
basis_cc_pvqz_f12.add_rifit('cc-pv5z-ri')
basisfamily_list.append(basis_cc_pvdz_f12)
basisfamily_list.append(basis_cc_pvtz_f12)
basisfamily_list.append(basis_cc_pvqz_f12)
# basisfamily_list.append(basis_cc_pv5z_f12)
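# Illustrative helper (not part of the upstream module): given the registry
# built above, fetch the JK fitting partner recorded for an orbital basis.
# This sketch assumes BasisFamily stores its display name as `.ornate` and
# that add_jkfit() stored the partner as `.jkfit`; both are assumptions here,
# guarded with getattr so the lookup degrades to None if they do not hold.
def _corresponding_jkfit(orbital_name):
    """Return the registered JKFIT partner for *orbital_name*, or None."""
    for fam in basisfamily_list:
        if getattr(fam, 'ornate', '').lower() == orbital_name.lower():
            return getattr(fam, 'jkfit', None)
    return None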
import os

import pytest

import psi4

pytestmark = pytest.mark.quick


def test_fcidump_scf_energy():
    """Compare FCIDUMP computed SCF energy against call to energy()"""
    Ne = psi4.geometry("""
      Ne 0 0 0
    """)
    psi4.set_options({'basis': 'cc-pVDZ',
                      'scf_type': 'pk',
                      'reference': 'uhf',
                      'd_convergence': 1e-8,
                      'e_convergence': 1e-8
                      })
    scf_e, scf_wfn = psi4.energy('scf', return_wfn=True)
    psi4.fcidump(scf_wfn, fname='FCIDUMP_SCF', oe_ints=['EIGENVALUES'])
    intdump = psi4.fcidump_from_file('FCIDUMP_SCF')
    e_dict = psi4.energies_from_fcidump(intdump)
    fcidump_e = e_dict['SCF TOTAL ENERGY']
    assert psi4.compare_values(scf_e, fcidump_e, 5, 'SCF energy')  #TEST


def test_fcidump_mp2_energy():
    """Compare FCIDUMP computed MP2 energy against call to energy()"""
    Ne = psi4.geometry("""
      Ne 0 0 0
    """)
    psi4.set_options({'basis': 'cc-pVDZ',
                      'scf_type': 'pk',
                      'reference': 'uhf',
                      'd_convergence': 1e-8,
                      'e_convergence': 1e-8
                      })
    mp2_e, mp2_wfn = psi4.energy('mp2', return_wfn=True)
    psi4.fcidump(mp2_wfn, fname='FCIDUMP_MP2', oe_ints=['EIGENVALUES'])
    intdump = psi4.fcidump_from_file('FCIDUMP_MP2')
    e_dict = psi4.energies_from_fcidump(intdump)
    fcidump_e = e_dict['SCF TOTAL ENERGY'] + e_dict['MP2 CORRELATION ENERGY']
    assert psi4.compare_values(mp2_e, fcidump_e, 5, 'MP2 energy')  #TEST
# --- record metadata ---
# repository: CDSherrill/psi4
# test file:  tests/pytests/test_fcidump_energy.py
# code file:  psi4/driver/qcdb/basislistother.py
"""Helper methods to handle the time in Home Assistant.""" import datetime as dt import re from typing import Any, Dict, List, Optional, Union, cast import ciso8601 import pytz import pytz.exceptions as pytzexceptions import pytz.tzinfo as pytzinfo from homeassistant.const import MATCH_ALL DATE_STR_FORMAT = "%Y-%m-%d" NATIVE_UTC = dt.timezone.utc UTC = pytz.utc DEFAULT_TIME_ZONE: dt.tzinfo = pytz.utc # Copyright (c) Django Software Foundation and individual contributors. # All rights reserved. # https://github.com/django/django/blob/master/LICENSE DATETIME_RE = re.compile( r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})" r"[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})" r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?" r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$" ) def set_default_time_zone(time_zone: dt.tzinfo) -> None: """Set a default time zone to be used when none is specified. Async friendly. """ global DEFAULT_TIME_ZONE # pylint: disable=global-statement # NOTE: Remove in the future in favour of typing assert isinstance(time_zone, dt.tzinfo) DEFAULT_TIME_ZONE = time_zone def get_time_zone(time_zone_str: str) -> Optional[dt.tzinfo]: """Get time zone from string. Return None if unable to determine. Async friendly. """ try: return pytz.timezone(time_zone_str) except pytzexceptions.UnknownTimeZoneError: return None def utcnow() -> dt.datetime: """Get now in UTC time.""" return dt.datetime.now(NATIVE_UTC) def now(time_zone: Optional[dt.tzinfo] = None) -> dt.datetime: """Get now in specified time zone.""" return dt.datetime.now(time_zone or DEFAULT_TIME_ZONE) def as_utc(dattim: dt.datetime) -> dt.datetime: """Return a datetime as UTC time. Assumes datetime without tzinfo to be in the DEFAULT_TIME_ZONE. """ if dattim.tzinfo == UTC: return dattim if dattim.tzinfo is None: dattim = DEFAULT_TIME_ZONE.localize(dattim) # type: ignore return dattim.astimezone(UTC) def as_timestamp(dt_value: dt.datetime) -> float: """Convert a date/time into a unix time (seconds since 1970).""" if hasattr(dt_value, "timestamp"): parsed_dt: Optional[dt.datetime] = dt_value else: parsed_dt = parse_datetime(str(dt_value)) if parsed_dt is None: raise ValueError("not a valid date/time.") return parsed_dt.timestamp() def as_local(dattim: dt.datetime) -> dt.datetime: """Convert a UTC datetime object to local time zone.""" if dattim.tzinfo == DEFAULT_TIME_ZONE: return dattim if dattim.tzinfo is None: dattim = UTC.localize(dattim) return dattim.astimezone(DEFAULT_TIME_ZONE) def utc_from_timestamp(timestamp: float) -> dt.datetime: """Return a UTC time from a timestamp.""" return UTC.localize(dt.datetime.utcfromtimestamp(timestamp)) def start_of_local_day( dt_or_d: Union[dt.date, dt.datetime, None] = None ) -> dt.datetime: """Return local datetime object of start of day from date or datetime.""" if dt_or_d is None: date: dt.date = now().date() elif isinstance(dt_or_d, dt.datetime): date = dt_or_d.date() else: date = dt_or_d return DEFAULT_TIME_ZONE.localize( # type: ignore dt.datetime.combine(date, dt.time()) ) # Copyright (c) Django Software Foundation and individual contributors. # All rights reserved. # https://github.com/django/django/blob/master/LICENSE def parse_datetime(dt_str: str) -> Optional[dt.datetime]: """Parse a string and return a datetime.datetime. This function supports time zone offsets. When the input contains one, the output uses a timezone with a fixed offset from UTC. Raises ValueError if the input is well formatted but not a valid datetime. Returns None if the input isn't well formatted. 
""" try: return ciso8601.parse_datetime(dt_str) except (ValueError, IndexError): pass match = DATETIME_RE.match(dt_str) if not match: return None kws: Dict[str, Any] = match.groupdict() if kws["microsecond"]: kws["microsecond"] = kws["microsecond"].ljust(6, "0") tzinfo_str = kws.pop("tzinfo") tzinfo: Optional[dt.tzinfo] = None if tzinfo_str == "Z": tzinfo = UTC elif tzinfo_str is not None: offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0 offset_hours = int(tzinfo_str[1:3]) offset = dt.timedelta(hours=offset_hours, minutes=offset_mins) if tzinfo_str[0] == "-": offset = -offset tzinfo = dt.timezone(offset) kws = {k: int(v) for k, v in kws.items() if v is not None} kws["tzinfo"] = tzinfo return dt.datetime(**kws) def parse_date(dt_str: str) -> Optional[dt.date]: """Convert a date string to a date object.""" try: return dt.datetime.strptime(dt_str, DATE_STR_FORMAT).date() except ValueError: # If dt_str did not match our format return None def parse_time(time_str: str) -> Optional[dt.time]: """Parse a time string (00:20:00) into Time object. Return None if invalid. """ parts = str(time_str).split(":") if len(parts) < 2: return None try: hour = int(parts[0]) minute = int(parts[1]) second = int(parts[2]) if len(parts) > 2 else 0 return dt.time(hour, minute, second) except ValueError: # ValueError if value cannot be converted to an int or not in range return None def get_age(date: dt.datetime) -> str: """ Take a datetime and return its "age" as a string. The age can be in second, minute, hour, day, month or year. Only the biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will be returned. Make sure date is not in the future, or else it won't work. """ def formatn(number: int, unit: str) -> str: """Add "unit" if it's plural.""" if number == 1: return f"1 {unit}" return f"{number:d} {unit}s" delta = (now() - date).total_seconds() rounded_delta = round(delta) units = ["second", "minute", "hour", "day", "month"] factors = [60, 60, 24, 30, 12] selected_unit = "year" for i, next_factor in enumerate(factors): if rounded_delta < next_factor: selected_unit = units[i] break delta /= next_factor rounded_delta = round(delta) return formatn(rounded_delta, selected_unit) def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> List[int]: """Parse the time expression part and return a list of times to match.""" if parameter is None or parameter == MATCH_ALL: res = list(range(min_value, max_value + 1)) elif isinstance(parameter, str): if parameter.startswith("/"): parameter = int(parameter[1:]) res = [x for x in range(min_value, max_value + 1) if x % parameter == 0] else: res = [int(parameter)] elif not hasattr(parameter, "__iter__"): res = [int(parameter)] else: res = list(sorted(int(x) for x in parameter)) for val in res: if val < min_value or val > max_value: raise ValueError( f"Time expression '{parameter}': parameter {val} out of range " f"({min_value} to {max_value})" ) return res def find_next_time_expression_time( now: dt.datetime, # pylint: disable=redefined-outer-name seconds: List[int], minutes: List[int], hours: List[int], ) -> dt.datetime: """Find the next datetime from now for which the time expression matches. The algorithm looks at each time unit separately and tries to find the next one that matches for each. If any of them would roll over, all time units below that are reset to the first matching value. Timezones are also handled (the tzinfo of the now object is used), including daylight saving time. 
""" if not seconds or not minutes or not hours: raise ValueError("Cannot find a next time: Time expression never matches!") def _lower_bound(arr: List[int], cmp: int) -> Optional[int]: """Return the first value in arr greater or equal to cmp. Return None if no such value exists. """ left = 0 right = len(arr) while left < right: mid = (left + right) // 2 if arr[mid] < cmp: left = mid + 1 else: right = mid if left == len(arr): return None return arr[left] result = now.replace(microsecond=0) # Match next second next_second = _lower_bound(seconds, result.second) if next_second is None: # No second to match in this minute. Roll-over to next minute. next_second = seconds[0] result += dt.timedelta(minutes=1) result = result.replace(second=next_second) # Match next minute next_minute = _lower_bound(minutes, result.minute) if next_minute != result.minute: # We're in the next minute. Seconds needs to be reset. result = result.replace(second=seconds[0]) if next_minute is None: # No minute to match in this hour. Roll-over to next hour. next_minute = minutes[0] result += dt.timedelta(hours=1) result = result.replace(minute=next_minute) # Match next hour next_hour = _lower_bound(hours, result.hour) if next_hour != result.hour: # We're in the next hour. Seconds+minutes needs to be reset. result = result.replace(second=seconds[0], minute=minutes[0]) if next_hour is None: # No minute to match in this day. Roll-over to next day. next_hour = hours[0] result += dt.timedelta(days=1) result = result.replace(hour=next_hour) if result.tzinfo is None: return result # Now we need to handle timezones. We will make this datetime object # "naive" first and then re-convert it to the target timezone. # This is so that we can call pytz's localize and handle DST changes. tzinfo: pytzinfo.DstTzInfo = UTC if result.tzinfo == NATIVE_UTC else result.tzinfo result = result.replace(tzinfo=None) try: result = tzinfo.localize(result, is_dst=None) except pytzexceptions.AmbiguousTimeError: # This happens when we're leaving daylight saving time and local # clocks are rolled back. In this case, we want to trigger # on both the DST and non-DST time. So when "now" is in the DST # use the DST-on time, and if not, use the DST-off time. use_dst = bool(now.dst()) result = tzinfo.localize(result, is_dst=use_dst) except pytzexceptions.NonExistentTimeError: # This happens when we're entering daylight saving time and local # clocks are rolled forward, thus there are local times that do # not exist. In this case, we want to trigger on the next time # that *does* exist. # In the worst case, this will run through all the seconds in the # time shift, but that's max 3600 operations for once per year result = result.replace(tzinfo=tzinfo) + dt.timedelta(seconds=1) return find_next_time_expression_time(result, seconds, minutes, hours) result_dst = cast(dt.timedelta, result.dst()) now_dst = cast(dt.timedelta, now.dst()) or dt.timedelta(0) if result_dst >= now_dst: return result # Another edge-case when leaving DST: # When now is in DST and ambiguous *and* the next trigger time we *should* # trigger is ambiguous and outside DST, the excepts above won't catch it. 
# For example: if triggering on 2:30 and now is 28.10.2018 2:30 (in DST) # we should trigger next on 28.10.2018 2:30 (out of DST), but our # algorithm above would produce 29.10.2018 2:30 (out of DST) # Step 1: Check if now is ambiguous try: tzinfo.localize(now.replace(tzinfo=None), is_dst=None) return result except pytzexceptions.AmbiguousTimeError: pass # Step 2: Check if result of (now - DST) is ambiguous. check = now - now_dst check_result = find_next_time_expression_time(check, seconds, minutes, hours) try: tzinfo.localize(check_result.replace(tzinfo=None), is_dst=None) return result except pytzexceptions.AmbiguousTimeError: pass # OK, edge case does apply. We must override the DST to DST-off check_result = tzinfo.localize(check_result.replace(tzinfo=None), is_dst=False) return check_result
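# Illustrative usage of the time-expression helpers above (a sketch, not part
# of the Home Assistant module): match second 0 of minute 30 in every hour,
# then find the next occurrence relative to the current local time.
if __name__ == "__main__":
    _seconds = parse_time_expression(0, 0, 59)
    _minutes = parse_time_expression(30, 0, 59)
    _hours = parse_time_expression(MATCH_ALL, 0, 23)
    # now() is timezone-aware, so DST handling above is exercised too.
    print(find_next_time_expression_time(now(), _seconds, _minutes, _hours))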
"""Test ZHA Device Tracker.""" from datetime import timedelta import time import pytest import zigpy.zcl.clusters.general as general from homeassistant.components.device_tracker import DOMAIN, SOURCE_TYPE_ROUTER from homeassistant.components.zha.core.registries import ( SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, ) from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE import homeassistant.util.dt as dt_util from .common import ( async_enable_traffic, async_test_rejoin, find_entity_id, send_attributes_report, ) from tests.common import async_fire_time_changed @pytest.fixture def zigpy_device_dt(zigpy_device_mock): """Device tracker zigpy device.""" endpoints = { 1: { "in_clusters": [ general.Basic.cluster_id, general.PowerConfiguration.cluster_id, general.Identify.cluster_id, general.PollControl.cluster_id, general.BinaryInput.cluster_id, ], "out_clusters": [general.Identify.cluster_id, general.Ota.cluster_id], "device_type": SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, } } return zigpy_device_mock(endpoints) async def test_device_tracker(hass, zha_device_joined_restored, zigpy_device_dt): """Test zha device tracker platform.""" zha_device = await zha_device_joined_restored(zigpy_device_dt) cluster = zigpy_device_dt.endpoints.get(1).power entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None assert hass.states.get(entity_id).state == STATE_HOME await async_enable_traffic(hass, [zha_device], enabled=False) # test that the device tracker was created and that it is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE zigpy_device_dt.last_seen = time.time() - 120 next_update = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() # allow traffic to flow through the gateway and device await async_enable_traffic(hass, [zha_device]) # test that the state has changed from unavailable to not home assert hass.states.get(entity_id).state == STATE_NOT_HOME # turn state flip await send_attributes_report( hass, cluster, {0x0000: 0, 0x0020: 23, 0x0021: 200, 0x0001: 2} ) zigpy_device_dt.last_seen = time.time() + 10 next_update = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_HOME entity = hass.data[DOMAIN].get_entity(entity_id) assert entity.is_connected is True assert entity.source_type == SOURCE_TYPE_ROUTER assert entity.battery_level == 100 # test adding device tracker to the network and HA await async_test_rejoin(hass, zigpy_device_dt, [cluster], (2,)) assert hass.states.get(entity_id).state == STATE_HOME
# --- record metadata ---
# repository: partofthething/home-assistant
# test file:  tests/components/zha/test_device_tracker.py
# code file:  homeassistant/util/dt.py
"""HF-SEF dataset.""" from .hf_sef import data_path
# -*- coding: utf-8 -*- # Authors: Teon Brooks <teon.brooks@gmail.com> # Martin Billinger <martin.billinger@tugraz.at> # Alan Leggitt <alan.leggitt@ucsf.edu> # Alexandre Barachant <alexandre.barachant@gmail.com> # Stefan Appelhoff <stefan.appelhoff@mailbox.org> # Joan Massich <mailsik@gmail.com> # # License: BSD (3-clause) import os.path as op import inspect import numpy as np from numpy.testing import (assert_array_almost_equal, assert_array_equal, assert_equal, assert_allclose) from scipy.io import loadmat import pytest from mne import pick_types, Annotations from mne.datasets import testing from mne.utils import run_tests_if_main, requires_pandas from mne.io import read_raw_edf, read_raw_bdf from mne.io.tests.test_raw import _test_raw_reader from mne.io.edf.edf import _get_edf_default_event_id from mne.io.edf.edf import _read_annotations_edf from mne.io.edf.edf import _read_ch from mne.io.edf.edf import _parse_prefilter_string from mne.io.pick import channel_indices_by_type from mne.annotations import events_from_annotations, read_annotations from mne.io.meas_info import _kind_dict as _KIND_DICT FILE = inspect.getfile(inspect.currentframe()) data_dir = op.join(op.dirname(op.abspath(FILE)), 'data') montage_path = op.join(data_dir, 'biosemi.hpts') # XXX: missing reader bdf_path = op.join(data_dir, 'test.bdf') edf_path = op.join(data_dir, 'test.edf') duplicate_channel_labels_path = op.join(data_dir, 'duplicate_channel_labels.edf') edf_uneven_path = op.join(data_dir, 'test_uneven_samp.edf') bdf_eeglab_path = op.join(data_dir, 'test_bdf_eeglab.mat') edf_eeglab_path = op.join(data_dir, 'test_edf_eeglab.mat') edf_uneven_eeglab_path = op.join(data_dir, 'test_uneven_samp.mat') edf_stim_channel_path = op.join(data_dir, 'test_edf_stim_channel.edf') edf_txt_stim_channel_path = op.join(data_dir, 'test_edf_stim_channel.txt') data_path = testing.data_path(download=False) edf_stim_resamp_path = op.join(data_path, 'EDF', 'test_edf_stim_resamp.edf') edf_overlap_annot_path = op.join(data_path, 'EDF', 'test_edf_overlapping_annotations.edf') edf_reduced = op.join(data_path, 'EDF', 'test_reduced.edf') bdf_stim_channel_path = op.join(data_path, 'BDF', 'test_bdf_stim_channel.bdf') bdf_multiple_annotations_path = op.join(data_path, 'BDF', 'multiple_annotation_chans.bdf') test_generator_bdf = op.join(data_path, 'BDF', 'test_generator_2.bdf') test_generator_edf = op.join(data_path, 'EDF', 'test_generator_2.edf') edf_annot_sub_s_path = op.join(data_path, 'EDF', 'subsecond_starttime.edf') eog = ['REOG', 'LEOG', 'IEOG'] misc = ['EXG1', 'EXG5', 'EXG8', 'M1', 'M2'] def test_orig_units(): """Test exposure of original channel units.""" raw = read_raw_edf(edf_path, preload=True) # Test original units orig_units = raw._orig_units assert len(orig_units) == len(raw.ch_names) assert orig_units['A1'] == 'µV' # formerly 'uV' edit by _check_orig_units def test_bdf_data(): """Test reading raw bdf files.""" raw_py = _test_raw_reader(read_raw_bdf, input_fname=bdf_path, eog=eog, misc=misc, exclude=['M2', 'IEOG']) assert len(raw_py.ch_names) == 71 raw_py = _test_raw_reader(read_raw_bdf, input_fname=bdf_path, montage='biosemi64', eog=eog, misc=misc, exclude=['M2', 'IEOG']) assert len(raw_py.ch_names) == 71 assert 'RawEDF' in repr(raw_py) picks = pick_types(raw_py.info, meg=False, eeg=True, exclude='bads') data_py, _ = raw_py[picks] # this .mat was generated using the EEG Lab Biosemi Reader raw_eeglab = loadmat(bdf_eeglab_path) raw_eeglab = raw_eeglab['data'] * 1e-6 # data are stored in microvolts data_eeglab = raw_eeglab[picks] # bdf 
saved as a single, resolution to seven decimal points in matlab assert_array_almost_equal(data_py, data_eeglab, 8) # Manually checking that float coordinates are imported assert (raw_py.info['chs'][0]['loc']).any() assert (raw_py.info['chs'][25]['loc']).any() assert (raw_py.info['chs'][63]['loc']).any() @testing.requires_testing_data def test_bdf_crop_save_stim_channel(tmpdir): """Test EDF with various sampling rates.""" raw = read_raw_bdf(bdf_stim_channel_path) raw.save(tmpdir.join('test-raw.fif'), tmin=1.2, tmax=4.0, overwrite=True) @testing.requires_testing_data @pytest.mark.parametrize('fname', [ edf_reduced, edf_overlap_annot_path, ]) @pytest.mark.parametrize('stim_channel', (None, False, 'auto')) def test_edf_others(fname, stim_channel): """Test EDF with various sampling rates and overlapping annotations.""" _test_raw_reader( read_raw_edf, input_fname=fname, stim_channel=stim_channel, verbose='error') def test_edf_data_broken(tmpdir): """Test edf files.""" raw = _test_raw_reader(read_raw_edf, input_fname=edf_path, exclude=['Ergo-Left', 'H10'], verbose='error') raw_py = read_raw_edf(edf_path) data = raw_py.get_data() assert_equal(len(raw.ch_names) + 2, len(raw_py.ch_names)) # Test with number of records not in header (-1). broken_fname = op.join(tmpdir, 'broken.edf') with open(edf_path, 'rb') as fid_in: fid_in.seek(0, 2) n_bytes = fid_in.tell() fid_in.seek(0, 0) rbytes = fid_in.read() with open(broken_fname, 'wb') as fid_out: fid_out.write(rbytes[:236]) fid_out.write(b'-1 ') fid_out.write(rbytes[244:244 + int(n_bytes * 0.4)]) with pytest.warns(RuntimeWarning, match='records .* not match the file size'): raw = read_raw_edf(broken_fname, preload=True) read_raw_edf(broken_fname, exclude=raw.ch_names[:132], preload=True) # Test with \x00's in the data with open(broken_fname, 'wb') as fid_out: fid_out.write(rbytes[:184]) assert rbytes[184:192] == b'36096 ' fid_out.write(rbytes[184:192].replace(b' ', b'\x00')) fid_out.write(rbytes[192:]) raw_py = read_raw_edf(broken_fname) data_new = raw_py.get_data() assert_allclose(data, data_new) def test_duplicate_channel_labels_edf(): """Test reading edf file with duplicate channel names.""" EXPECTED_CHANNEL_NAMES = ['EEG F1-Ref-0', 'EEG F2-Ref', 'EEG F1-Ref-1'] with pytest.warns(RuntimeWarning, match='Channel names are not unique'): raw = read_raw_edf(duplicate_channel_labels_path, preload=False) assert raw.ch_names == EXPECTED_CHANNEL_NAMES def test_parse_annotation(tmpdir): """Test parsing the tal channel.""" # test the parser annot = (b'+180\x14Lights off\x14Close door\x14\x00\x00\x00\x00\x00' b'+180\x14Lights off\x14\x00\x00\x00\x00\x00\x00\x00\x00' b'+180\x14Close door\x14\x00\x00\x00\x00\x00\x00\x00\x00' b'+3.14\x1504.20\x14nothing\x14\x00\x00\x00\x00' b'+1800.2\x1525.5\x14Apnea\x14\x00\x00\x00\x00\x00\x00\x00' b'+123\x14\x14\x00\x00\x00\x00\x00\x00\x00') annot_file = tmpdir.join('annotations.txt') annot_file.write(annot) annot = [a for a in bytes(annot)] annot[1::2] = [a * 256 for a in annot[1::2]] tal_channel_A = np.array(list(map(sum, zip(annot[0::2], annot[1::2]))), dtype=np.int64) with open(str(annot_file), 'rb') as fid: # ch_data = np.fromfile(fid, dtype=np.int16, count=len(annot)) tal_channel_B = _read_ch(fid, subtype='EDF', dtype=np.int16, samp=(len(annot) - 1) // 2, dtype_byte='This_parameter_is_not_used') for tal_channel in [tal_channel_A, tal_channel_B]: onset, duration, description = _read_annotations_edf([tal_channel]) assert_equal(np.column_stack((onset, duration, description)), [[180., 0., 'Lights off'], [180., 0., 'Close door'], 
[180., 0., 'Lights off'], [180., 0., 'Close door'], [3.14, 4.2, 'nothing'], [1800.2, 25.5, 'Apnea']]) def test_find_events_backward_compatibility(): """Test if events are detected correctly in a typical MNE workflow.""" EXPECTED_EVENTS = [[68, 0, 2], [199, 0, 2], [1024, 0, 3], [1280, 0, 2]] # test an actual file raw = read_raw_edf(edf_path, preload=True) event_id = _get_edf_default_event_id(raw.annotations.description) event_id.pop('start') events_from_EFA, _ = events_from_annotations(raw, event_id=event_id, use_rounding=False) assert_array_equal(events_from_EFA, EXPECTED_EVENTS) @requires_pandas @pytest.mark.parametrize('fname', [edf_path, bdf_path]) def test_to_data_frame(fname): """Test EDF/BDF Raw Pandas exporter.""" ext = op.splitext(fname)[1].lstrip('.').lower() if ext == 'edf': raw = read_raw_edf(fname, preload=True, verbose='error') elif ext == 'bdf': raw = read_raw_bdf(fname, preload=True, verbose='error') _, times = raw[0, :10] df = raw.to_data_frame(index='time') assert (df.columns == raw.ch_names).all() assert_array_equal(np.round(times * 1e3), df.index.values[:10]) df = raw.to_data_frame(index=None, scalings={'eeg': 1e13}) assert 'time' in df.columns assert_array_equal(df.values[:, 1], raw._data[0] * 1e13) def test_read_raw_edf_stim_channel_input_parameters(): """Test edf raw reader deprecation.""" _MSG = "`read_raw_edf` is not supposed to trigger a deprecation warning" with pytest.warns(None) as recwarn: read_raw_edf(edf_path) assert all([w.category != DeprecationWarning for w in recwarn.list]), _MSG for invalid_stim_parameter in ['EDF Annotations', 'BDF Annotations']: with pytest.raises(ValueError, match="stim channel is not supported"): read_raw_edf(edf_path, stim_channel=invalid_stim_parameter) def _assert_annotations_equal(a, b): assert_array_equal(a.onset, b.onset) assert_array_equal(a.duration, b.duration) assert_array_equal(a.description, b.description) assert a.orig_time == b.orig_time def test_read_annot(tmpdir): """Test parsing the tal channel.""" EXPECTED_ANNOTATIONS = [[180.0, 0, 'Lights off'], [180.0, 0, 'Close door'], [180.0, 0, 'Lights off'], [180.0, 0, 'Close door'], [3.14, 4.2, 'nothing'], [1800.2, 25.5, 'Apnea']] EXPECTED_ONSET = [180.0, 180.0, 180.0, 180.0, 3.14, 1800.2] EXPECTED_DURATION = [0, 0, 0, 0, 4.2, 25.5] EXPECTED_DESC = ['Lights off', 'Close door', 'Lights off', 'Close door', 'nothing', 'Apnea'] EXPECTED_ANNOTATIONS = Annotations(onset=EXPECTED_ONSET, duration=EXPECTED_DURATION, description=EXPECTED_DESC, orig_time=None) annot = (b'+180\x14Lights off\x14Close door\x14\x00\x00\x00\x00\x00' b'+180\x14Lights off\x14\x00\x00\x00\x00\x00\x00\x00\x00' b'+180\x14Close door\x14\x00\x00\x00\x00\x00\x00\x00\x00' b'+3.14\x1504.20\x14nothing\x14\x00\x00\x00\x00' b'+1800.2\x1525.5\x14Apnea\x14\x00\x00\x00\x00\x00\x00\x00' b'+123\x14\x14\x00\x00\x00\x00\x00\x00\x00') annot_file = tmpdir.join('annotations.txt') annot_file.write(annot) onset, duration, desc = _read_annotations_edf(annotations=str(annot_file)) annotation = Annotations(onset=onset, duration=duration, description=desc, orig_time=None) _assert_annotations_equal(annotation, EXPECTED_ANNOTATIONS) # Now test when reading from buffer of data with open(str(annot_file), 'rb') as fid: ch_data = np.fromfile(fid, dtype=np.int16, count=len(annot)) onset, duration, desc = _read_annotations_edf([ch_data]) annotation = Annotations(onset=onset, duration=duration, description=desc, orig_time=None) _assert_annotations_equal(annotation, EXPECTED_ANNOTATIONS) @testing.requires_testing_data 
@pytest.mark.parametrize('fname', [test_generator_edf, test_generator_bdf]) def test_read_annotations(fname, recwarn): """Test IO of annotations from edf and bdf files via regexp.""" annot = read_annotations(fname) assert len(annot.onset) == 2 def test_edf_prefilter_parse(): """Test prefilter strings from header are parsed correctly.""" prefilter_basic = ["HP: 0Hz LP: 0Hz"] highpass, lowpass = _parse_prefilter_string(prefilter_basic) assert_array_equal(highpass, ["0"]) assert_array_equal(lowpass, ["0"]) prefilter_normal_multi_ch = ["HP: 1Hz LP: 30Hz"] * 10 highpass, lowpass = _parse_prefilter_string(prefilter_normal_multi_ch) assert_array_equal(highpass, ["1"] * 10) assert_array_equal(lowpass, ["30"] * 10) prefilter_unfiltered_ch = prefilter_normal_multi_ch + [""] highpass, lowpass = _parse_prefilter_string(prefilter_unfiltered_ch) assert_array_equal(highpass, ["1"] * 10) assert_array_equal(lowpass, ["30"] * 10) prefilter_edf_specs_doc = ["HP:0.1Hz LP:75Hz N:50Hz"] highpass, lowpass = _parse_prefilter_string(prefilter_edf_specs_doc) assert_array_equal(highpass, ["0.1"]) assert_array_equal(lowpass, ["75"]) @testing.requires_testing_data @pytest.mark.parametrize('fname', [test_generator_edf, test_generator_bdf]) def test_load_generator(fname, recwarn): """Test IO of annotations from edf and bdf files with raw info.""" ext = op.splitext(fname)[1][1:].lower() if ext == 'edf': raw = read_raw_edf(fname) elif ext == 'bdf': raw = read_raw_bdf(fname) assert len(raw.annotations.onset) == 2 found_types = [k for k, v in channel_indices_by_type(raw.info, picks=None).items() if v] assert len(found_types) == 1 events, event_id = events_from_annotations(raw) ch_names = ['squarewave', 'ramp', 'pulse', 'ECG', 'noise', 'sine 1 Hz', 'sine 8 Hz', 'sine 8.5 Hz', 'sine 15 Hz', 'sine 17 Hz', 'sine 50 Hz'] assert raw.get_data().shape == (11, 120000) assert raw.ch_names == ch_names assert event_id == {'RECORD START': 2, 'REC STOP': 1} assert_array_equal(events, [[0, 0, 2], [120000, 0, 1]]) @pytest.mark.parametrize('EXPECTED, test_input', [ pytest.param({'stAtUs': 'stim', 'tRigGer': 'stim', 'sine 1 Hz': 'eeg'}, 'auto', id='auto'), pytest.param({'stAtUs': 'eeg', 'tRigGer': 'eeg', 'sine 1 Hz': 'eeg'}, None, id='None'), pytest.param({'stAtUs': 'eeg', 'tRigGer': 'eeg', 'sine 1 Hz': 'stim'}, 'sine 1 Hz', id='single string'), pytest.param({'stAtUs': 'eeg', 'tRigGer': 'eeg', 'sine 1 Hz': 'stim'}, 2, id='single int'), pytest.param({'stAtUs': 'eeg', 'tRigGer': 'eeg', 'sine 1 Hz': 'stim'}, -1, id='single int (revers indexing)'), pytest.param({'stAtUs': 'stim', 'tRigGer': 'stim', 'sine 1 Hz': 'eeg'}, [0, 1], id='int list')]) def test_edf_stim_ch_pick_up(test_input, EXPECTED): """Test stim_channel.""" # This is fragile for EEG/EEG-CSD, so just omit csd TYPE_LUT = {v[0]: k for k, v in _KIND_DICT.items() if k != 'csd'} fname = op.join(data_dir, 'test_stim_channel.edf') raw = read_raw_edf(fname, stim_channel=test_input) ch_types = {ch['ch_name']: TYPE_LUT[ch['kind']] for ch in raw.info['chs']} assert ch_types == EXPECTED @testing.requires_testing_data def test_bdf_multiple_annotation_channels(): """Test BDF with multiple annotation channels.""" raw = read_raw_bdf(bdf_multiple_annotations_path) assert len(raw.annotations) == 10 descriptions = np.array(['signal_start', 'EEG-check#1', 'TestStim#1', 'TestStim#2', 'TestStim#3', 'TestStim#4', 'TestStim#5', 'TestStim#6', 'TestStim#7', 'Ligths-Off#1'], dtype='<U12') assert_array_equal(descriptions, raw.annotations.description) run_tests_if_main() @testing.requires_testing_data def 
test_edf_lowpass_zero(): """Test if a lowpass filter of 0Hz is mapped to the Nyquist frequency.""" with pytest.warns(RuntimeWarning, match='too long.*truncated'): raw = read_raw_edf(edf_stim_resamp_path) assert_allclose(raw.info["lowpass"], raw.info["sfreq"] / 2) @testing.requires_testing_data def test_edf_annot_sub_s_onset(): """Test reading of sub-second annotation onsets.""" raw = read_raw_edf(edf_annot_sub_s_path) assert_allclose(raw.annotations.onset, [1.951172, 3.492188])
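# A minimal sketch (not an upstream test) of the read-and-annotate workflow
# the tests above exercise; `edf_path` is the bundled test file defined near
# the top of this module.
def _demo_edf_workflow():
    raw = read_raw_edf(edf_path, preload=False, verbose='error')
    # Turn the EDF annotation channel into MNE-style events.
    events, event_id = events_from_annotations(raw)
    return raw.ch_names, events, event_id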
# --- record metadata ---
# repository: cjayb/mne-python
# test file:  mne/io/edf/tests/test_edf.py
# code file:  mne/datasets/hf_sef/__init__.py
'''
Harvester for the Inter-University Consortium for Political and Social
Research for the SHARE project

Example API call: http://www.icpsr.umich.edu/icpsrweb/ICPSR/oai/studies?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals

from scrapi.base import helpers
from scrapi.base import OAIHarvester


class IcpsrHarvester(OAIHarvester):
    short_name = 'icpsr'
    long_name = 'Inter-University Consortium for Political and Social Research'
    url = 'http://www.icpsr.umich.edu/'

    @property
    def schema(self):
        return helpers.updated_schema(self._schema, {
            "uris": {
                "canonicalUri": ('//dc:identifier/node()',
                                 helpers.compose(create_icpsr_url, helpers.single_result)),
                "objectUris": [('//dc:identifier/node()', icpsr_extract_doi)]
            }
        })

    base_url = 'http://www.icpsr.umich.edu/icpsrweb/ICPSR/oai/studies'
    property_list = ['date', 'identifier', 'type', 'coverage']
    timezone_granularity = False


def create_icpsr_url(identifier):
    return 'http://www.icpsr.umich.edu/icpsrweb/ICPSR/studies/{}'.format(identifier)


def icpsr_extract_doi(identifiers):
    return ['http://dx.doi.org/{}'.format(item) for item in identifiers if '10.' in item]
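# Quick illustration of the helpers above (the study number and DOI below are
# made-up examples, not real identifiers):
#     create_icpsr_url('12345')
#     # -> 'http://www.icpsr.umich.edu/icpsrweb/ICPSR/studies/12345'
#     icpsr_extract_doi(['12345', '10.3886/ICPSR12345.v1'])
#     # -> ['http://dx.doi.org/10.3886/ICPSR12345.v1']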
import pytest import six from celery.schedules import crontab from scrapi import registry from scrapi.base import BaseHarvester from scrapi.base import HarvesterMeta @pytest.fixture def mock_registry(monkeypatch): return registry class TestHarvesterMeta(object): def test_meta_records(self, mock_registry): @six.add_metaclass(HarvesterMeta) class TestClass(object): long_name = 'test' short_name = 'test' url = 'test' run_at = {} assert isinstance(mock_registry['test'], TestClass) def test_beat_schedule(self, mock_registry): for key, val in mock_registry.items(): assert(val.short_name) assert(val.long_name) assert(val.url) assert(isinstance(val.run_at, dict)) def test_beat_schedule_adds(self, mock_registry): @six.add_metaclass(HarvesterMeta) class TestClass(object): short_name = 'test' run_at = { 'hour': 1, 'minute': 1, 'day_of_week': 'mon', } assert mock_registry.beat_schedule['run_test'] == { 'args': ['test'], 'task': 'scrapi.tasks.run_harvester', 'schedule': crontab(**TestClass.run_at), } def test_raises_key_error(self, mock_registry): with pytest.raises(KeyError) as e: mock_registry['FabianVF'] assert 'No harvester named "FabianVF"' in str(e.value) class TestHarvesterBase(object): ERR_MSG = 'Can\'t instantiate abstract class TestHarvester with abstract methods {}' def test_requires_short_name(self): with pytest.raises(TypeError) as e: class TestHarvester(BaseHarvester): long_name = 'test' file_format = 'test' url = 'test' harvest = lambda x: x normalize = lambda x: x TestHarvester() assert self.ERR_MSG.format('short_name') in str(e.value) def test_requires_long_name(self): with pytest.raises(TypeError) as e: class TestHarvester(BaseHarvester): short_name = 'test' file_format = 'test' url = 'test' harvest = lambda x: x normalize = lambda x: x TestHarvester() assert self.ERR_MSG.format('long_name') in str(e.value) def test_requires_url(self): with pytest.raises(TypeError) as e: class TestHarvester(BaseHarvester): short_name = 'test' long_name = 'test' file_format = 'test' harvest = lambda x: x normalize = lambda x: x TestHarvester() assert self.ERR_MSG.format('url') in str(e.value) def test_requires_file_format(self): with pytest.raises(TypeError) as e: class TestHarvester(BaseHarvester): long_name = 'test' short_name = 'test' url = 'test' harvest = lambda x: x normalize = lambda x: x TestHarvester() assert self.ERR_MSG.format('file_format') in str(e.value) def test_requires_harvest(self): with pytest.raises(TypeError) as e: class TestHarvester(BaseHarvester): long_name = 'test' short_name = 'test' url = 'test' file_format = 'test' normalize = lambda x: x TestHarvester() assert self.ERR_MSG.format('harvest') in str(e.value) def test_requires_normalize(self): with pytest.raises(TypeError) as e: class TestHarvester(BaseHarvester): long_name = 'test' short_name = 'test' url = 'test' file_format = 'test' harvest = lambda x: x TestHarvester() assert self.ERR_MSG.format('normalize') in str(e.value)
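# For reference, the smallest class body that satisfies BaseHarvester's
# abstract surface exercised above (kept as a comment because the real
# harvest/normalize signatures are not shown in this file; the lambdas
# mirror the placeholder style the tests themselves use):
#     class DemoHarvester(BaseHarvester):
#         short_name = 'demo'
#         long_name = 'Demo Harvester'
#         url = 'http://example.com'
#         file_format = 'xml'
#         harvest = lambda x: x
#         normalize = lambda x: x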
# --- record metadata ---
# repository: felliott/scrapi
# test file:  tests/test_base.py
# code file:  scrapi/harvesters/icpsr.py
# Authors: Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2009 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

from __future__ import absolute_import

import collections
import logging
import os
import io
import pwd
import grp
import re
import stat
import tempfile
from tempfile import NamedTemporaryFile
import shutil

import cryptography.x509

from ipaplatform.constants import constants
from ipaplatform.paths import paths
from ipapython.dn import DN
from ipapython.kerberos import Principal
from ipapython import ipautil
from ipalib import x509  # pylint: disable=ipa-forbidden-import

logger = logging.getLogger(__name__)

CA_NICKNAME_FMT = "%s IPA CA"

NSS_DBM_FILES = ("cert8.db", "key3.db", "secmod.db")
NSS_SQL_FILES = ("cert9.db", "key4.db", "pkcs11.txt")
NSS_FILES = NSS_DBM_FILES + NSS_SQL_FILES + ("pwdfile.txt",)

TrustFlags = collections.namedtuple('TrustFlags', 'has_key trusted ca usages')

EMPTY_TRUST_FLAGS = TrustFlags(False, None, None, None)

IPA_CA_TRUST_FLAGS = TrustFlags(
    False, True, True, frozenset({
        x509.EKU_SERVER_AUTH,
        x509.EKU_CLIENT_AUTH,
        x509.EKU_CODE_SIGNING,
        x509.EKU_EMAIL_PROTECTION,
        x509.EKU_PKINIT_CLIENT_AUTH,
        x509.EKU_PKINIT_KDC,
    }),
)

EXTERNAL_CA_TRUST_FLAGS = TrustFlags(
    False, True, True, frozenset({x509.EKU_SERVER_AUTH}),
)

TRUSTED_PEER_TRUST_FLAGS = TrustFlags(
    False, True, False, frozenset({x509.EKU_SERVER_AUTH}),
)


def get_ca_nickname(realm, format=CA_NICKNAME_FMT):
    return format % realm


def find_cert_from_txt(cert, start=0):
    """
    Given a cert blob (str) which may or may not contain leading and
    trailing text, pull out just the certificate part. This will return
    the FIRST cert in a stream of data.

    :returns: a tuple (IPACertificate, last position in cert)
    """
    s = cert.find('-----BEGIN CERTIFICATE-----', start)
    e = cert.find('-----END CERTIFICATE-----', s)
    if e > 0:
        e = e + 25

    if s < 0 or e < 0:
        raise RuntimeError("Unable to find certificate")

    cert = x509.load_pem_x509_certificate(cert[s:e].encode('utf-8'))
    return (cert, e)


def parse_trust_flags(trust_flags):
    """
    Convert certutil trust flags to TrustFlags object.
""" has_key = 'u' in trust_flags if 'p' in trust_flags: if 'C' in trust_flags or 'P' in trust_flags or 'T' in trust_flags: raise ValueError("cannot be both trusted and not trusted") return False, None, None elif 'C' in trust_flags or 'T' in trust_flags: if 'P' in trust_flags: raise ValueError("cannot be both CA and not CA") ca = True elif 'P' in trust_flags: ca = False else: return TrustFlags(has_key, None, None, frozenset()) trust_flags = trust_flags.split(',') ext_key_usage = set() for i, kp in enumerate((x509.EKU_SERVER_AUTH, x509.EKU_EMAIL_PROTECTION, x509.EKU_CODE_SIGNING)): if 'C' in trust_flags[i] or 'P' in trust_flags[i]: ext_key_usage.add(kp) if 'T' in trust_flags[0]: ext_key_usage.add(x509.EKU_CLIENT_AUTH) return TrustFlags(has_key, True, ca, frozenset(ext_key_usage)) def unparse_trust_flags(trust_flags): """ Convert TrustFlags object to certutil trust flags. """ has_key, trusted, ca, ext_key_usage = trust_flags if trusted is False: if has_key: return 'pu,pu,pu' else: return 'p,p,p' elif trusted is None or ca is None: if has_key: return 'u,u,u' else: return ',,' elif ext_key_usage is None: if ca: if has_key: return 'CTu,Cu,Cu' else: return 'CT,C,C' else: if has_key: return 'Pu,Pu,Pu' else: return 'P,P,P' trust_flags = ['', '', ''] for i, kp in enumerate((x509.EKU_SERVER_AUTH, x509.EKU_EMAIL_PROTECTION, x509.EKU_CODE_SIGNING)): if kp in ext_key_usage: trust_flags[i] += ('C' if ca else 'P') if ca and x509.EKU_CLIENT_AUTH in ext_key_usage: trust_flags[0] += 'T' if has_key: for i in range(3): trust_flags[i] += 'u' trust_flags = ','.join(trust_flags) return trust_flags def verify_kdc_cert_validity(kdc_cert, ca_certs, realm): with NamedTemporaryFile() as kdc_file, NamedTemporaryFile() as ca_file: kdc_file.write(kdc_cert.public_bytes(x509.Encoding.PEM)) kdc_file.flush() x509.write_certificate_list(ca_certs, ca_file.name) ca_file.flush() try: ipautil.run( [paths.OPENSSL, 'verify', '-CAfile', ca_file.name, kdc_file.name], capture_output=True) except ipautil.CalledProcessError as e: raise ValueError(e.output) try: eku = kdc_cert.extensions.get_extension_for_class( cryptography.x509.ExtendedKeyUsage) list(eku.value).index( cryptography.x509.ObjectIdentifier(x509.EKU_PKINIT_KDC)) except (cryptography.x509.ExtensionNotFound, ValueError): raise ValueError("invalid for a KDC") principal = str(Principal(['krbtgt', realm], realm)) gns = x509.process_othernames(kdc_cert.san_general_names) for gn in gns: if isinstance(gn, x509.KRB5PrincipalName) and gn.name == principal: break else: raise ValueError("invalid for realm %s" % realm) class NSSDatabase(object): """A general-purpose wrapper around a NSS cert database For permanent NSS databases, pass the cert DB directory to __init__ For temporary databases, do not pass nssdir, and call close() when done to remove the DB. Alternatively, a NSSDatabase can be used as a context manager that calls close() automatically. """ # Traditionally, we used CertDB for our NSS DB operations, but that class # got too tied to IPA server details, killing reusability. # BaseCertDB is a class that knows nothing about IPA. # Generic NSS DB code should be moved here. 
def __init__(self, nssdir=None, dbtype='auto'): if nssdir is None: self.secdir = tempfile.mkdtemp() self._is_temporary = True if dbtype == 'auto': dbtype = constants.NSS_DEFAULT_DBTYPE else: dbtype = dbtype else: self.secdir = nssdir self._is_temporary = False if dbtype == 'auto': if os.path.isfile(os.path.join(self.secdir, "cert9.db")): dbtype = 'sql' elif os.path.isfile(os.path.join(self.secdir, "cert8.db")): dbtype = 'dbm' else: dbtype = constants.NSS_DEFAULT_DBTYPE self.pwd_file = os.path.join(self.secdir, 'pwdfile.txt') self.dbtype = None self.certdb = self.keydb = self.secmod = None self.filenames = () self._set_filenames(dbtype) def _set_filenames(self, dbtype): self.dbtype = dbtype if dbtype == 'dbm': self.certdb = os.path.join(self.secdir, "cert8.db") self.keydb = os.path.join(self.secdir, "key3.db") self.secmod = os.path.join(self.secdir, "secmod.db") elif dbtype == 'sql': self.certdb = os.path.join(self.secdir, "cert9.db") self.keydb = os.path.join(self.secdir, "key4.db") self.secmod = os.path.join(self.secdir, "pkcs11.txt") else: raise ValueError(dbtype) self.filenames = ( self.certdb, self.keydb, self.secmod, self.pwd_file, ) def close(self): if self._is_temporary: shutil.rmtree(self.secdir) def __enter__(self): return self def __exit__(self, type, value, tb): self.close() def run_certutil(self, args, stdin=None, **kwargs): new_args = [ paths.CERTUTIL, "-d", '{}:{}'.format(self.dbtype, self.secdir) ] new_args.extend(args) new_args.extend(['-f', self.pwd_file]) return ipautil.run(new_args, stdin, **kwargs) def run_pk12util(self, args, stdin=None, **kwargs): new_args = [ paths.PK12UTIL, "-d", '{}:{}'.format(self.dbtype, self.secdir) ] new_args.extend(args) return ipautil.run(new_args, stdin, **kwargs) def create_db(self, user=None, group=None, mode=None, backup=False): """Create cert DB :param user: User owner the secdir :param group: Group owner of the secdir :param mode: Mode of the secdir :param backup: Backup the sedir files """ if mode is not None: dirmode = mode filemode = mode & 0o666 pwdfilemode = mode & 0o660 else: dirmode = 0o750 filemode = 0o640 pwdfilemode = 0o640 uid = -1 gid = -1 if user is not None: uid = pwd.getpwnam(user).pw_uid if group is not None: gid = grp.getgrnam(group).gr_gid if backup: for filename in self.filenames: path = os.path.join(self.secdir, filename) ipautil.backup_file(path) if not os.path.exists(self.secdir): os.makedirs(self.secdir, dirmode) if not os.path.exists(self.pwd_file): # Create the password file for this db with io.open(os.open(self.pwd_file, os.O_CREAT | os.O_WRONLY, pwdfilemode), 'w', closefd=True) as f: f.write(ipautil.ipa_generate_password()) f.flush() self.run_certutil(["-N", "-f", self.pwd_file]) # Finally fix up perms os.chown(self.secdir, uid, gid) os.chmod(self.secdir, dirmode) for filename in self.filenames: path = os.path.join(self.secdir, filename) if os.path.exists(path): os.chown(path, uid, gid) if path == self.pwd_file: new_mode = pwdfilemode else: new_mode = filemode os.chmod(path, new_mode) def convert_db(self, rename_old=True): """Convert DBM database format to SQL database format **WARNING** **WARNING** **WARNING** **WARNING** **WARNING** The caller must ensure that no other process or service is accessing the NSSDB during migration. The DBM format does not support multiple processes. If more than one process opens a DBM NSSDB for writing, the database will become **irreparably corrupted**. 
**WARNING** **WARNING** **WARNING** **WARNING** **WARNING** """ if (self.dbtype == 'sql' or os.path.isfile(os.path.join(self.secdir, "cert9.db"))): raise ValueError( 'NSS DB {} has been migrated already.'.format(self.secdir) ) # use certutil to migrate db to new format # see https://bugzilla.mozilla.org/show_bug.cgi?id=1415912 # https://fedoraproject.org/wiki/Changes/NSSDefaultFileFormatSql args = [ paths.CERTUTIL, '-d', 'sql:{}'.format(self.secdir), '-N', '-f', self.pwd_file, '-@', self.pwd_file ] ipautil.run(args) # retain file ownership and permission, backup old files migration = ( ('cert8.db', 'cert9.db'), ('key3.db', 'key4.db'), ('secmod.db', 'pkcs11.txt'), ) for oldname, newname in migration: oldname = os.path.join(self.secdir, oldname) newname = os.path.join(self.secdir, newname) oldstat = os.stat(oldname) os.chmod(newname, stat.S_IMODE(oldstat.st_mode)) os.chown(newname, oldstat.st_uid, oldstat.st_gid) # XXX also retain SELinux context? self._set_filenames('sql') self.list_certs() # self-test if rename_old: for oldname, _ in migration: # pylint: disable=unused-variable oldname = os.path.join(self.secdir, oldname) os.rename(oldname, oldname + '.migrated') def restore(self): for filename in self.filenames: path = os.path.join(self.secdir, filename) backup_path = path + '.orig' save_path = path + '.ipasave' try: if os.path.exists(path): os.rename(path, save_path) if os.path.exists(backup_path): os.rename(backup_path, path) except OSError as e: logger.debug('%s', e) def list_certs(self): """Return nicknames and cert flags for all certs in the database :return: List of (name, trust_flags) tuples """ result = self.run_certutil(["-L"], capture_output=True) certs = result.output.splitlines() # FIXME, this relies on NSS never changing the formatting of certutil certlist = [] for cert in certs: match = re.match(r'^(.+?)\s+(\w*,\w*,\w*)\s*$', cert) if match: nickname = match.group(1) trust_flags = parse_trust_flags(match.group(2)) certlist.append((nickname, trust_flags)) return tuple(certlist) def list_keys(self): result = self.run_certutil( ["-K"], raiseonerr=False, capture_output=True ) if result.returncode == 255: return () keylist = [] for line in result.output.splitlines(): mo = re.match(r'^<\s*(\d+)>\s+(\w+)\s+([0-9a-z]+)\s+(.*)$', line) if mo is not None: slot, algo, keyid, nick = mo.groups() keylist.append((int(slot), algo, keyid, nick.strip())) return tuple(keylist) def find_server_certs(self): """Return nicknames and cert flags for server certs in the database Server certs have an "u" character in the trust flags. 
        :return: List of (name, trust_flags) tuples
        """
        server_certs = []
        for name, flags in self.list_certs():
            if flags.has_key:
                server_certs.append((name, flags))
        return server_certs

    def get_trust_chain(self, nickname):
        """Return names of certs in a given cert's trust chain

        :param nickname: Name of the cert
        :return: List of certificate names
        """
        root_nicknames = []
        result = self.run_certutil(["-O", "-n", nickname],
                                   capture_output=True)
        chain = result.output.splitlines()

        for c in chain:
            # raw string: the bare '\s' escape was invalid in a normal
            # string literal
            m = re.match(r'\s*"(.*)" \[.*', c)
            if m:
                root_nicknames.append(m.groups()[0])

        return root_nicknames

    def export_pkcs12(self, nickname, pkcs12_filename, pkcs12_passwd=None):
        args = [
            "-o", pkcs12_filename,
            "-n", nickname,
            "-k", self.pwd_file
        ]
        pkcs12_password_file = None
        if pkcs12_passwd is not None:
            pkcs12_password_file = ipautil.write_tmp_file(
                pkcs12_passwd + '\n')
            args.extend(["-w", pkcs12_password_file.name])
        try:
            self.run_pk12util(args)
        except ipautil.CalledProcessError as e:
            if e.returncode == 17:
                raise RuntimeError(
                    "incorrect password for pkcs#12 file %s" %
                    pkcs12_filename)
            elif e.returncode == 10:
                raise RuntimeError(
                    "Failed to open %s" % pkcs12_filename)
            else:
                raise RuntimeError(
                    "unknown error exporting pkcs#12 file %s" %
                    pkcs12_filename)
        finally:
            if pkcs12_password_file is not None:
                pkcs12_password_file.close()

    def import_pkcs12(self, pkcs12_filename, pkcs12_passwd=None):
        args = [
            "-i", pkcs12_filename,
            "-k", self.pwd_file,
            "-v"
        ]
        pkcs12_password_file = None
        if pkcs12_passwd is not None:
            pkcs12_password_file = ipautil.write_tmp_file(
                pkcs12_passwd + '\n')
            args.extend(["-w", pkcs12_password_file.name])
        try:
            self.run_pk12util(args)
        except ipautil.CalledProcessError as e:
            if e.returncode == 17:
                raise RuntimeError(
                    "incorrect password for pkcs#12 file %s" %
                    pkcs12_filename)
            elif e.returncode == 10:
                raise RuntimeError(
                    "Failed to open %s" % pkcs12_filename)
            else:
                raise RuntimeError(
                    "unknown error importing pkcs#12 file %s" %
                    pkcs12_filename)
        finally:
            if pkcs12_password_file is not None:
                pkcs12_password_file.close()

    def import_files(self, files, import_keys=False, key_password=None,
                     key_nickname=None):
        """
        Import certificates and a single private key from multiple files

        The files may be in PEM and DER certificate, PKCS#7 certificate
        chain, PKCS#8 and raw private key and PKCS#12 formats.
:param files: Names of files to import :param import_keys: Whether to import private keys :param key_password: Password to decrypt private keys :param key_nickname: Nickname of the private key to import from PKCS#12 files """ key_file = None extracted_key = None extracted_certs = [] for filename in files: try: with open(filename, 'rb') as f: data = f.read() except IOError as e: raise RuntimeError( "Failed to open %s: %s" % (filename, e.strerror)) # Try to parse the file as PEM file matches = list( re.finditer( br'-----BEGIN (.+?)-----(.*?)-----END \1-----', data, re.DOTALL ) ) if matches: loaded = False for match in matches: body = match.group() label = match.group(1) line = len(data[:match.start() + 1].splitlines()) if label in (b'CERTIFICATE', b'X509 CERTIFICATE', b'X.509 CERTIFICATE'): try: cert = x509.load_pem_x509_certificate(body) except ValueError as e: if label != b'CERTIFICATE': logger.warning( "Skipping certificate in %s at line %s: " "%s", filename, line, e) continue else: extracted_certs.append(cert) loaded = True continue if label in (b'PKCS7', b'PKCS #7 SIGNED DATA', b'CERTIFICATE'): try: certs = x509.pkcs7_to_certs(body) except ipautil.CalledProcessError as e: if label == b'CERTIFICATE': logger.warning( "Skipping certificate in %s at line %s: " "%s", filename, line, e) else: logger.warning( "Skipping PKCS#7 in %s at line %s: %s", filename, line, e) continue else: extracted_certs.extend(certs) loaded = True continue if label in (b'PRIVATE KEY', b'ENCRYPTED PRIVATE KEY', b'RSA PRIVATE KEY', b'DSA PRIVATE KEY', b'EC PRIVATE KEY'): if not import_keys: continue if key_file: raise RuntimeError( "Can't load private key from both %s and %s" % (key_file, filename)) # the args -v2 aes256 -v2prf hmacWithSHA256 are needed # on OpenSSL 1.0.2 (fips mode). 
As soon as FreeIPA # requires OpenSSL 1.1.0 we'll be able to drop them args = [ paths.OPENSSL, 'pkcs8', '-topk8', '-v2', 'aes256', '-v2prf', 'hmacWithSHA256', '-passout', 'file:' + self.pwd_file, ] if ((label != b'PRIVATE KEY' and key_password) or label == b'ENCRYPTED PRIVATE KEY'): key_pwdfile = ipautil.write_tmp_file(key_password) args += [ '-passin', 'file:' + key_pwdfile.name, ] try: result = ipautil.run( args, stdin=body, capture_output=True) except ipautil.CalledProcessError as e: logger.warning( "Skipping private key in %s at line %s: %s", filename, line, e) continue else: extracted_key = result.raw_output key_file = filename loaded = True continue if loaded: continue raise RuntimeError("Failed to load %s" % filename) # Try to load the file as DER certificate try: cert = x509.load_der_x509_certificate(data) except ValueError: pass else: extracted_certs.append(cert) continue # Try to import the file as PKCS#12 file if import_keys: try: self.import_pkcs12(filename, key_password) except RuntimeError: pass else: if key_file: raise RuntimeError( "Can't load private key from both %s and %s" % (key_file, filename)) key_file = filename server_certs = self.find_server_certs() if key_nickname: for nickname, _trust_flags in server_certs: if nickname == key_nickname: break else: raise RuntimeError( "Server certificate \"%s\" not found in %s" % (key_nickname, filename)) else: if len(server_certs) > 1: raise RuntimeError( "%s server certificates found in %s, " "expecting only one" % (len(server_certs), filename)) continue raise RuntimeError("Failed to load %s" % filename) if import_keys and not key_file: raise RuntimeError( "No server certificates found in %s" % (', '.join(files))) for cert in extracted_certs: nickname = str(DN(cert.subject)) self.add_cert(cert, nickname, EMPTY_TRUST_FLAGS) if extracted_key: with tempfile.NamedTemporaryFile() as in_file, \ tempfile.NamedTemporaryFile() as out_file: for cert in extracted_certs: in_file.write(cert.public_bytes(x509.Encoding.PEM)) in_file.write(extracted_key) in_file.flush() out_password = ipautil.ipa_generate_password() out_pwdfile = ipautil.write_tmp_file(out_password) args = [ paths.OPENSSL, 'pkcs12', '-export', '-in', in_file.name, '-out', out_file.name, '-passin', 'file:' + self.pwd_file, '-passout', 'file:' + out_pwdfile.name, ] try: ipautil.run(args) except ipautil.CalledProcessError as e: raise RuntimeError( "No matching certificate found for private key from " "%s" % key_file) self.import_pkcs12(out_file.name, out_password) def trust_root_cert(self, root_nickname, trust_flags): if root_nickname[:7] == "Builtin": logger.debug( "No need to add trust for built-in root CAs, skipping %s", root_nickname) else: trust_flags = unparse_trust_flags(trust_flags) try: self.run_certutil(["-M", "-n", root_nickname, "-t", trust_flags]) except ipautil.CalledProcessError: raise RuntimeError( "Setting trust on %s failed" % root_nickname) def get_cert(self, nickname): """ :param nickname: nickname of the certificate in the NSS database :returns: string in Python2 bytes in Python3 """ args = ['-L', '-n', nickname, '-a'] try: result = self.run_certutil(args, capture_output=True) except ipautil.CalledProcessError: raise RuntimeError("Failed to get %s" % nickname) cert, _start = find_cert_from_txt(result.output, start=0) return cert def has_nickname(self, nickname): try: self.get_cert(nickname) except RuntimeError: # This might be error other than "nickname not found". Beware. 
            return False
        else:
            return True

    def export_pem_cert(self, nickname, location):
        """Export the given cert to a PEM file in the given location"""
        cert = self.get_cert(nickname)
        with open(location, "wb") as fd:
            fd.write(cert.public_bytes(x509.Encoding.PEM))
        os.chmod(location, 0o444)

    def import_pem_cert(self, nickname, flags, location):
        """Import a cert from the given PEM file.

        The file must contain exactly one certificate.
        """
        try:
            with open(location) as fd:
                certs = fd.read()
        except IOError as e:
            raise RuntimeError(
                "Failed to open %s: %s" % (location, e.strerror)
            )

        cert, st = find_cert_from_txt(certs)
        self.add_cert(cert, nickname, flags)

        try:
            find_cert_from_txt(certs, st)
        except RuntimeError:
            pass
        else:
            raise ValueError('%s contains more than one certificate' %
                             location)

    def add_cert(self, cert, nick, flags):
        flags = unparse_trust_flags(flags)
        args = ["-A", "-n", nick, "-t", flags, '-a']
        self.run_certutil(args, stdin=cert.public_bytes(x509.Encoding.PEM))

    def delete_cert(self, nick):
        self.run_certutil(["-D", "-n", nick])

    def verify_server_cert_validity(self, nickname, hostname):
        """Verify a certificate is valid for an SSL server with given hostname

        Raises a ValueError if the certificate is invalid.
        """
        cert = self.get_cert(nickname)

        try:
            self.run_certutil(['-V', '-n', nickname, '-u', 'V'],
                              capture_output=True)
        except ipautil.CalledProcessError as e:
            # certutil output in case of error is
            # 'certutil: certificate is invalid: <ERROR_STRING>\n'
            raise ValueError(e.output)

        try:
            cert.match_hostname(hostname)
        except ValueError:
            raise ValueError('invalid for server %s' % hostname)

    def verify_ca_cert_validity(self, nickname):
        cert = self.get_cert(nickname)

        if not cert.subject:
            raise ValueError("has empty subject")

        try:
            bc = cert.extensions.get_extension_for_class(
                cryptography.x509.BasicConstraints)
        except cryptography.x509.ExtensionNotFound:
            raise ValueError("missing basic constraints")

        if not bc.value.ca:
            raise ValueError("not a CA certificate")

        try:
            cert.extensions.get_extension_for_class(
                cryptography.x509.SubjectKeyIdentifier)
        except cryptography.x509.ExtensionNotFound:
            raise ValueError("missing subject key identifier extension")

        try:
            self.run_certutil(['-V', '-n', nickname, '-u', 'L'],
                              capture_output=True)
        except ipautil.CalledProcessError as e:
            # certutil output in case of error is
            # 'certutil: certificate is invalid: <ERROR_STRING>\n'
            raise ValueError(e.output)

    def verify_kdc_cert_validity(self, nickname, realm):
        nicknames = self.get_trust_chain(nickname)
        certs = [self.get_cert(nickname) for nickname in nicknames]

        verify_kdc_cert_validity(certs[-1], certs[:-1], realm)
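# ---------------------------------------------------------------------------
# A minimal usage sketch for the two halves of this module, assuming it runs
# on a host where ipapython and the NSS command-line tools are installed.
# The trust-flag string 'CT,C,C' is an illustrative value, not taken from the
# code above.
from ipapython.certdb import (NSSDatabase, parse_trust_flags,
                              unparse_trust_flags)

flags = parse_trust_flags('CT,C,C')   # trusted CA: TLS server + client auth
assert flags.ca is True
assert unparse_trust_flags(flags) == 'CT,C,C'   # the round trip is lossless

with NSSDatabase() as nssdb:          # temporary DB, removed by close()
    nssdb.create_db()
    print(nssdb.list_certs())         # () for a freshly created database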
# Authors: # Rob Crittenden <rcritten@redhat.com> # # Copyright (C) 2010 Red Hat # see file 'COPYING' for use and warranty information # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ Test `ipa-getkeytab` """ import os import shutil import tempfile import gssapi import pytest from ipalib import api from ipaplatform.paths import paths from ipapython import ipautil, ipaldap from ipaserver.plugins.ldap2 import ldap2 from ipatests.test_cmdline.cmdline import cmdline_test from ipatests.test_xmlrpc.tracker import host_plugin, service_plugin def use_keytab(principal, keytab): try: tmpdir = tempfile.mkdtemp(prefix = "tmp-") ccache_file = 'FILE:%s/ccache' % tmpdir name = gssapi.Name(principal, gssapi.NameType.kerberos_principal) store = {'ccache': ccache_file, 'client_keytab': keytab} os.environ['KRB5CCNAME'] = ccache_file gssapi.Credentials(name=name, usage='initiate', store=store) conn = ldap2(api) conn.connect(autobind=ipaldap.AUTOBIND_DISABLED) conn.disconnect() except gssapi.exceptions.GSSError as e: raise Exception('Unable to bind to LDAP. Error initializing principal %s in %s: %s' % (principal, keytab, str(e))) finally: os.environ.pop('KRB5CCNAME', None) if tmpdir: shutil.rmtree(tmpdir) @pytest.fixture(scope='class') def test_host(request): host_tracker = host_plugin.HostTracker(u'test-host') return host_tracker.make_fixture(request) @pytest.fixture(scope='class') def test_service(request, test_host): service_tracker = service_plugin.ServiceTracker(u'srv', test_host.name) test_host.ensure_exists() return service_tracker.make_fixture(request) @pytest.mark.needs_ipaapi class KeytabRetrievalTest(cmdline_test): """ Base class for keytab retrieval tests """ command = "ipa-getkeytab" keytabname = None @classmethod def setup_class(cls): super(KeytabRetrievalTest, cls).setup_class() keytabfd, keytabname = tempfile.mkstemp() os.close(keytabfd) os.unlink(keytabname) cls.keytabname = keytabname @classmethod def teardown_class(cls): super(KeytabRetrievalTest, cls).teardown_class() try: os.unlink(cls.keytabname) except OSError: pass def run_ipagetkeytab(self, service_principal, args=tuple(), raiseonerr=False): new_args = [self.command, "-p", service_principal, "-k", self.keytabname] if not args: new_args.extend(['-s', api.env.host]) else: new_args.extend(list(args)) return ipautil.run( new_args, stdin=None, raiseonerr=raiseonerr, capture_error=True) def assert_success(self, *args, **kwargs): result = self.run_ipagetkeytab(*args, **kwargs) expected = 'Keytab successfully retrieved and stored in: %s\n' % ( self.keytabname) assert expected in result.error_output, ( 'Success message not in output:\n%s' % result.error_output) def assert_failure(self, retcode, message, *args, **kwargs): result = self.run_ipagetkeytab(*args, **kwargs) err = result.error_output assert message in err rc = result.returncode assert rc == retcode @pytest.mark.tier0 class test_ipagetkeytab(KeytabRetrievalTest): """ Test `ipa-getkeytab`. 
""" command = "ipa-getkeytab" keytabname = None def test_1_run(self, test_service): """ Create a keytab with `ipa-getkeytab` for a non-existent service. """ test_service.ensure_missing() result = self.run_ipagetkeytab(test_service.name) err = result.error_output assert 'Failed to parse result: PrincipalName not found.\n' in err, err rc = result.returncode assert rc > 0, rc def test_2_run(self, test_service): """ Create a keytab with `ipa-getkeytab` for an existing service. """ test_service.ensure_exists() self.assert_success(test_service.name, raiseonerr=True) def test_3_use(self, test_service): """ Try to use the service keytab. """ use_keytab(test_service.name, self.keytabname) def test_4_disable(self, test_service): """ Disable a kerberos principal """ retrieve_cmd = test_service.make_retrieve_command() result = retrieve_cmd() # Verify that it has a principal key assert result[u'result'][u'has_keytab'] # Disable it disable_cmd = test_service.make_disable_command() disable_cmd() # Verify that it looks disabled result = retrieve_cmd() assert not result[u'result'][u'has_keytab'] def test_5_use_disabled(self, test_service): """ Try to use the disabled keytab """ try: use_keytab(test_service.name, self.keytabname) except Exception as errmsg: assert('Unable to bind to LDAP. Error initializing principal' in str(errmsg)) class TestBindMethods(KeytabRetrievalTest): """ Class that tests '-c'/'-H'/'-Y' flags """ dm_password = None ca_cert = None @classmethod def setup_class(cls): super(TestBindMethods, cls).setup_class() dmpw_file = os.path.join(api.env.dot_ipa, '.dmpw') if not os.path.isfile(dmpw_file): pytest.skip('{} file required for this test'.format(dmpw_file)) with open(dmpw_file, 'r') as f: cls.dm_password = f.read().strip() tempfd, temp_ca_cert = tempfile.mkstemp() os.close(tempfd) shutil.copy(os.path.join(paths.IPA_CA_CRT), temp_ca_cert) cls.ca_cert = temp_ca_cert @classmethod def teardown_class(cls): super(TestBindMethods, cls).teardown_class() try: os.unlink(cls.ca_cert) except OSError: pass def check_ldapi(self): if not api.env.ldap_uri.startswith('ldapi://'): pytest.skip("LDAP URI not pointing to LDAPI socket") def test_retrieval_with_dm_creds(self, test_service): test_service.ensure_exists() self.assert_success( test_service.name, args=[ '-D', "cn=Directory Manager", '-w', self.dm_password, '-s', api.env.host]) def test_retrieval_using_plain_ldap(self, test_service): test_service.ensure_exists() ldap_uri = 'ldap://{}'.format(api.env.host) self.assert_success( test_service.name, args=[ '-D', "cn=Directory Manager", '-w', self.dm_password, '-H', ldap_uri]) @pytest.mark.skipif(os.geteuid() != 0, reason="Must have root privileges to run this test") def test_retrieval_using_ldapi_external(self, test_service): test_service.ensure_exists() self.check_ldapi() self.assert_success( test_service.name, args=[ '-Y', 'EXTERNAL', '-H', api.env.ldap_uri]) def test_retrieval_using_ldap_gssapi(self, test_service): test_service.ensure_exists() self.check_ldapi() self.assert_success( test_service.name, args=[ '-Y', 'GSSAPI', '-H', api.env.ldap_uri]) def test_retrieval_using_ldaps_ca_cert(self, test_service): test_service.ensure_exists() self.assert_success( test_service.name, args=[ '-D', "cn=Directory Manager", '-w', self.dm_password, '-H', 'ldaps://{}'.format(api.env.host), '--cacert', self.ca_cert]) def test_ldap_uri_server_raises_error(self, test_service): test_service.ensure_exists() self.assert_failure( 2, "Cannot specify server and LDAP uri simultaneously", test_service.name, args=[ '-H', 
'ldaps://{}'.format(api.env.host), '-s', api.env.host], raiseonerr=False) def test_invalid_mech_raises_error(self, test_service): test_service.ensure_exists() self.assert_failure( 2, "Invalid SASL bind mechanism", test_service.name, args=[ '-H', 'ldaps://{}'.format(api.env.host), '-Y', 'BOGUS'], raiseonerr=False) def test_mech_bind_dn_raises_error(self, test_service): test_service.ensure_exists() self.assert_failure( 2, "Cannot specify both SASL mechanism and bind DN simultaneously", test_service.name, args=[ '-D', "cn=Directory Manager", '-w', self.dm_password, '-H', 'ldaps://{}'.format(api.env.host), '-Y', 'EXTERNAL'], raiseonerr=False)
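# ---------------------------------------------------------------------------
# Hedged sketch of the credential-store pattern that use_keytab() above
# relies on: python-gssapi is pointed at a client keytab and a private
# ccache through the 'store' argument. The principal and file paths are
# placeholder values, and a reachable KDC is required for this to succeed.
import gssapi

name = gssapi.Name('host/client.example.test',
                   gssapi.NameType.kerberos_principal)
store = {'ccache': 'FILE:/tmp/my_ccache',
         'client_keytab': '/tmp/my.keytab'}
creds = gssapi.Credentials(name=name, usage='initiate', store=store)
print(creds.lifetime)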
apophys/freeipa
ipatests/test_cmdline/test_ipagetkeytab.py
ipapython/certdb.py
from .generate_fire import FireGenerator __all__ = ["FireGenerator"]
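# ---------------------------------------------------------------------------
# The re-export above means user code imports the component from the package
# root rather than from the private generate_fire module:
from landlab.components.fire_generator import FireGenerator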
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 27 14:23:25 2017

@author: gtucker
"""

import numpy as np
import pytest
from numpy import testing

from landlab import HexModelGrid, RasterModelGrid
from landlab.components import ErosionDeposition, FlowAccumulator


def test_route_to_multiple_error_raised():
    mg = RasterModelGrid((10, 10))
    z = mg.add_zeros("topographic__elevation", at="node")
    z += mg.x_of_node + mg.y_of_node
    fa = FlowAccumulator(mg, flow_director="MFD")
    fa.run_one_step()

    with pytest.raises(NotImplementedError):
        ErosionDeposition(mg, K=0.01, v_s=0.001, m_sp=0.5, n_sp=1.0, sp_crit=0)


def test_phi_error_raised():
    mg = RasterModelGrid((10, 10))
    z = mg.add_zeros("topographic__elevation", at="node")
    z += mg.x_of_node + mg.y_of_node
    fa = FlowAccumulator(mg)
    fa.run_one_step()

    with pytest.raises(ValueError):
        ErosionDeposition(mg, phi=0)


def test_extra_kwd_error_raised():
    mg = RasterModelGrid((10, 10))
    z = mg.add_zeros("topographic__elevation", at="node")
    z += mg.x_of_node + mg.y_of_node
    fa = FlowAccumulator(mg)
    fa.run_one_step()

    with pytest.raises(ValueError):
        ErosionDeposition(mg, spam=0)


def test_bad_solver_name():
    """
    Test that any solver name besides 'basic' and 'adaptive' raises an error.
    """
    # set up a 5x5 grid with one open outlet node and low initial elevations.
    nr = 5
    nc = 5
    mg = RasterModelGrid((nr, nc), xy_spacing=10.0)

    mg.add_zeros("topographic__elevation", at="node")
    mg["node"]["topographic__elevation"] += (
        mg.node_y / 10000 + mg.node_x / 10000 + np.random.rand(len(mg.node_y)) / 10000
    )
    mg.set_closed_boundaries_at_grid_edges(
        bottom_is_closed=True,
        left_is_closed=True,
        right_is_closed=True,
        top_is_closed=True,
    )
    mg.set_watershed_boundary_condition_outlet_id(
        0, mg["node"]["topographic__elevation"], -9999.0
    )

    # Create a D8 flow handler
    FlowAccumulator(mg, flow_director="D8")

    # try to instantiate ErosionDeposition using a wrong solver name
    with pytest.raises(ValueError):
        ErosionDeposition(
            mg,
            K=0.01,
            v_s=0.001,
            m_sp=0.5,
            n_sp=1.0,
            sp_crit=0,
            F_f=0.0,
            solver="something_else",
        )


def test_steady_state_with_basic_solver_option():
    """
    Test that model matches the transport-limited analytical solution
    for slope/area relationship at steady state:
    S=((U * v_s) / (K * A^m) + U / (K * A^m))^(1/n).

    Also test that model matches the analytical solution for steady-state
    sediment flux: Qs = U * A * (1 - phi).
    """
    # set up a 5x5 grid with one open outlet node and low initial elevations.
    nr = 5
    nc = 5
    mg = RasterModelGrid((nr, nc), xy_spacing=10.0)

    z = mg.add_zeros("topographic__elevation", at="node")
    mg["node"]["topographic__elevation"] += (
        mg.node_y / 100000
        + mg.node_x / 100000
        + np.random.rand(len(mg.node_y)) / 10000
    )
    mg.set_closed_boundaries_at_grid_edges(
        bottom_is_closed=True,
        left_is_closed=True,
        right_is_closed=True,
        top_is_closed=True,
    )
    mg.set_watershed_boundary_condition_outlet_id(
        0, mg["node"]["topographic__elevation"], -9999.0
    )

    # Create a D8 flow handler
    fa = FlowAccumulator(
        mg, flow_director="D8", depression_finder="DepressionFinderAndRouter"
    )

    # Parameter values for detachment-limited test
    K = 0.01
    U = 0.0001
    dt = 1.0
    F_f = 0.0  # all sediment is considered coarse bedload
    m_sp = 0.5
    n_sp = 1.0
    v_s = 0.5

    # Instantiate the ErosionDeposition component...
    ed = ErosionDeposition(
        mg,
        K=K,
        F_f=F_f,
        v_s=v_s,
        m_sp=m_sp,
        n_sp=n_sp,
        sp_crit=0,
        solver="basic",
    )

    # ... and run it to steady state (5000x1-year timesteps).
    for i in range(5000):
        fa.run_one_step()
        ed.run_one_step(dt=dt)
        z[mg.core_nodes] += U * dt  # m

    # compare numerical and analytical slope solutions
    num_slope = mg.at_node["topographic__steepest_slope"][mg.core_nodes]
    analytical_slope = np.power(
        ((U * v_s) / (K * np.power(mg.at_node["drainage_area"][mg.core_nodes], m_sp)))
        + ((U) / (K * np.power(mg.at_node["drainage_area"][mg.core_nodes], m_sp))),
        1.0 / n_sp,
    )

    # test for match with analytical slope-area relationship
    testing.assert_array_almost_equal(
        num_slope,
        analytical_slope,
        decimal=8,
        err_msg="E/D slope-area test failed",
        verbose=True,
    )

    # compare numerical and analytical sediment flux solutions
    num_sedflux = mg.at_node["sediment__flux"][mg.core_nodes]
    analytical_sedflux = U * mg.at_node["drainage_area"][mg.core_nodes]

    # test for match with analytical sediment flux
    testing.assert_array_almost_equal(
        num_sedflux,
        analytical_sedflux,
        decimal=8,
        err_msg="E/D sediment flux test failed",
        verbose=True,
    )


def test_can_run_with_hex():
    """Test that model can run with hex model grid."""

    # Set up a 5x5 grid with open boundaries and low initial elevations.
    mg = HexModelGrid((7, 7))
    z = mg.add_zeros("topographic__elevation", at="node")
    z[:] = 0.01 * mg.x_of_node

    # Create a D8 flow handler
    fa = FlowAccumulator(mg, flow_director="FlowDirectorSteepest")

    # Parameter values for test 1
    K = 0.001
    vs = 0.0001
    U = 0.001
    dt = 10.0

    # Create the ErosionDeposition component...
    ed = ErosionDeposition(mg, K=K, v_s=vs, m_sp=0.5, n_sp=1.0, solver="adaptive")

    # ... and run it to steady state.
    for i in range(2000):
        fa.run_one_step()
        ed.run_one_step(dt=dt)
        z[mg.core_nodes] += U * dt

    # Test the results
    sa_factor = (1.0 + vs) * U / K
    a18 = mg.at_node["drainage_area"][18]
    a28 = mg.at_node["drainage_area"][28]
    s = mg.at_node["topographic__steepest_slope"]
    s18 = sa_factor * (a18 ** -0.5)
    s28 = sa_factor * (a28 ** -0.5)
    testing.assert_equal(np.round(s[18], 3), np.round(s18, 3))
    testing.assert_equal(np.round(s[28], 3), np.round(s28, 3))
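# ---------------------------------------------------------------------------
# Standalone sketch of the analytical steady-state slope the test above
# checks against, S = ((U*v_s)/(K*A^m) + U/(K*A^m))^(1/n), evaluated for a
# few illustrative drainage areas (parameter values copied from the test).
import numpy as np

K, U, v_s, m_sp, n_sp = 0.01, 0.0001, 0.5, 0.5, 1.0
A = np.array([100.0, 400.0, 900.0])       # drainage area, m**2 (illustrative)
S = ((U * v_s) / (K * A ** m_sp) + U / (K * A ** m_sp)) ** (1.0 / n_sp)
print(S)                                  # slope decreases with drainage area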
amandersillinois/landlab
tests/components/erosion_deposition/test_erodep.py
landlab/components/fire_generator/__init__.py
from .dual_structured_quad import ( DualRectilinearGraph, DualStructuredQuadGraph, DualUniformRectilinearGraph, ) from .structured_quad import ( RectilinearGraph, StructuredQuadGraph, UniformRectilinearGraph, ) __all__ = [ "StructuredQuadGraph", "RectilinearGraph", "UniformRectilinearGraph", "DualUniformRectilinearGraph", "DualRectilinearGraph", "DualStructuredQuadGraph", ]
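# ---------------------------------------------------------------------------
# Usage sketch for the re-exported graph classes; the shape/spacing arguments
# shown here are an assumption about the constructor signature, not taken
# from this file.
from landlab.graph.structured_quad import UniformRectilinearGraph

graph = UniformRectilinearGraph((4, 5), spacing=(1.0, 2.0))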
amandersillinois/landlab
tests/components/erosion_deposition/test_erodep.py
landlab/graph/structured_quad/__init__.py
# -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2016 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.

import os
import collections

from .utils import find_class
from ...common.measurementgroup import MeasurementGroup


def create_objects_from_config_node(config, item_cfg_node):
    klass = find_class(item_cfg_node, 'bliss.common')
    item_name = item_cfg_node["name"]
    if issubclass(klass, MeasurementGroup):
        available_counters = _get_available_counters(config, item_cfg_node)
        if available_counters != item_cfg_node.get('counters', list()):
            item_cfg_node = item_cfg_node.deep_copy()
            item_cfg_node['counters'] = available_counters
    return {item_name: klass(item_name, item_cfg_node)}


__this_path = os.path.realpath(os.path.dirname(__file__))


def get_jinja2():
    global __environment
    try:
        return __environment
    except NameError:
        from jinja2 import Environment, FileSystemLoader
        __environment = Environment(loader=FileSystemLoader(__this_path))
        return __environment


def get_item(cfg):
    from ..static import get_config
    from ..conductor.web.config_app import get_item
    config = get_config()
    items = [get_item(config.get_config(name))
             for name in cfg.get('config-objects', ())]
    result = dict(type="session", icon="fa fa-scribd", items=items)
    return result


def get_tree(cfg, perspective):
    item = get_item(cfg)
    name = cfg.get('name')
    if perspective == 'files':
        path = os.path.join(cfg.filename, name)
    else:
        path = name
    item['path'] = path
    return item


def get_html(cfg):
    from ..static import get_config
    config = get_config()
    objects = cfg.get('config-objects', ())
    plugin_items = collections.defaultdict(list)
    for item_name in sorted(config.names_list):
        item_cfg = config.get_config(item_name)
        item = dict(name=item_name,
                    checked=item_name in objects,
                    description=item_cfg.get('description'))
        plugin_items[item_cfg.plugin].append(item)
    params = dict(name=cfg['name'],
                  setup=cfg.get('setup-file', ''),
                  plugin_items=plugin_items)
    html_template = get_jinja2().select_template(['session.html'])
    return html_template.render(**params)


def edit(cfg, request):
    import flask.json

    if request.method == "POST":
        form = request.form
        orig_name = form["__original_name__"]
        name = form['name']
        result = dict(name=name)
        if name != orig_name:
            result["message"] = "Change of card name not supported yet!"
            result["type"] = "danger"
            return flask.json.dumps(result)

        session_cfg = cfg.get_config(name)
        session_cfg['setup-file'] = form['setup']
        session_cfg['config-objects'] = form.getlist('items[]')
        session_cfg.save()
        result["message"] = "'%s' configuration applied!" % name
        result["type"] = "success"
        return flask.json.dumps(result)


def config_objects(cfg, request):
    import flask.json
    objects = cfg.get('config-objects', ())
    return flask.json.dumps(objects)


def _get_available_counters(config, mes_grp_cfg):
    # Copy the list: extending the object returned by get() would mutate the
    # configuration node in place, so the comparison in
    # create_objects_from_config_node() could never detect a difference.
    cnt_list = list(mes_grp_cfg.get('counters', list()))
    include_list = mes_grp_cfg.get('include', list())
    for sub_grp_ref in include_list:
        sub_grp_cfg = config.get_config(sub_grp_ref)
        if sub_grp_cfg is None:
            raise RuntimeError(
                "Reference **%s** in MeasurementGroup **%s** "
                "in file %s doesn't exist" %
                (sub_grp_ref, mes_grp_cfg.get('name'), mes_grp_cfg.filename))
        sub_cnt_list = _get_available_counters(config, sub_grp_cfg)
        cnt_list.extend(sub_cnt_list)
    return cnt_list
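# ---------------------------------------------------------------------------
# Dependency-free sketch of the include resolution performed by
# _get_available_counters() above: a measurement group may 'include' other
# groups, and the effective counter list is the recursive concatenation.
# The dict-based 'config' and group names are stand-ins for the real
# static-config objects.
config = {
    'mg_base': {'counters': ['diode']},
    'mg_all': {'counters': ['timer'], 'include': ['mg_base']},
}

def available_counters(config, name):
    cfg = config[name]
    counters = list(cfg.get('counters', []))   # copy, never mutate the node
    for ref in cfg.get('include', []):
        counters.extend(available_counters(config, ref))
    return counters

print(available_counters(config, 'mg_all'))    # ['timer', 'diode']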
# -*- coding: utf-8 -*- # # This file is part of the bliss project # # Copyright (c) 2016 Beamline Control Unit, ESRF # Distributed under the GNU LGPLv3. See LICENSE for more info. import pytest import time from bliss.common import measurementgroup from bliss import setup_globals from bliss.common import scans from bliss.common import measurement def test_mg(beacon): session = beacon.get("test_session") session.setup() default_mg = getattr(setup_globals, 'ACTIVE_MG') test_mg = getattr(setup_globals, 'test_mg') assert measurementgroup.get_all() == [test_mg] assert default_mg.name == 'test_mg' assert measurementgroup.get_active_name() == 'test_mg' def test_mg_enable_disable(): default_mg = getattr(setup_globals, 'ACTIVE_MG') assert list(default_mg.available) == ['diode'] default_mg.disable = 'diode' assert list(default_mg.enable) == [] assert list(default_mg.disable) == ['diode'] default_mg.enable = 'diode' assert list(default_mg.disable) == [] assert list(default_mg.enable) == ['diode'] def test_scan(): scans.ct(0.1) def test_clear_mg(): default_mg = getattr(setup_globals, 'ACTIVE_MG') delattr(setup_globals, 'test_mg') assert default_mg.name is None assert measurementgroup.get_active_name() is None def test_scan_fail(): with pytest.raises(ValueError): scans.ct(0.1)
tiagocoutinho/bliss
tests/session/test_mg.py
bliss/config/plugins/session.py
# -*- coding: utf-8 -*- # # This file is part of the bliss project # # Copyright (c) 2016 Beamline Control Unit, ESRF # Distributed under the GNU LGPLv3. See LICENSE for more info. """Bliss shell user interface style""" from ptpython.style import get_all_ui_styles from prompt_toolkit.token import Token __all__ = ('bliss_ui_style',) _default = get_all_ui_styles()['default'] bliss_ui_style = dict(_default) bliss_ui_style.update({ Token.Toolbar.Status.Name: 'bg:#0000ff #ffffff bold', Token.Toolbar.Status.Label: 'bg:#222222 #ffffff bold', Token.Toolbar.Status.Ok: 'bg:#222222 #ffffff', Token.Toolbar.Status.Warning: 'bg:#ffa500 #ffffff', Token.Toolbar.Status.Alarm: 'bg:#aa0000 #ffffff', Token.Toolbar.Status.Error: 'bg:#ff00ff #ffffff', Token.Toolbar.Status.Changing: 'bg:#3333ff #ffffff', Token.Toolbar.Status.Open: 'bg:#00aa00 #ffffff', Token.Toolbar.Status.Close: 'bg:#aa0000 #ffffff', })
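# ---------------------------------------------------------------------------
# Sketch of layering a custom style on top of bliss_ui_style, following the
# same copy-and-update pattern the module itself uses; the token/colour
# override below is an illustrative value.
from prompt_toolkit.token import Token
from bliss.shell.cli.style import bliss_ui_style

my_ui_style = dict(bliss_ui_style)
my_ui_style[Token.Toolbar.Status.Name] = 'bg:#005500 #ffffff bold'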
tiagocoutinho/bliss
tests/session/test_mg.py
bliss/shell/cli/style.py
# -*- coding: utf-8 -*- # # This file is part of the bliss project # # Copyright (c) 2016 Beamline Control Unit, ESRF # Distributed under the GNU LGPLv3. See LICENSE for more info. """ Keithley meters. YAML_ configuration example: .. code-block:: yaml plugin: keithley # (1) name: k_ctrl_1 # (2) class: Ammeter # (3) model: 6485 # (4) auto_zero: False # (5) display: False # (6) zero_check: False # (7) zero_correct: False # (8) gpib: # (9) url: enet://gpibid31eh pad: 12 sensors: # (10) - name: mondio # (11) address: 1 # (12) current_dc_nplc: 0.1 # (13) current_dc_auto_range: False # (14) #. plugin name (mandatory: keithley) #. controller name (mandatory). Some controller settings are needed. To hook the settings to the controller we use the controller name. That is why it is mandatory #. plugin class (mandatory) #. controller model (optional. default: discover by asking instrument *IDN) #. auto-zero enabled (optional, default: False) #. display enabled (optional, default: False) #. zero-check enabled (optional, default: False). Only for 6485! #. zero-correct enabled (optional, default: False). Only for 6485! #. controller URL (mandatory, valid: gpib, tcp, serial) #. gpib (mandatory: *url* and *pad*). See :class:~bliss.comm.gpib.Gpib for list of options #. serial (mandatory: *port*). See :class:~bliss.comm.serial.Serial for list of options #. tcp (mandatory: *url*). See :class:~bliss.comm.tcp.Tcp for list of options #. list of sensors (mandatory) #. sensor name (mandatory) #. sensor address (mandatory). Valid values: #. model 6482: 1, 2 #. model 6485: 1 #. model 2000: 1 #. sensor DC current NPLC (optional, default: 0.1) #. sensor DC current auto-range (optional, default: False) Some parameters (described below) are stored as settings. This means that the static configuration described above serves as a *default configuration*. The first time ever the system is brought to life it will read this configuration and apply it to the settings. From now on, the keithley object will rely on its settings. This is the same principle as it is applied on the bliss axis velocity for example. The following controller parameters are stored as settings: *auto_zero*, *display*, (and *zero_check* and *zero_correct* only for 6485). The following sensor parameters are stored as settings: *current_dc_nplc* and *auto_range*. A demo is available from the command line: $ python -m bliss.controllers.keithley <url> <pad> Developer details: READ? <=> INIT + FETCH? MEASURE[:<function>]? <=> CONF[:<function>] + READ? == CONF[:<function>] + INIT + READ? """ import time import weakref import functools import collections import numpy import gevent from bliss.common.measurement import SamplingCounter from bliss.comm.util import get_interface from bliss.config.settings import HashSetting from bliss.comm.exceptions import CommunicationError from bliss.comm.scpi import Cmd as SCPICmd from bliss.comm.scpi import Commands as SCPICommands from bliss.comm.scpi import BaseDevice as BaseDeviceSCPI from .keithley_scpi_mapping import COMMANDS as SCPI_COMMANDS from .keithley_scpi_mapping import MODEL_COMMANDS as SCPI_MODEL_COMMANDS class KeithleySCPI(BaseDeviceSCPI): """Keithley instrument through SCPI language. Can be used with any Keithley SCPI capable device. 
Example usage:: from bliss.comm.gpib import Gpib from bliss.controllers.keithley import KeithleySCPI gpib = Gpib('enet://gpibhost', pad=10) keithley = KeithleySCPI(gpib) print( keithley('*IDN?') ) print( keithley['*IDN'] ) """ def __init__(self, *args, **kwargs): commands = SCPICommands(SCPI_COMMANDS) model = str(kwargs.pop('model')) commands.update(SCPI_MODEL_COMMANDS.get(model, {})) kwargs['commands'] = commands super(KeithleySCPI, self).__init__(*args, **kwargs) class Sensor(SamplingCounter): def __init__(self, config, controller): name = config['name'] SamplingCounter.__init__(self, name, controller) self.__controller = controller self.config = config self.address = int(config['address']) self.index = self.address - 1 self.controller.initialize_sensor(self) @property def controller(self): return self.__controller def __int__(self): return self.address def __getattr__(self, name): return getattr(self.controller, name) def measure(self, func=None): return self.controller.measure(func=func)[self.index] def data(self): return self.controller.data()[self.index] def get_auto_range(self): return self.controller.get_auto_range(self) def set_auto_range(self, auto_range): self.controller.set_auto_range(self, auto_range) def get_nplc(self): return self.controller.get_nplc(self) def set_nplc(self, nplc): self.controller.set_nplc(self, nplc) class BaseAcquisition(object): def __init__(self, keithley, acq_time, channel): self.keithley = keithley self.channel = channel self.acq_time = acq_time self.start_time = None self.end_time = None self.acq_task = None self.value = None self.__prepared = False @property def total_time(self): return self.end_time - self.start_time def prepare(self): self._prepare() self.__prepared = True def _do_acq(self): raise NotImplementedError def _prepare(self): raise NotImplementedError def __on_acq_finished(self, task): self.end_time = time.time() self.acq_task = None def __set_value(self, value): self.value = value def start(self): if not self.__prepared: raise RuntimeError('Need prepare before start') self.__prepared = False self.start_time = time.time() self.acq_task = gevent.spawn(self._do_acq) self.acq_task.link(self.__on_acq_finished) self.acq_task.link_value(self.__set_value) return self.acq_task def abort(self): if self.acq_task is not None: self.acq_task.kill() self.acq_task.join() self.keithley.abort() def get_value(self): if self.acq_task is not None: return self.acq_task.get() raise ValueError('no value') class HardwareAcquisition(BaseAcquisition): """ keithley acquisition where integration is done by the keithley itself using its internal buffer. Limited to 2500 points """ def _calc(self, acq_time=None): nplc = self.keithley.get_current_dc_nplc(self.channel) if acq_time is None: nb_points = 0 else: nb_points = int(acq_time * 1000 / (2.96 + 3 * (nplc * 20 + 1.94))) nb_points = max(1, nb_points) acq_time = acq_time + 3 * 20 * nplc / 1000 if nb_points > 2499: raise ValueError("cannot perform an acquisition of more " "than 2499 points (calculated %d)" % nb_points) return nb_points, acq_time def _prepare(self): nb_points, acq_time = self._calc(self.acq_time) self.nb_points, self.real_acq_time = nb_points, acq_time keithley = self.keithley keithley._logger.info("nb points=%s; effective acq. 
time=%s", nb_points, acq_time) if nb_points == 0: raise RuntimeError("continuous acquisition not supported") elif nb_points == 1: start = time.time() # activate one-shot measurement self.keithley("CONF") else: keithley("ABOR", # abort whatever keithley is doing "TRAC:CLE", # empty buffer "*OPC?") # synchronize keithley("TRIG:DEL 0", # no trigger delay "TRIG:COUN %d" % nb_points, # nb of points to trig "TRAC:POIN %d" % nb_points, # nb of points to store "TRAC:FEED:CONT NEXT", # use buffer "*OPC?") # synchronize def _do_acq(self): if self.nb_points == 0: pass else: # start acquisition self.keithley('INIT') gevent.sleep(max(0, self.real_acq_time-0.5)) # synchronize self.keithley['*OPC'] try: if self.nb_points == 1: value = self.keithley['FETCH'] else: value = self.keithley['CALC3:DATA'] except ValueError: value = float('nan') # finally: # self.keithley('TRAC:FEED:CONT NEV') return value class SoftwareAcquisition(BaseAcquisition): def _prepare(self): self.keithley.set_meas_func() def _do_acq(self): buff = [] t0, acq_time = time.time(), self.acq_time while (time.time() - t0) < acq_time: try: data = self.keithley['READ'] except ValueError: data = float('nan') buff.append(data) self.buffer = numpy.array(buff) return numpy.average(self.buffer) def read_cmd(name, settings=None): def read(self): value = self[name] if settings: self.settings[settings] = value return value read.__name__ = 'get_' + name.lower().replace(':', '_') return read def write_cmd(name, settings=None): def write(self, value=None): if value is None and settings: value = self.settings[settings] self[name] = value if settings: self.settings[settings] = value write.__name__ = 'set_' + name.lower().replace(':', '_') return write def cmd(name, settings=True): return read_cmd(name, settings=settings), write_cmd(name, settings=settings) def read_sensor_cmd(name, settings=None): def read(self, sensor): address = int(sensor) cmd = self._sensor_cmd(sensor, name) value = self[cmd] if settings: value = self.sensor_settings[address][settings] return value read.__name__ = 'get_' + name return read def write_sensor_cmd(name, settings=None): def write(self, sensor, value=None): address = int(sensor) cmd = self._sensor_cmd(sensor, name) if value is None: if settings: value = self.sensor_settings[address][settings] self[cmd] = value if settings: self.sensor_settings[address][settings] = value return write def sensor_cmd(name, settings=None): return (read_sensor_cmd(name, settings=settings), write_sensor_cmd(name, settings=settings)) def read_sensor_meas_cmd(name, settings=None): def read(self, sensor, func=None): address = int(sensor) cmd = self._meas_func_sensor_cmd(sensor, name, func) value = self[cmd] if settings: sname = self._meas_func_settings_name(settings, func) value = self.sensor_settings[address][sname] return value read.__name__ = 'get_' + name return read def write_sensor_meas_cmd(name, settings=None): def write(self, sensor, value=None, func=None): address = int(sensor) cmd = self._meas_func_sensor_cmd(sensor, name, func) if settings: sname = self._meas_func_settings_name(settings, func) else: sname = None if value is None: if sname: value = self.sensor_settings[address][sname] self[cmd] = value if sname: self.sensor_settings[address][sname] = value write.__name__ = 'set_' + name return write def sensor_meas_cmd(name, settings=None): return (read_sensor_meas_cmd(name, settings=settings), write_sensor_meas_cmd(name, settings=settings)) class BaseMultimeter(KeithleySCPI): """""" HARD_INTEG, SOFT_INTEG = 'HARDWARE', 'SOFTWARE' 
DefaultConfig = { 'auto_zero': False, 'display_enable': False, 'meas_func': 'CURR:DC', 'integration_mode': SOFT_INTEG, } DefaultSensorConfig = { } MeasureFunctions = SCPICommands() Sensor = Sensor SoftwareAcquisition = SoftwareAcquisition HardwareAcquisition = HardwareAcquisition def __init__(self, config, interface=None): kwargs = dict(config) if interface: kwargs['interface'] = interface self.name = config['name'] self.config = config self.__active_acq = None super(BaseMultimeter, self).__init__(**kwargs) defaults = {} for key, value in self.DefaultConfig.items(): defaults[key] = config.get(key, value) k_setting_name = 'multimeter.' + self.name self.settings = HashSetting(k_setting_name, default_values=defaults) self.sensor_settings = {} def __str__(self): return '{0}({1})'.format(self.__class__.__name__, self.name) def initialize(self): self('*RST', '*OPC?') with self: self.set_meas_func() self.set_display_enable() self.set_auto_zero() self._initialize() self('*OPC?') def initialize_sensor(self, sensor): address = int(sensor) if address in self.sensor_settings: return setting_name = 'multimeter.{0}'.format(sensor.name) defaults = {} for key, value in self.DefaultSensorConfig.items(): defaults[key] = sensor.config.get(key, value) settings = HashSetting(setting_name, default_values=defaults) self.sensor_settings[address] = settings with self: self._initialize_sensor(sensor) def _initialize_sensor(self, sensor): pass def _meas_func(self, func=None): if func is None: func = self.settings['meas_func'] return self.MeasureFunctions[func]['max_command'] def _meas_func_settings_name(self, name, func=None): func = self._meas_func(func).replace(':', '_') return '{0}_{1}'.format(func, name).lower() def _meas_func_sensor_cmd(self, sensor, param, func=None): func = self._meas_func(func) return 'SENS%d:%s:%s' % (sensor, func, param) def _sensor_cmd(self, sensor, param): return 'SENS%d:%s' % (sensor, param) def get_meas_func(self): func = self['CONF'] return self.MeasureFunctions[func]['max_command'] def set_meas_func(self, func=None): func = self._meas_func(func) self('CONF:' + func) self.settings['meas_func'] = func get_display_enable, set_display_enable = cmd('DISP:ENAB', 'display_enable') get_auto_zero, set_auto_zero = cmd('SYST:AZER', 'auto_zero') get_nplc, set_nplc = \ sensor_meas_cmd('NPLC', 'nplc') get_auto_range, set_auto_range = \ sensor_meas_cmd('RANG:AUTO', 'auto_range') def measure(self, func=None): func = self._meas_func(func) return self['MEAS:' + func] def read(self): return self['READ'] def read_all(self,*counters): values = self.read() return [values[int(cnt) - 1] for cnt in counters] def data(self): return self['DATA'] def abort(self): return self('ABOR', 'OPC?') def set_integration_mode(self, mode): self.settings['integration_mode'] = mode def get_integration_mode(self): return self.settings['integration_mode'] def create_acq(self, acq_time=None, channel=1, integ_mode=None): integ_mode = integ_mode or self.get_integration_mode() if integ_mode == self.HARD_INTEG: klass = self.HardwareAcquisition elif integ_mode == self.SOFT_INTEG: klass = self.SoftwareAcquisition return klass(self, acq_time, channel) def pprint(self): values = self.settings.get_all() settings = '\n'.join((' {0}={1}'.format(k, v) for k, v in values.items())) idn = '\n'.join((' {0}={1}'.format(k, v) for k, v in self['*IDN'].items())) print('{0}:\n name:{1}\n IDN:\n{2}\n settings:\n{3}' .format(self, self.name, idn, settings)) class BaseAmmeter(BaseMultimeter): MeasureFunctions = SCPICommands({'CURRent[:DC]': 
SCPICmd()}) DefaultSensorConfig = dict(BaseMultimeter.DefaultSensorConfig, current_dc_auto_range=False, current_dc_nplc=0.1 ) get_current_dc_nplc, set_current_dc_nplc = \ sensor_cmd('CURR:DC:NPLC', 'current_dc_nplc') get_current_dc_auto_range, set_current_dc_auto_range = \ sensor_cmd('CURR:DC:RANG:AUTO', 'current_dc_auto_range') def _initialize_sensor(self, sensor): super(BaseAmmeter, self)._initialize_sensor(sensor) self.set_current_dc_auto_range(sensor) self.set_current_dc_nplc(sensor) class Ammeter6485(BaseAmmeter): DefaultConfig = dict(BaseAmmeter.DefaultConfig, zero_check=False, zero_correct=False) def _initialize(self): with self: self['FORM:ELEM'] = ['READ'] # just get the current when you read (no timestamp) self['CALC3:FORM'] = 'MEAN' # buffer statistics is mean self['TRAC:FEED'] = 'SENS' # source of reading is sensor self.set_zero_check() self.set_zero_correct() get_zero_check, set_zero_check = cmd('SYST:ZCH', 'zero_check') get_zero_correct, set_zero_correct = cmd('SYST:ZCOR', 'zero_correct') def zero_correct(self): '''Zero correct procedure''' zero_check = self.settings['zero_check'] zero_correct = self.settings['zero_correct'] with self: self.set_zero_check(True) # zero check must be enabled self.set_zero_correct(False) # zero correct state must be disabled self('INIT') # trigger a reading self('SYST:ZCOR:ACQ') # acquire zero correct value self.set_zero_correct(zero_correct) # restore zero correct state self.set_zero_check(zero_check) # restore zero check class Ammeter6482(BaseAmmeter): def _initialize(self): with self: # should it not be FORM:ELEM instead of FORM:ELEM:TRAC ? self['FORM:ELEM:TRAC'] = ['CURR1', 'CURR2'] self['CALC8:FORM'] = 'MEAN' # buffer statistics is mean class Multimeter2000(BaseMultimeter): MeasureFunctions = SCPICommands({ 'CURRent[:DC]': SCPICmd(), 'CURRent:AC': SCPICmd(), 'VOLTage[:DC]': SCPICmd(), 'VOLTage:AC': SCPICmd(), 'RESistance': SCPICmd(), 'FRESistance': SCPICmd(), 'PERiod': SCPICmd(), 'FREQuency': SCPICmd(), 'TEMPerature': SCPICmd(),}) def Multimeter(config): class_name = config['class'] model = config.get('model') kwargs = {} if model is None: # Discover model interface, _, _ = get_interface(**config) decode_IDN = SCPI_COMMANDS['*IDN'].get('get') idn = decode_IDN(interface.write_readline('*IDN?\n')) model = idn['model'] kwargs['interface'] = interface config['model'] = model else: model = str(model) if class_name in ('Multimeter', 'Ammeter'): class_name += model elif not class_name.endswith(model): raise ValueError('class: {0} != model: {1}'.format(class_name, model)) klass = globals()[class_name] obj = klass(config, **kwargs) obj.initialize() return obj def create_objects_from_config_node(config, node): name = node['name'] if 'sensors' in node: # controller node obj = Multimeter(node) else: # sensor node obj = create_sensor(config, node) return {name: obj} def create_sensor(config, node): ctrl_node = node.parent while ctrl_node and 'sensors' not in ctrl_node: ctrl_node = ctrl_node.parent ctrl = config.get(ctrl_node['name']) with ctrl: obj = Sensor(node, ctrl) return obj def main(): """ Start a Keithley console. 
The following example will start a Keithley console with one Keithley instrument called *k*:: $ python -m bliss.controllers.keithley gpib --pad=15 enet://gpibhost keithley> print( k['*IDN?'] ) """ import sys import logging import argparse try: import serial except: serial = None parser = argparse.ArgumentParser(description=main.__doc__) parser.add_argument('--model', type=str, default=None, help='keithley model (ex: 6482) [default: auto discover]') parser.add_argument('--log-level', type=str, default='info', choices=['debug', 'info', 'warning', 'error'], help='log level [default: info]') parser.add_argument('--scpi-log-level', type=str, default='info', choices=['trace', 'debug', 'info', 'warning', 'error'], help='log level for scpi object [default: info]') parser.add_argument('--keithley-log-level', type=str, default='info', choices=['trace', 'debug', 'info', 'warning', 'error'], help='log level for keithley object [default: info]') parser.add_argument('--gevent', action='store_true', default=False, help='enable gevent in console [default: False]') subparsers = parser.add_subparsers(title='object/connection', dest='connection', description='config object name or valid type of connections', help='choose keithley config object name or type of connection') config_parser = subparsers.add_parser('config', help='keithey config object') config_parser.add_argument('name', help='config object name') gpib_parser = subparsers.add_parser('gpib', help='GPIB connection') add = gpib_parser.add_argument add('url', type=str, help='gpib instrument url (ex: gpibhost, enet://gpibhost:5000)') add('--pad', type=int, required=True, help='primary address') add('--sad', type=int, default=0, help='secondary address [default: 0]') add('--tmo', type=int, default=10, help='GPIB timeout (GPIB tmo unit) [default: 11 (=1s)]') add('--eos', type=str, default='\n', help=r"end of string [default: '\n']") add('--timeout', type=float, default=1.1, help='socket timeout [default: 1.1]') tcp_parser = subparsers.add_parser('tcp', help='TCP connection') add = tcp_parser.add_argument add('url', type=str, help='TCP instrument url (ex: keith6485:25000)') if serial: serial_parser = subparsers.add_parser('serial', help='serial line connection') add = serial_parser.add_argument add('port', type=str, help='serial instrument port (ex: rfc2217://.., ser2net://..)') add('--baudrate', type=int, default=9600, help='baud rate') add('--bytesize', type=int, choices=[6, 7, 8], default=serial.EIGHTBITS, help='byte size') add('--parity', choices=serial.PARITY_NAMES.keys(), default=serial.PARITY_NONE, help='parity type') add('--timeout', type=float, default=5, help='timeout') add('--stopbits', type=float, choices=[1, 1.5, 2], default=serial.STOPBITS_ONE, help='stop bits') add('--xonxoff', action='store_true', default=False, help='') add('--rtscts', action='store_true', default=False, help='') add('--write-timeout', dest='writeTimeout', type=float, default=None, help='') add('--dsrdtr', action='store_true', default=False, help='') add('--interchar-timeout', dest='interCharTimeout', type=float, default=None, help='') add('--eol', type=str, default='\n', help="end of line [default: '\\n']") args = parser.parse_args() vargs = vars(args) model = vargs.pop('model', None) log_level = getattr(logging, vargs.pop('log_level').upper()) keithley_log_level = vargs.pop('keithley_log_level').upper() scpi_log_level = vargs.pop('scpi_log_level').upper() logging.basicConfig(level=log_level, format='%(asctime)s %(levelname)s %(name)s: %(message)s') gevent_arg = 
vargs.pop('gevent') conn = vargs.pop('connection') local = {} if conn == 'config': from bliss.config.static import get_config config = get_config() name = vargs['name'] keithley = create_objects_from_config_node(config, config.get_config(name))[name] if isinstance(keithley, Sensor): sensor = keithley keithley = sensor.controller local['s'] = sensor else: kwargs = { conn: vargs , 'model': model } keithley = KeithleySCPI(**kwargs) local['k'] = keithley keithley._logger.setLevel(keithley_log_level) keithley.language._logger.setLevel(scpi_log_level) keithley.interface._logger.setLevel(scpi_log_level) sys.ps1 = 'keithley> ' sys.ps2 = len(sys.ps1)*'.' if gevent_arg: try: from gevent.monkey import patch_sys except ImportError: mode = 'no gevent' else: patch_sys() mode = 'gevent' else: mode = 'interactive, no gevent' import code banner = '\nWelcome to Keithley console ' \ '(connected to {0}) ({1})\n'.format(keithley, mode) code.interact(banner=banner, local=local) if __name__ == "__main__": main()
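For illustration only (this block is not part of the original file): the Multimeter() factory above resolves a concrete class either from an explicit model or by querying the instrument; the connection values below are hypothetical.

# Explicit model: class 'Ammeter' plus model '6485' is looked up as
# Ammeter6485 in this module's globals; no *IDN? round-trip is needed.
config = {
    'class': 'Ammeter',
    'model': 6485,
    'gpib': {'url': 'enet://gpibhost', 'pad': 15},   # hypothetical address
}
# k = Multimeter(config)   # would instantiate Ammeter6485 and call initialize()
# With 'model' omitted, the factory opens the interface first and parses the
# instrument's '*IDN?' reply to discover the model before the class lookup.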
# -*- coding: utf-8 -*- # # This file is part of the bliss project # # Copyright (c) 2016 Beamline Control Unit, ESRF # Distributed under the GNU LGPLv3. See LICENSE for more info. import pytest import time from bliss.common import measurementgroup from bliss import setup_globals from bliss.common import scans from bliss.common import measurement def test_mg(beacon): session = beacon.get("test_session") session.setup() default_mg = getattr(setup_globals, 'ACTIVE_MG') test_mg = getattr(setup_globals, 'test_mg') assert measurementgroup.get_all() == [test_mg] assert default_mg.name == 'test_mg' assert measurementgroup.get_active_name() == 'test_mg' def test_mg_enable_disable(): default_mg = getattr(setup_globals, 'ACTIVE_MG') assert list(default_mg.available) == ['diode'] default_mg.disable = 'diode' assert list(default_mg.enable) == [] assert list(default_mg.disable) == ['diode'] default_mg.enable = 'diode' assert list(default_mg.disable) == [] assert list(default_mg.enable) == ['diode'] def test_scan(): scans.ct(0.1) def test_clear_mg(): default_mg = getattr(setup_globals, 'ACTIVE_MG') delattr(setup_globals, 'test_mg') assert default_mg.name is None assert measurementgroup.get_active_name() is None def test_scan_fail(): with pytest.raises(ValueError): scans.ct(0.1)
tiagocoutinho/bliss
tests/session/test_mg.py
bliss/controllers/keithley.py
# -*- coding: utf-8 -*- # # This file is part of the bliss project # # Copyright (c) 2017 Beamline Control Unit, ESRF # Distributed under the GNU LGPLv3. See LICENSE for more info. import re import time import gevent import functools from bliss.common.greenlet_utils import protect_from_kill from bliss.controllers.motor import Controller from bliss.common.axis import AxisState,Axis from bliss.common.utils import object_method from bliss.comm.tcp import Command import struct import numpy import sys class Icepap(Controller): """ IcePAP stepper controller without Deep Technology of Communication. If you prefer to have it (DTC), move to the IcePAP controller class. Use this controller class at your own risk, because you won't have any support... """ STATUS_DISCODE = { 0 : ('POWERENA', 'power enabled'), 1 : ('NOTACTIVE', 'axis configured as not active'), 2 : ('ALARM', 'alarm condition'), 3 : ('REMRACKDIS', 'remote rack disable input signal'), 4 : ('LOCRACKDIS', 'local rack disable switch'), 5 : ('REMAXISDIS', 'remote axis disable input signal'), 6 : ('LOCAXISDIS', 'local axis disable switch'), 7 : ('SOFTDIS', 'software disable'), } STATUS_MODCODE = { 0 : ('OPER', 'operation mode'), 1 : ('PROG', 'programming mode'), 2 : ('TEST', 'test mode'), 3 : ('FAIL', 'fail mode'), } STATUS_STOPCODE = { 0 : ('SCEOM', 'end of movement'), 1 : ('SCSTOP', 'last motion was stopped'), 2 : ('SCABORT', 'last motion was aborted'), 3 : ('SCLIMPOS', 'positive limit switch reached'), 4 : ('SCLIMNEG', 'negative limit switch reached'), 5 : ('SCSETTLINGTO', 'settling timeout'), 6 : ('SCAXISDIS', 'axis disabled (no alarm)'), 7 : ('SCBIT7', 'n/a'), 8 : ('SCINTFAIL', 'internal failure'), 9 : ('SCMOTFAIL', 'motor failure'), 10 : ('SCPOWEROVL', 'power overload'), 11 : ('SCHEATOVL', 'driver overheating'), 12 : ('SCCLERROR', 'closed loop error'), 13 : ('SCCENCERROR', 'control encoder error'), 14 : ('SCBIT14', 'n/a'), 15 : ('SCEXTALARM', 'external alarm'), } def __init__(self,*args,**kwargs): Controller.__init__(self,*args,**kwargs) self._cnx = None self._last_axis_power_time = dict() def initialize(self): hostname = self.config.get("host") self._cnx = Command(hostname,5000,eol='\n') self._icestate = AxisState() self._icestate.create_state("POWEROFF", "motor power is off") for codes in (self.STATUS_DISCODE,self.STATUS_MODCODE,self.STATUS_STOPCODE): for state,desc in codes.values(): self._icestate.create_state(state,desc) def finalize(self): if self._cnx is not None: self._cnx.close() def initialize_axis(self,axis): axis.address = axis.config.get("address",lambda x: x) if hasattr(axis,'_init_software'): axis._init_software() def initialize_hardware_axis(self,axis): if axis.config.get('autopower', converter=bool, default=True): try: self.set_on(axis) except: sys.excepthook(*sys.exc_info()) if hasattr(axis,'_init_hardware'): axis._init_hardware() #Axis power management def set_on(self,axis): """ Put the axis power on """ self._power(axis,True) def set_off(self,axis): """ Put the axis power off """ self._power(axis,False) def _power(self,axis,power): _ackcommand(self._cnx,"POWER %s %s" % ("ON" if power else "OFF",axis.address)) self._last_axis_power_time[axis] = time.time() def read_position(self,axis,cache=True): pos_cmd = "FPOS" if cache else "POS" return int(_command(self._cnx,"?%s %s" % (pos_cmd,axis.address))) def set_position(self,axis,new_pos): if isinstance(axis,SlaveAxis): pre_cmd = "%d:DISPROT LINKED;" % axis.address else: pre_cmd = None _ackcommand(self._cnx,"POS %s %d" % (axis.address,int(round(new_pos))), pre_cmd = pre_cmd)
return self.read_position(axis,cache=False) def read_velocity(self,axis): return float(_command(self._cnx,"?VELOCITY %s" % axis.address)) def set_velocity(self,axis,new_velocity): _ackcommand(self._cnx,"VELOCITY %s %f" % (axis.address,new_velocity)) return self.read_velocity(axis) def read_acceleration(self,axis): acctime = float(_command(self._cnx,"?ACCTIME %s" % axis.address)) velocity = self.read_velocity(axis) return velocity/float(acctime) def set_acceleration(self,axis,new_acc): velocity = self.read_velocity(axis) new_acctime = velocity/new_acc _ackcommand(self._cnx,"ACCTIME %s %f" % (axis.address,new_acctime)) return self.read_acceleration(axis) def state(self,axis): last_power_time = self._last_axis_power_time.get(axis,0) if time.time() - last_power_time < 1.: status_cmd = "?STATUS" else: self._last_axis_power_time.pop(axis,None) status_cmd = "?FSTATUS" status = int(_command(self._cnx,"%s %s" % (status_cmd,axis.address)),16) status ^= 1<<23 #neg POWERON FLAG state = self._icestate.new() for mask,value in (((1<<9),"READY"), ((1<<10|1<<11),"MOVING"), ((1<<18),"LIMPOS"), ((1<<19),"LIMNEG"), ((1<<20),"HOME"), ((1<<23),"POWEROFF")): if status & mask: state.set(value) state_mode = (status >> 2) & 0x3 if state_mode: state.set(self.STATUS_MODCODE.get(state_mode)[0]) stop_code = (status >> 14) & 0xf if stop_code: state.set(self.STATUS_STOPCODE.get(stop_code)[0]) disable_condition = (status >> 4) & 0x7 if disable_condition: state.set(self.STATUS_DISCODE.get(disable_condition)[0]) if state.READY: #if motor is ready then no need to investigate deeper return state if not state.MOVING: # it seems it is not safe to call warning and/or alarm commands # while homing motor, so let's not ask if motor is moving if status & (1<<13): try: warning = _command(self._cnx,"%d:?WARNING" % axis.address) except TypeError: pass else: warn_str = "warning condition: \n" + warning state.create_state("WARNING",warn_str) state.set("WARNING") try: alarm = _command(self._cnx,"%d:?ALARM" % axis.address) except (RuntimeError,TypeError): pass else: if alarm != "NO": alarm_dsc = "alarm condition: " + str(alarm) state.create_state("ALARMDESC",alarm_dsc) state.set("ALARMDESC") return state def get_info(self,axis): pre_cmd = '%s:' % axis.address r = "MOTOR : %s\n" % axis.name r += "SYSTEM : %s (ID: %s) (VER: %s)\n" % (self._cnx._host, _command(self._cnx,"0:?ID"), _command(self._cnx,"?VER")) r += "DRIVER : %s\n" % axis.address r += "POWER : %s\n" % _command(self._cnx,pre_cmd + "?POWER") r += "CLOOP : %s\n" % _command(self._cnx,pre_cmd + "?PCLOOP") r += "WARNING : %s\n" % _command(self._cnx,pre_cmd + "?WARNING") r += "ALARM : %s\n" % _command(self._cnx,pre_cmd + "?ALARM") return r def raw_write(self,message,data = None): return _command(self._cnx,message,data) def raw_write_read(self,message,data = None): return _ackcommand(self._cnx,message,data) def prepare_move(self,motion): pass def start_one(self,motion): if isinstance(motion.axis,SlaveAxis): pre_cmd = "%d:DISPROT LINKED;" % motion.axis.address else: pre_cmd = None _ackcommand(self._cnx,"MOVE %s %d" % (motion.axis.address, motion.target_pos), pre_cmd = pre_cmd) def start_all(self,*motions): if len(motions) > 1: cmd = "MOVE GROUP " cmd += ' '.join(["%s %d" % (m.axis.address,m.target_pos) for m in motions]) _ackcommand(self._cnx,cmd) elif motions: self.start_one(motions[0]) def stop(self,axis): _command(self._cnx,"STOP %s" % axis.address) def stop_all(self,*motions): for motion in motions: self.stop(motion.axis) def home_search(self,axis,switch): cmd = "HOME " + ("+1" if switch
> 0 else "-1") _ackcommand(self._cnx,"%s:%s" % (axis.address,cmd)) # IcePAP status is not immediately MOVING after home search command is sent gevent.sleep(0.2) def home_state(self,axis): s = self.state(axis) if s != 'READY' and s != 'POWEROFF': s.set('MOVING') return s def limit_search(self,axis,limit): cmd = "SRCH LIM" + ("+" if limit>0 else "-") _ackcommand(self._cnx,"%s:%s" % (axis.address,cmd)) # TODO: MG18Nov14: remove this sleep (state is not immediately MOVING) gevent.sleep(0.1) def initialize_encoder(self,encoder): # Get axis config from bliss config # address form is XY : X=rack {0..?} Y=driver {1..8} encoder.address = encoder.config.get("address", int) # Get optional encoder input to read enctype = encoder.config.get("type",str,"ENCIN").upper() # Minimum check on encoder input if enctype not in ['ENCIN', 'ABSENC', 'INPOS', 'MOTOR', 'AXIS', 'SYNC']: raise ValueError('Invalid encoder type') encoder.enctype = enctype def read_encoder(self,encoder): value = _command(self._cnx,"?ENC %s %d" % (encoder.enctype,encoder.address)) return int(value) def set_encoder(self,encoder,steps): _ackcommand(self._cnx,"ENC %s %d %d" % (encoder.enctype,encoder.address,steps)) def set_event_positions(self,axis_or_encoder,positions): int_position = numpy.array(positions,dtype=numpy.int32) #position has to be ordered int_position.sort() address = axis_or_encoder.address if not len(int_position): _ackcommand(self._cnx,"%s:ECAMDAT CLEAR" % address) return if isinstance(axis_or_encoder,Axis): source = 'AXIS' else: # encoder source = 'MEASURE' #load trigger positions _ackcommand(self._cnx,"%s:*ECAMDAT %s DWORD" % (address,source), int_position) # send the trigger on the multiplexer _ackcommand(self._cnx,"%s:SYNCAUX eCAM" % address) def get_event_positions(self,axis_or_encoder): """ For this controller this method should be used for debugging purposes only...
""" address = axis_or_encoder.address #Get the number of positions reply = _command(self._cnx,"%d:?ECAMDAT" % address) reply_exp = re.compile("(\w+) +([+-]?\d+) +([+-]?\d+) +(\d+)") m = reply_exp.match(reply) if m is None: raise RuntimeError("Reply Didn't expected: %s" % reply) source = m.group(1) nb = int(m.group(4)) if isinstance(axis_or_encoder,Axis): nb = nb if source == 'AXIS' else 0 else: # encoder nb = nb if source == "MEASURE" else 0 positions = numpy.zeros((nb,),dtype = numpy.int32) if nb > 0: reply_exp = re.compile(".+: +([+-]?\d+)") reply = _command(self._cnx,"%d:?ECAMDAT %d" % (address,nb)) for i,line in enumerate(reply.split('\n')): m = reply_exp.match(line) if m: pos = int(m.group(1)) positions[i] = pos return positions def get_linked_axis(self): reply = _command(self._cnx,"?LINKED") linked = dict() for line in reply.strip().split('\n'): values = line.split() linked[values[0]] = [int(x) for x in values[1:]] return linked @object_method(types_info=("bool","bool")) def activate_closed_loop(self,axis,active): _command(self._cnx,"#%s:PCLOOP %s" % (axis.address,"ON" if active else "OFF")) return active @object_method(types_info=("None","bool")) def is_closed_loop_activate(self,axis): return True if _command(self._cnx,"%s:?PCLOOP" % axis.address) == 'ON' else False @object_method(types_info=("None","None")) def reset_closed_loop(self,axis): measure_position = int(_command(self._cnx,"%s:?POS MEASURE" % axis.address)) self.set_position(axis,measure_position) if axis.config.get('autopower', converter=bool, default=True): self.set_on(axis) axis.sync_hard() @object_method(types_info=("None","int")) def temperature(self,axis): return int(_command(self._cnx,"%s:?MEAS T" % axis.address)) @object_method(types_info=(("float","bool"),"None")) def set_tracking_positions(self,axis,positions,cyclic = False): """ Send position to the controller which will be tracked. positions -- are expressed in user unit cyclic -- cyclic position or not default False @see activate_track method """ address = axis.address if not len(positions): _ackcommand(self._cnx,"%s:LISTDAT CLEAR" % address) return dial_positions = axis.user2dial(numpy.array(positions, dtype=numpy.float)) step_positions = numpy.array(dial_positions * axis.steps_per_unit, dtype=numpy.int32) _ackcommand(self._cnx,"%d:*LISTDAT %s DWORD" % (address, "CYCLIC" if cyclic else "NOCYCLIC"), step_positions) @object_method(types_info=("None",("float","bool"))) def get_tracking_positions(self,axis): """ Get the tacking positions. This method should only be use for debugging return a tuple with (positions,cyclic flag) """ address = axis.address #Get the number of positions reply = _command(self._cnx,"%d:?LISTDAT" % address) reply_exp = re.compile("(\d+) *(\w+)?") m = reply_exp.match(reply) if m is None: raise RuntimeError("Reply didn't expected: %s" % reply) nb = int(m.group(1)) positions = numpy.zeros((nb,),dtype = numpy.int32) cyclic = True if m.group(2) == "CYCLIC" else False if nb > 0: reply_exp = re.compile(".+: +([+-]?\d+)") reply = _command(self._cnx,"%d:?LISTDAT %d" % (address,nb)) for i,line in enumerate(reply.split('\n')): m = reply_exp.match(line) if m: pos = int(m.group(1)) positions[i] = pos dial_positions = positions / axis.steps_per_unit positions = axis.dial2user(dial_positions) return positions,cyclic @object_method(types_info=(("bool","str"),"None")) def activate_tracking(self,axis,activate,mode = None): """ Activate/Deactivate the tracking position depending on activate flag mode -- default "INPOS" if None. 
mode can be : - SYNC -> Internal SYNC signal - ENCIN -> ENCIN signal - INPOS -> INPOS signal - ABSENC -> ABSENC signal """ address = axis.address if not activate: _ackcommand(self._cnx,"STOP %d" % address) axis.sync_hard() else: if mode is None: mode = "INPOS" possibles_modes = ["SYNC","ENCIN","INPOS","ABSENC"] if mode not in possibles_modes: raise ValueError("mode %s is not managed, can only choose %s" % (mode,possibles_modes)) if mode == "INPOS": _ackcommand(self._cnx, "%d:POS INPOS 0" % address) _ackcommand(self._cnx,"%d:LTRACK %s" % (address,mode)) @object_method(types_info=("float", "None")) def blink(self, axis, second=3.): """ Blink axis driver """ _command(self._cnx,"%d:BLINK %f" % (axis.address, second)) def reset(self): _command(self._cnx,"RESET") def mdspreset(self): """ Reset the MASTER DSP """ _command(self._cnx,"_dsprst") def reboot(self): _command(self._cnx,"REBOOT") self._cnx.close() _check_reply = re.compile("^[#?]|^[0-9]+:\?") @protect_from_kill def _command(cnx,cmd,data = None,pre_cmd = None): if data is not None: uint16_view = data.view(dtype=numpy.uint16) data_checksum = uint16_view.sum() header = struct.pack("<III", 0xa5aa555a, # Header key len(uint16_view),int(data_checksum) & 0xffffffff) data_test = data.newbyteorder('<') if len(data_test) and data_test[0] != data[0]: # not good endianness data = data.byteswap() full_cmd = "%s\n%s%s" % (cmd,header,data.tostring()) transaction = cnx._write(full_cmd) else: full_cmd = "%s%s\n" % (pre_cmd or '',cmd) transaction = cnx._write(full_cmd) with cnx.Transaction(cnx,transaction) : if _check_reply.match(cmd): msg = cnx._readline(transaction=transaction, clear_transaction=False) cmd = cmd.strip('#').split(' ')[0] msg = msg.replace(cmd + ' ','') if msg.startswith('$'): msg = cnx._readline(transaction=transaction, clear_transaction=False,eol='$\n') elif msg.startswith('ERROR'): raise RuntimeError(msg.replace('ERROR ','')) return msg.strip(' ') def _ackcommand(cnx,cmd,data = None,pre_cmd = None): if not cmd.startswith('#') and not cmd.startswith('?'): cmd = '#' + cmd return _command(cnx,cmd,data,pre_cmd) from .shutter import Shutter from .switch import Switch from .linked import LinkedAxis, SlaveAxis
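Because the bit twiddling in Icepap.state() above is easy to misread, here is a standalone sketch (not part of the original module) that decodes a raw ?FSTATUS word with the same masks; useful for checking a status value by hand.

def decode_fstatus(status):
    # Same layout as Icepap.state(): flag bits, then a 2-bit mode code,
    # a 4-bit stop code and a 3-bit disable code.
    status ^= 1 << 23                      # negate the POWERON flag, as above
    flags = [name for mask, name in (
        (1 << 9, 'READY'), (1 << 10 | 1 << 11, 'MOVING'),
        (1 << 18, 'LIMPOS'), (1 << 19, 'LIMNEG'),
        (1 << 20, 'HOME'), (1 << 23, 'POWEROFF'),
    ) if status & mask]
    mode = (status >> 2) & 0x3             # index into STATUS_MODCODE
    stop_code = (status >> 14) & 0xf       # index into STATUS_STOPCODE
    disable = (status >> 4) & 0x7          # index into STATUS_DISCODE
    return flags, mode, stop_code, disable

# decode_fstatus(0x00A00200) -> (['READY'], 0, 0, 0)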
# -*- coding: utf-8 -*- # # This file is part of the bliss project # # Copyright (c) 2016 Beamline Control Unit, ESRF # Distributed under the GNU LGPLv3. See LICENSE for more info. import pytest import time from bliss.common import measurementgroup from bliss import setup_globals from bliss.common import scans from bliss.common import measurement def test_mg(beacon): session = beacon.get("test_session") session.setup() default_mg = getattr(setup_globals, 'ACTIVE_MG') test_mg = getattr(setup_globals, 'test_mg') assert measurementgroup.get_all() == [test_mg] assert default_mg.name == 'test_mg' assert measurementgroup.get_active_name() == 'test_mg' def test_mg_enable_disable(): default_mg = getattr(setup_globals, 'ACTIVE_MG') assert list(default_mg.available) == ['diode'] default_mg.disable = 'diode' assert list(default_mg.enable) == [] assert list(default_mg.disable) == ['diode'] default_mg.enable = 'diode' assert list(default_mg.disable) == [] assert list(default_mg.enable) == ['diode'] def test_scan(): scans.ct(0.1) def test_clear_mg(): default_mg = getattr(setup_globals, 'ACTIVE_MG') delattr(setup_globals, 'test_mg') assert default_mg.name is None assert measurementgroup.get_active_name() is None def test_scan_fail(): with pytest.raises(ValueError): scans.ct(0.1)
tiagocoutinho/bliss
tests/session/test_mg.py
bliss/controllers/motors/icepap/__init__.py
# -*- coding: utf-8 -*- # # This file is part of the bliss project # # Copyright (c) 2016 Beamline Control Unit, ESRF # Distributed under the GNU LGPLv3. See LICENSE for more info. from __future__ import absolute_import import os import sys import pkgutil import weakref import PyTango import bliss.controllers.motors from bliss.common.axis import Axis, AxisRef from bliss.common.encoder import Encoder from bliss.config.static import Config, get_config from bliss.common.tango import DeviceProxy from bliss.config.plugins.bliss import find_class import gevent import hashlib __KNOWN_AXIS_PARAMS = { "name": str, "controller": str, "user_tag": lambda x: x.split(','), "unit": str, "steps_per_unit": float, "velocity": float, "acceleration": float, "backlash": float, "low_limit": float, "high_limit": float, } __KNOWN_CONTROLLER_PARAMS = ("name", "class", "plugin", "axes") __this_path = os.path.realpath(os.path.dirname(__file__)) def __get_controller_class_names(): return bliss.controllers.motors.__all__ def get_jinja2(): global __environment try: return __environment except NameError: from jinja2 import Environment, FileSystemLoader __environment = Environment(loader=FileSystemLoader(__this_path)) return __environment def get_item(cfg): klass = cfg.get('class') result = {'class': klass } if klass is None: result['icon'] = 'fa fa-gear' result['type'] = 'axis' else: result['icon'] = 'fa fa-gears' result['type'] = 'controller' return result def get_tree(cfg, perspective): item = get_item(cfg) name = cfg.get('name') ctrl_class = cfg.get("class") if ctrl_class is None: path = os.path.join(get_tree(cfg.parent, 'files')['path'], name) else: if perspective == "files": path = os.path.join(cfg.filename, name) else: path = name item['path'] = path return item def get_html(cfg): ctrl_class = cfg.get("class") if ctrl_class is None: return get_axis_html(cfg) else: return get_ctrl_html(cfg) def get_axis_html(cfg): name = cfg["name"] ctrl_class = cfg.parent.get("class") ctrl_name = cfg.parent.get("name") vars = dict(cfg.items()) filename = "emotion_" + ctrl_class + "_axis.html" html_template = get_jinja2().select_template([filename, "emotion_axis.html"]) extra_params = {} for key, value in vars.items(): if key not in __KNOWN_AXIS_PARAMS: extra_params[key] = dict(name=key, label=key.capitalize(), value=value) tags = cfg.get(Config.USER_TAG_KEY, []) if not isinstance(tags, (tuple, list)): tags = [tags] vars["tags"] = tags vars["controller_class"] = ctrl_class if ctrl_name: vars["controller_name"] = ctrl_name vars["params"] = extra_params vars["units"] = cfg.get("unit", "unit") controllers = list() vars["controllers"] = controllers for controller_name in __get_controller_class_names(): controllers.append({"class": controller_name}) vars["__tango_server__"] = __is_tango_device(name) return html_template.render(**vars) def get_ctrl_html(cfg): ctrl_class = cfg.get("class") vars = dict(cfg.items()) filename = "emotion_" + ctrl_class + ".html" html_template = get_jinja2().select_template([filename, "emotion_controller.html"]) extra_params = [] for key, value in vars.items(): if key not in __KNOWN_CONTROLLER_PARAMS: extra_params.append(dict(name=key, label=key.capitalize(), value=value)) vars["params"] = extra_params controllers = list() vars["controllers"] = controllers pkgpath = os.path.dirname(bliss.controllers.motors.__file__) for _, controller_name, _ in pkgutil.iter_modules([pkgpath]): controllers.append({"class": controller_name}) for axis in vars["axes"]: device = __is_tango_device(axis['name']) if device: vars["__tango_server__"] = True break else:
vars["__tango_server__"] = False return html_template.render(**vars) def __is_tango_device(name): try: return DeviceProxy(name) is not None except: pass return False def __tango_apply_config(name): try: device = DeviceProxy(name) device.command_inout("ApplyConfig", True) msg = "'%s' configuration saved and applied to server!" % name msg_type = "success" except PyTango.DevFailed as df: msg = "'%s' configuration saved but <b>NOT</b> applied to " \ " server:\n%s" % (name, df[0].desc) msg_type = "warning" sys.excepthook(*sys.exc_info()) except Exception as e: msg = "'%s' configuration saved but <b>NOT</b> applied to " \ " server:\n%s" % (name, str(e)) msg_type = "warning" sys.excepthook(*sys.exc_info()) return msg, msg_type def controller_edit(cfg, request): import flask.json if request.method == "POST": form = dict([(k,v) for k,v in request.form.items() if v]) update_server = form.pop("__update_server__") == 'true' orig_name = form.pop("__original_name__") name = form.get("name", orig_name) result = dict(name=name) if name != orig_name: result["message"] = "Change of controller name not supported yet!" result["type"] = "danger" return flask.json.dumps(result) ctrl_cfg = cfg.get_config(orig_name) axes_data = {} objs = set() for param_name, param_value in form.items(): if " " in param_name: # axis param param_name, axis_name = param_name.split() obj = cfg.get_config(axis_name) try: param_value = __KNOWN_AXIS_PARAMS[param_name](param_value) except KeyError: pass else: # controller param obj = ctrl_cfg obj[param_name] = param_value objs.add(obj) axes_server_results = {} for obj in objs: obj.save() if update_server and obj != ctrl_cfg: name = obj["name"] axes_server_results[name] = __tango_apply_config(name) msg_type = "success" if update_server: if ctrl_cfg in objs: msg_type = "warning" msg = "'%s' configuration saved! " \ "TANGO server needs to be (re)started!" % name else: msg = "'%s' configuration applied!" % name for axis_name, axis_result in axes_server_results: msg += "<br/>" + axis_result['message'] axis_msg_type = axis_result['type'] if axis_msg_type != "success": msg_type = axis_msg_type else: msg = "'%s' configuration applied!" % name result["message"] = msg result["type"] = msg_type return flask.json.dumps(result) def axis_edit(cfg, request): import flask.json if request.method == "POST": form = dict([(k,v) for k,v in request.form.items() if v]) update_server = form.pop("__update_server__") == 'true' orig_name = form.pop("__original_name__") name = form["name"] result = dict(name=name) if name != orig_name: result["message"] = "Change of axis name not supported yet!" result["type"] = "danger" return flask.json.dumps(result) axis_cfg = cfg.get_config(orig_name) for k, v in form.iteritems(): try: v = __KNOWN_AXIS_PARAMS[k](v) except KeyError: pass axis_cfg[k] = v axis_cfg.save() if update_server: result["message"], result["type"] = __tango_apply_config(name) else: result["message"] = "'%s' configuration saved!" 
% name result["type"] = "success" return flask.json.dumps(result) __ACTIONS = \ { "add": [ {"id": "emotion_add_controller", "label": "Add controller", "icon": "fa fa-gears", "action": "plugin/emotion/add_controller", "disabled": True,}, {"id": "emotion_add_axis", "label": "Add axis", "icon": "fa fa-gears", "action": "plugin/emotion/add_axis", "disabled": True}],} def actions(): return __ACTIONS def add_controller(cfg, request): import flask.json if request.method == "GET": return flask.json.dumps(dict(html="<h1>TODO</h1>", message="not implemented", type="danger")) def add_axis(cfg, request): import flask.json if request.method == "GET": return flask.json.dumps(dict(html="<h1>TODO</h1>", message="not implemented", type="danger")) def create_objects_from_config_node(config, node): if 'axes' in node or 'encoders' in node: # asking for a controller obj_name = None else: obj_name = node.get('name') node = node.parent controller_class_name = node.get('class') controller_name = node.get('name') if controller_name is None: h = hashlib.md5() for axis_config in node.get('axes'): name = axis_config.get('name') if name is not None: h.update(name) controller_name = h.hexdigest() controller_class = find_class(node, "bliss.controllers.motors") controller_module = sys.modules[controller_class.__module__] axes = list() axes_names = list() encoders = list() encoders_names = list() switches = list() switches_names = list() shutters = list() shutters_names = list() for axis_config in node.get('axes'): axis_name = axis_config.get("name") if axis_name.startswith("$"): axis_class = AxisRef axis_name = axis_name.lstrip('$') else: axis_class_name = axis_config.get("class") if axis_class_name is None: axis_class = Axis else: axis_class = getattr(controller_module, axis_class_name) axes_names.append(axis_name) axes.append((axis_name, axis_class, axis_config)) for objects,objects_names,default_class,default_class_name,objects_config in\ ((encoders,encoders_names,Encoder,'',node.get('encoders',[])), (shutters,shutters_names,None,'Shutter',node.get('shutters',[])), (switches,switches_names,None,'Switch',node.get('switches',[])), ): for object_config in objects_config: object_name = object_config.get("name") object_class_name = object_config.get("class") object_config = _checkref(config,object_config) if object_class_name is None: object_class = default_class if object_class is None: try: object_class = getattr(controller_module, default_class_name) except AttributeError: pass else: object_class = getattr(controller_module, object_class_name) objects_names.append(object_name) objects.append((object_name, object_class, object_config)) controller = controller_class(controller_name, node, axes, encoders, shutters, switches) controller._update_refs(config) controller.initialize() all_names = axes_names + encoders_names + switches_names + shutters_names cache_dict = dict(zip(all_names, [controller]*len(all_names))) ctrl = cache_dict.pop(obj_name,None) if ctrl is not None: obj = create_object_from_cache(None, obj_name, controller) return { controller_name: controller, obj_name: obj }, cache_dict else: return {controller_name: controller }, cache_dict def create_object_from_cache(config, name, controller): for func in (controller.get_axis, controller.get_encoder, controller.get_switch, controller.get_shutter): try: return func(name) except KeyError: pass raise KeyError(name) def _checkref(config,cfg): obj_cfg = cfg.deep_copy() for key,value in obj_cfg.iteritems(): if isinstance(value,str) and value.startswith('$'): # convert reference to item from config obj =
weakref.proxy(config.get(value)) obj_cfg[key] = obj return obj_cfg
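To make the '$name' convention that _checkref() above resolves a bit more concrete, here is a minimal self-contained sketch; the registry dict stands in for the beacon static config, and the real code wraps the resolved object in weakref.proxy:

registry = {'enc_m1': object()}          # stand-in for config.get(...)

def resolve_refs(cfg):
    out = dict(cfg)
    for key, value in out.items():
        if isinstance(value, str) and value.startswith('$'):
            # note: _checkref passes the full '$'-prefixed string to config.get()
            out[key] = registry[value.lstrip('$')]
    return out

# resolve_refs({'name': 'm1', 'encoder': '$enc_m1'})['encoder'] is the
# object registered under 'enc_m1'.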
# -*- coding: utf-8 -*- # # This file is part of the bliss project # # Copyright (c) 2016 Beamline Control Unit, ESRF # Distributed under the GNU LGPLv3. See LICENSE for more info. import pytest import time from bliss.common import measurementgroup from bliss import setup_globals from bliss.common import scans from bliss.common import measurement def test_mg(beacon): session = beacon.get("test_session") session.setup() default_mg = getattr(setup_globals, 'ACTIVE_MG') test_mg = getattr(setup_globals, 'test_mg') assert measurementgroup.get_all() == [test_mg] assert default_mg.name == 'test_mg' assert measurementgroup.get_active_name() == 'test_mg' def test_mg_enable_disable(): default_mg = getattr(setup_globals, 'ACTIVE_MG') assert list(default_mg.available) == ['diode'] default_mg.disable = 'diode' assert list(default_mg.enable) == [] assert list(default_mg.disable) == ['diode'] default_mg.enable = 'diode' assert list(default_mg.disable) == [] assert list(default_mg.enable) == ['diode'] def test_scan(): scans.ct(0.1) def test_clear_mg(): default_mg = getattr(setup_globals, 'ACTIVE_MG') delattr(setup_globals, 'test_mg') assert default_mg.name is None assert measurementgroup.get_active_name() is None def test_scan_fail(): with pytest.raises(ValueError): scans.ct(0.1)
tiagocoutinho/bliss
tests/session/test_mg.py
bliss/config/plugins/emotion.py
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. from __future__ import unicode_literals from django.contrib import messages from django.http.response import HttpResponseRedirect from django.utils.encoding import force_text from django.utils.translation import ugettext as _ from django.views.generic import DetailView from shuup.admin.modules.orders.toolbar import OrderDetailToolbar from shuup.admin.utils.urls import get_model_url from shuup.apps.provides import get_provide_objects from shuup.core.models import Order, OrderStatus, OrderStatusRole from shuup.utils.excs import Problem class OrderDetailView(DetailView): model = Order template_name = "shuup/admin/orders/detail.jinja" context_object_name = "order" def get_toolbar(self): return OrderDetailToolbar(self.object) def get_context_data(self, **kwargs): context = super(OrderDetailView, self).get_context_data(**kwargs) context["toolbar"] = self.get_toolbar() context["title"] = force_text(self.object) context["order_sections"] = [] provided_information = [] for provided_info in sorted(get_provide_objects("admin_order_information"), key=lambda x: x.order): info = provided_info(self.object) if info.provides_info(): provided_information.append((info.title, info.information)) context["provided_information"] = provided_information order_sections_provides = sorted(get_provide_objects("admin_order_section"), key=lambda x: x.order) for admin_order_section in order_sections_provides: # Check whether the Section should be visible for the current object if admin_order_section.visible_for_object(self.object): context["order_sections"].append(admin_order_section) # add additional context data where the key is the order_section identifier context[admin_order_section.identifier] = admin_order_section.get_context_data(self.object) return context class OrderSetStatusView(DetailView): model = Order def get(self, request, *args, **kwargs): return HttpResponseRedirect(get_model_url(self.get_object())) def post(self, request, *args, **kwargs): order = self.object = self.get_object() new_status = OrderStatus.objects.get(pk=int(request.POST["status"])) old_status = order.status if new_status.role == OrderStatusRole.COMPLETE and not order.can_set_complete(): raise Problem(_("Unable to set order as completed at this point")) if new_status.role == OrderStatusRole.CANCELED and not order.can_set_canceled(): raise Problem(_("Paid, shipped, or canceled orders cannot be canceled")) order.status = new_status order.save(update_fields=("status",)) message = _("Order status changed: {old_status} to {new_status}").format( old_status=old_status, new_status=new_status) order.add_log_entry(message, user=request.user, identifier="status_change") messages.success(self.request, message) return HttpResponseRedirect(get_model_url(self.get_object()))
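As a sketch of what OrderDetailView.get_context_data() above expects from an "admin_order_section" provide object (the class below is hypothetical; Shuup's real section base class may differ), a section only needs identifier, order, visible_for_object() and get_context_data():

class ExampleOrderSection(object):
    identifier = "example_section"   # also used as the extra context key
    order = 10                       # sort key used when ordering sections

    @classmethod
    def visible_for_object(cls, order):
        return order.pk is not None  # e.g. hide the section for unsaved orders

    @classmethod
    def get_context_data(cls, order):
        return {"line_count": order.lines.count()}  # illustrative payload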
# This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. import json import pytest from shuup.admin.modules.orders.mass_actions import CancelOrderAction, OrderDeliveryPdfAction, \ OrderConfirmationPdfAction from shuup.admin.modules.orders.views import OrderListView from shuup.core.models import Order from shuup.core.models import OrderStatusRole from shuup.testing.factories import get_default_supplier, get_default_shop, create_random_order, create_product, \ create_random_person from shuup.testing.utils import apply_request_middleware from shuup_tests.utils import printable_gibberish try: import weasyprint except ImportError: weasyprint = None @pytest.mark.django_db def test_mass_edit_orders(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": CancelOrderAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 for order in Order.objects.all(): assert order.status.role == OrderStatusRole.CANCELED @pytest.mark.django_db def test_mass_edit_orders2(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": OrderConfirmationPdfAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_%s_confirmation.pdf' % order.pk else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders3(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order1 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) order2 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order1.status.role != OrderStatusRole.CANCELED assert order2.status.role != OrderStatusRole.CANCELED payload = { "action": OrderConfirmationPdfAction().identifier, "values": [order1.pk, order2.pk] } request = apply_request_middleware(rf.post( "/", 
user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_confirmation_pdf.zip' else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders4(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": OrderDeliveryPdfAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 assert response["content-type"] == "application/json" order.create_shipment_of_all_products(supplier) order.save() request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=shipment_%s_delivery.pdf' % order.pk else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders5(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order1 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) order2 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order1.status.role != OrderStatusRole.CANCELED assert order2.status.role != OrderStatusRole.CANCELED payload = { "action": OrderDeliveryPdfAction().identifier, "values": [order1.pk, order2.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) order1.create_shipment_of_all_products(supplier) order1.save() order2.create_shipment_of_all_products(supplier) order2.save() request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_delivery_pdf.zip' else: assert response["content-type"] == "application/json"
suutari-ai/shoop
shuup_tests/admin/test_order_mass_actions.py
shuup/admin/modules/orders/views/detail.py
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. from django import VERSION from shuup.testing.models import PseudoPaymentProcessor from shuup.utils.analog import BaseLogEntry, define_log_model def test_analog(): PseudoPaymentProcessorLogEntry = define_log_model(PseudoPaymentProcessor) assert PseudoPaymentProcessorLogEntry.__module__ == PseudoPaymentProcessor.__module__ related_field_name = "related" # Behavior changes in Django 1.9 if VERSION >= (1, 9): related_field_name = "rel" relation_manager = getattr(PseudoPaymentProcessorLogEntry._meta.get_field("target"), related_field_name) assert relation_manager.to is PseudoPaymentProcessor relation_manager = getattr(PseudoPaymentProcessor.log_entries, related_field_name) assert relation_manager.model is PseudoPaymentProcessor assert relation_manager.related_model is PseudoPaymentProcessorLogEntry assert issubclass(PseudoPaymentProcessorLogEntry, BaseLogEntry) assert isinstance(PseudoPaymentProcessorLogEntry(), BaseLogEntry)
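Restating the pattern the test above pins down, as a hedged usage sketch (it runs only inside a configured Django/Shuup project, hence left as comments; MyModel is hypothetical):

# from shuup.utils.analog import define_log_model
# MyModelLogEntry = define_log_model(MyModel)
# define_log_model() builds a concrete BaseLogEntry subclass in MyModel's
# module, with a `target` foreign key back to MyModel that is reachable
# through the `log_entries` manager checked by the assertions above.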
# This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. import json import pytest from shuup.admin.modules.orders.mass_actions import CancelOrderAction, OrderDeliveryPdfAction, \ OrderConfirmationPdfAction from shuup.admin.modules.orders.views import OrderListView from shuup.core.models import Order from shuup.core.models import OrderStatusRole from shuup.testing.factories import get_default_supplier, get_default_shop, create_random_order, create_product, \ create_random_person from shuup.testing.utils import apply_request_middleware from shuup_tests.utils import printable_gibberish try: import weasyprint except ImportError: weasyprint = None @pytest.mark.django_db def test_mass_edit_orders(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": CancelOrderAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 for order in Order.objects.all(): assert order.status.role == OrderStatusRole.CANCELED @pytest.mark.django_db def test_mass_edit_orders2(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": OrderConfirmationPdfAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_%s_confirmation.pdf' % order.pk else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders3(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order1 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) order2 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order1.status.role != OrderStatusRole.CANCELED assert order2.status.role != OrderStatusRole.CANCELED payload = { "action": OrderConfirmationPdfAction().identifier, "values": [order1.pk, order2.pk] } request = apply_request_middleware(rf.post( "/", 
user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_confirmation_pdf.zip' else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders4(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": OrderDeliveryPdfAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 assert response["content-type"] == "application/json" order.create_shipment_of_all_products(supplier) order.save() request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=shipment_%s_delivery.pdf' % order.pk else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders5(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order1 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) order2 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order1.status.role != OrderStatusRole.CANCELED assert order2.status.role != OrderStatusRole.CANCELED payload = { "action": OrderDeliveryPdfAction().identifier, "values": [order1.pk, order2.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) order1.create_shipment_of_all_products(supplier) order1.save() order2.create_shipment_of_all_products(supplier) order2.save() request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_delivery_pdf.zip' else: assert response["content-type"] == "application/json"
suutari-ai/shoop
shuup_tests/admin/test_order_mass_actions.py
shuup_tests/utils/test_analog.py
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. from shuup.core.utils.name_mixin import NameMixin class Ahnuld(NameMixin): def __init__(self, first_name, last_name="", prefix="", suffix=""): self.first_name_str = first_name self.last_name_str = last_name self.name = "%s %s" % (first_name, last_name) self.prefix = prefix self.suffix = suffix def get_fullname(self): return "%s %s" % (self.first_name_str, self.last_name_str) def test_basic_name(): ahnuld = Ahnuld(first_name="Ahnuld", last_name="Strong") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ahnuld.get_fullname() def test_only_firstname(): ahnuld = Ahnuld(first_name="Ahnuld") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ahnuld.first_name # full_name should be first name def test_prefixes(): ahnuld = Ahnuld(first_name="Ahnuld", last_name="Strong", prefix="mr.") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ("%s %s" % (ahnuld.prefix, ahnuld.get_fullname())) def test_prefix_and_suffix(): ahnuld = Ahnuld(first_name="Ahnuld", last_name="Strong", prefix="mr.", suffix="the oak") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ("%s %s %s" % (ahnuld.prefix, ahnuld.get_fullname(), ahnuld.suffix)) def test_awkward_names(): ahnuld = Ahnuld(first_name="Ahnuld", last_name="Super Strong in The Sky") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ahnuld.get_fullname()
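The assertions above fully determine the mixin's observable behaviour, so here is an implementation inferred from them (the real shuup.core.utils.name_mixin.NameMixin may be written differently):

class NameMixinSketch(object):
    @property
    def full_name(self):
        # prefix + name + suffix, skipping empty parts
        return " ".join(p for p in (self.prefix, self.name, self.suffix) if p).strip()

    @property
    def first_name(self):
        return self.name.split(None, 1)[0] if self.name.strip() else ""

    @property
    def last_name(self):
        parts = self.name.split(None, 1)
        return parts[1] if len(parts) > 1 else ""

# Substituting NameMixinSketch for NameMixin in the Ahnuld helper above
# satisfies every assertion in this file.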
# This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. import json import pytest from shuup.admin.modules.orders.mass_actions import CancelOrderAction, OrderDeliveryPdfAction, \ OrderConfirmationPdfAction from shuup.admin.modules.orders.views import OrderListView from shuup.core.models import Order from shuup.core.models import OrderStatusRole from shuup.testing.factories import get_default_supplier, get_default_shop, create_random_order, create_product, \ create_random_person from shuup.testing.utils import apply_request_middleware from shuup_tests.utils import printable_gibberish try: import weasyprint except ImportError: weasyprint = None @pytest.mark.django_db def test_mass_edit_orders(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": CancelOrderAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 for order in Order.objects.all(): assert order.status.role == OrderStatusRole.CANCELED @pytest.mark.django_db def test_mass_edit_orders2(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": OrderConfirmationPdfAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_%s_confirmation.pdf' % order.pk else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders3(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order1 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) order2 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order1.status.role != OrderStatusRole.CANCELED assert order2.status.role != OrderStatusRole.CANCELED payload = { "action": OrderConfirmationPdfAction().identifier, "values": [order1.pk, order2.pk] } request = apply_request_middleware(rf.post( "/", 
user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_confirmation_pdf.zip' else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders4(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order.status.role != OrderStatusRole.CANCELED payload = { "action": OrderDeliveryPdfAction().identifier, "values": [order.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 assert response["content-type"] == "application/json" order.create_shipment_of_all_products(supplier) order.save() request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=shipment_%s_delivery.pdf' % order.pk else: assert response["content-type"] == "application/json" @pytest.mark.django_db def test_mass_edit_orders5(rf, admin_user): shop = get_default_shop() supplier = get_default_supplier() contact1 = create_random_person() product1 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50") product2 = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="501") order1 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) order2 = create_random_order(customer=contact1, products=[product1, product2], completion_probability=0) assert order1.status.role != OrderStatusRole.CANCELED assert order2.status.role != OrderStatusRole.CANCELED payload = { "action": OrderDeliveryPdfAction().identifier, "values": [order1.pk, order2.pk] } request = apply_request_middleware(rf.post( "/", user=admin_user, )) order1.create_shipment_of_all_products(supplier) order1.save() order2.create_shipment_of_all_products(supplier) order2.save() request._body = json.dumps(payload).encode("UTF-8") view = OrderListView.as_view() response = view(request=request) assert response.status_code == 200 if weasyprint: assert response['Content-Disposition'] == 'attachment; filename=order_delivery_pdf.zip' else: assert response["content-type"] == "application/json"
suutari-ai/shoop
shuup_tests/admin/test_order_mass_actions.py
shuup_tests/utils/test_namemixin.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django.contrib.postgres.fields class Migration(migrations.Migration): dependencies = [ ('userstories', '0006_auto_20141014_1524'), ] operations = [ migrations.AddField( model_name='userstory', name='external_reference', field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=False, null=False), blank=True, default=None, null=True, size=None, verbose_name='external reference'), preserve_default=True, ), ]
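For reference, a sketch of the model declaration this migration converges on; the field arguments are taken from the AddField above, while the class itself is a stand-in (the real UserStory model lives in taiga's userstories app and has many more fields):

from django.contrib.postgres.fields import ArrayField
from django.db import models

class UserStory(models.Model):   # stand-in; ArrayField requires PostgreSQL
    external_reference = ArrayField(
        base_field=models.TextField(), blank=True, default=None,
        null=True, verbose_name='external reference')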
# -*- coding: utf-8 -*-
# Copyright (C) 2014-present Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from django.urls import reverse

from taiga.base.utils import json
from taiga.projects import choices as project_choices
from taiga.projects.custom_attributes import serializers
from taiga.permissions.choices import (MEMBERS_PERMISSIONS, ANON_PERMISSIONS)

from tests import factories as f
from tests.utils import helper_test_http_method

import pytest

pytestmark = pytest.mark.django_db


@pytest.fixture
def data():
    m = type("Models", (object,), {})
    m.registered_user = f.UserFactory.create()
    m.project_member_with_perms = f.UserFactory.create()
    m.project_member_without_perms = f.UserFactory.create()
    m.project_owner = f.UserFactory.create()
    m.other_user = f.UserFactory.create()
    m.superuser = f.UserFactory.create(is_superuser=True)

    m.public_project = f.ProjectFactory(is_private=False,
                                        anon_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
                                        public_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
                                        owner=m.project_owner)
    m.private_project1 = f.ProjectFactory(is_private=True,
                                          anon_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
                                          public_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
                                          owner=m.project_owner)
    m.private_project2 = f.ProjectFactory(is_private=True,
                                          anon_permissions=[],
                                          public_permissions=[],
                                          owner=m.project_owner)
    m.blocked_project = f.ProjectFactory(is_private=True,
                                         anon_permissions=[],
                                         public_permissions=[],
                                         owner=m.project_owner,
                                         blocked_code=project_choices.BLOCKED_BY_STAFF)

    m.public_membership = f.MembershipFactory(
        project=m.public_project,
        user=m.project_member_with_perms,
        email=m.project_member_with_perms.email,
        role__project=m.public_project,
        role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
    m.private_membership1 = f.MembershipFactory(
        project=m.private_project1,
        user=m.project_member_with_perms,
        email=m.project_member_with_perms.email,
        role__project=m.private_project1,
        role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
    f.MembershipFactory(project=m.private_project1,
                        user=m.project_member_without_perms,
                        email=m.project_member_without_perms.email,
                        role__project=m.private_project1,
                        role__permissions=[])
    m.private_membership2 = f.MembershipFactory(
        project=m.private_project2,
        user=m.project_member_with_perms,
        email=m.project_member_with_perms.email,
        role__project=m.private_project2,
        role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
    f.MembershipFactory(project=m.private_project2,
                        user=m.project_member_without_perms,
                        email=m.project_member_without_perms.email,
                        role__project=m.private_project2,
                        role__permissions=[])
    m.blocked_membership = f.MembershipFactory(
        project=m.blocked_project,
        user=m.project_member_with_perms,
        email=m.project_member_with_perms.email,
        role__project=m.blocked_project,
        role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
    f.MembershipFactory(project=m.blocked_project,
                        user=m.project_member_without_perms,
                        email=m.project_member_without_perms.email,
                        role__project=m.blocked_project,
                        role__permissions=[])

    f.MembershipFactory(project=m.public_project,
                        user=m.project_owner,
                        is_admin=True)
    f.MembershipFactory(project=m.private_project1,
                        user=m.project_owner,
                        is_admin=True)
    f.MembershipFactory(project=m.private_project2,
                        user=m.project_owner,
                        is_admin=True)
    f.MembershipFactory(project=m.blocked_project,
                        user=m.project_owner,
                        is_admin=True)

    m.public_issue_ca = f.IssueCustomAttributeFactory(project=m.public_project)
    m.private_issue_ca1 = f.IssueCustomAttributeFactory(project=m.private_project1)
    m.private_issue_ca2 = f.IssueCustomAttributeFactory(project=m.private_project2)
    m.blocked_issue_ca = f.IssueCustomAttributeFactory(project=m.blocked_project)

    m.public_issue = f.IssueFactory(project=m.public_project,
                                    status__project=m.public_project,
                                    severity__project=m.public_project,
                                    priority__project=m.public_project,
                                    type__project=m.public_project,
                                    milestone__project=m.public_project)
    m.private_issue1 = f.IssueFactory(project=m.private_project1,
                                      status__project=m.private_project1,
                                      severity__project=m.private_project1,
                                      priority__project=m.private_project1,
                                      type__project=m.private_project1,
                                      milestone__project=m.private_project1)
    m.private_issue2 = f.IssueFactory(project=m.private_project2,
                                      status__project=m.private_project2,
                                      severity__project=m.private_project2,
                                      priority__project=m.private_project2,
                                      type__project=m.private_project2,
                                      milestone__project=m.private_project2)
    m.blocked_issue = f.IssueFactory(project=m.blocked_project,
                                     status__project=m.blocked_project,
                                     severity__project=m.blocked_project,
                                     priority__project=m.blocked_project,
                                     type__project=m.blocked_project,
                                     milestone__project=m.blocked_project)

    m.public_issue_cav = m.public_issue.custom_attributes_values
    m.private_issue_cav1 = m.private_issue1.custom_attributes_values
    m.private_issue_cav2 = m.private_issue2.custom_attributes_values
    m.blocked_issue_cav = m.blocked_issue.custom_attributes_values

    return m


#########################################################
# Issue Custom Attribute
#########################################################

def test_issue_custom_attribute_retrieve(client, data):
    public_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.public_issue_ca.pk})
    private1_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca1.pk})
    private2_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca2.pk})
    blocked_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.blocked_issue_ca.pk})

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'get', public_url, None, users)
    assert results == [200, 200, 200, 200, 200]
    results = helper_test_http_method(client, 'get', private1_url, None, users)
    assert results == [200, 200, 200, 200, 200]
    results = helper_test_http_method(client, 'get', private2_url, None, users)
    assert results == [401, 403, 403, 200, 200]
    results = helper_test_http_method(client, 'get', blocked_url, None, users)
    assert results == [401, 403, 403, 200, 200]


def test_issue_custom_attribute_create(client, data):
    public_url = reverse('issue-custom-attributes-list')
    private1_url = reverse('issue-custom-attributes-list')
    private2_url = reverse('issue-custom-attributes-list')
    blocked_url = reverse('issue-custom-attributes-list')

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    issue_ca_data = {"name": "test-new", "project": data.public_project.id}
    issue_ca_data = json.dumps(issue_ca_data)
    results = helper_test_http_method(client, 'post', public_url, issue_ca_data, users)
    assert results == [401, 403, 403, 403, 201]

    issue_ca_data = {"name": "test-new", "project": data.private_project1.id}
    issue_ca_data = json.dumps(issue_ca_data)
    results = helper_test_http_method(client, 'post', private1_url, issue_ca_data, users)
    assert results == [401, 403, 403, 403, 201]

    issue_ca_data = {"name": "test-new", "project": data.private_project2.id}
    issue_ca_data = json.dumps(issue_ca_data)
    results = helper_test_http_method(client, 'post', private2_url, issue_ca_data, users)
    assert results == [401, 403, 403, 403, 201]

    issue_ca_data = {"name": "test-new", "project": data.blocked_project.id}
    issue_ca_data = json.dumps(issue_ca_data)
    results = helper_test_http_method(client, 'post', blocked_url, issue_ca_data, users)
    assert results == [401, 403, 403, 403, 451]


def test_issue_custom_attribute_update(client, data):
    public_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.public_issue_ca.pk})
    private1_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca1.pk})
    private2_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca2.pk})
    blocked_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.blocked_issue_ca.pk})

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    issue_ca_data = serializers.IssueCustomAttributeSerializer(data.public_issue_ca).data
    issue_ca_data["name"] = "test"
    issue_ca_data = json.dumps(issue_ca_data)
    results = helper_test_http_method(client, 'put', public_url, issue_ca_data, users)
    assert results == [401, 403, 403, 403, 200]

    issue_ca_data = serializers.IssueCustomAttributeSerializer(data.private_issue_ca1).data
    issue_ca_data["name"] = "test"
    issue_ca_data = json.dumps(issue_ca_data)
    results = helper_test_http_method(client, 'put', private1_url, issue_ca_data, users)
    assert results == [401, 403, 403, 403, 200]

    issue_ca_data = serializers.IssueCustomAttributeSerializer(data.private_issue_ca2).data
    issue_ca_data["name"] = "test"
    issue_ca_data = json.dumps(issue_ca_data)
    results = helper_test_http_method(client, 'put', private2_url, issue_ca_data, users)
    assert results == [401, 403, 403, 403, 200]

    issue_ca_data = serializers.IssueCustomAttributeSerializer(data.blocked_issue_ca).data
    issue_ca_data["name"] = "test"
    issue_ca_data = json.dumps(issue_ca_data)
    results = helper_test_http_method(client, 'put', blocked_url, issue_ca_data, users)
    assert results == [401, 403, 403, 403, 451]


def test_issue_custom_attribute_delete(client, data):
    public_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.public_issue_ca.pk})
    private1_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca1.pk})
    private2_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca2.pk})
    blocked_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.blocked_issue_ca.pk})

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'delete', public_url, None, users)
    assert results == [401, 403, 403, 403, 204]
    results = helper_test_http_method(client, 'delete', private1_url, None, users)
    assert results == [401, 403, 403, 403, 204]
    results = helper_test_http_method(client, 'delete', private2_url, None, users)
    assert results == [401, 403, 403, 403, 204]
    results = helper_test_http_method(client, 'delete', blocked_url, None, users)
    assert results == [401, 403, 403, 403, 451]


def test_issue_custom_attribute_list(client, data):
    url = reverse('issue-custom-attributes-list')

    response = client.json.get(url)
    assert len(response.data) == 2
    assert response.status_code == 200

    client.login(data.registered_user)
    response = client.json.get(url)
    assert len(response.data) == 2
    assert response.status_code == 200

    client.login(data.project_member_without_perms)
    response = client.json.get(url)
    assert len(response.data) == 2
    assert response.status_code == 200

    client.login(data.project_member_with_perms)
    response = client.json.get(url)
    assert len(response.data) == 4
    assert response.status_code == 200

    client.login(data.project_owner)
    response = client.json.get(url)
    assert len(response.data) == 4
    assert response.status_code == 200


def test_issue_custom_attribute_patch(client, data):
    public_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.public_issue_ca.pk})
    private1_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca1.pk})
    private2_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca2.pk})
    blocked_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.blocked_issue_ca.pk})

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
    assert results == [401, 403, 403, 403, 200]
    results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
    assert results == [401, 403, 403, 403, 200]
    results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
    assert results == [401, 403, 403, 403, 200]
    results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
    assert results == [401, 403, 403, 403, 451]


def test_issue_custom_attribute_action_bulk_update_order(client, data):
    url = reverse('issue-custom-attributes-bulk-update-order')

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    post_data = json.dumps({
        "bulk_issue_custom_attributes": [(1, 2)],
        "project": data.public_project.pk
    })
    results = helper_test_http_method(client, 'post', url, post_data, users)
    assert results == [401, 403, 403, 403, 204]

    post_data = json.dumps({
        "bulk_issue_custom_attributes": [(1, 2)],
        "project": data.private_project1.pk
    })
    results = helper_test_http_method(client, 'post', url, post_data, users)
    assert results == [401, 403, 403, 403, 204]

    post_data = json.dumps({
        "bulk_issue_custom_attributes": [(1, 2)],
        "project": data.private_project2.pk
    })
    results = helper_test_http_method(client, 'post', url, post_data, users)
    assert results == [401, 403, 403, 403, 204]

    post_data = json.dumps({
        "bulk_issue_custom_attributes": [(1, 2)],
        "project": data.blocked_project.pk
    })
    results = helper_test_http_method(client, 'post', url, post_data, users)
    assert results == [401, 403, 403, 403, 451]


#########################################################
# Issue Custom Attributes Values
#########################################################

def test_issue_custom_attributes_values_retrieve(client, data):
    public_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.public_issue.pk})
    private_url1 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue1.pk})
    private_url2 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue2.pk})
    blocked_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.blocked_issue.pk})

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'get', public_url, None, users)
    assert results == [200, 200, 200, 200, 200]
    results = helper_test_http_method(client, 'get', private_url1, None, users)
    assert results == [200, 200, 200, 200, 200]
    results = helper_test_http_method(client, 'get', private_url2, None, users)
    assert results == [401, 403, 403, 200, 200]
    results = helper_test_http_method(client, 'get', blocked_url, None, users)
    assert results == [401, 403, 403, 200, 200]


def test_issue_custom_attributes_values_update(client, data):
    public_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.public_issue.pk})
    private_url1 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue1.pk})
    private_url2 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue2.pk})
    blocked_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.blocked_issue.pk})

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    issue_data = serializers.IssueCustomAttributesValuesSerializer(data.public_issue_cav).data
    issue_data["attributes_values"] = {str(data.public_issue_ca.pk): "test"}
    issue_data = json.dumps(issue_data)
    results = helper_test_http_method(client, 'put', public_url, issue_data, users)
    assert results == [401, 403, 403, 200, 200]

    issue_data = serializers.IssueCustomAttributesValuesSerializer(data.private_issue_cav1).data
    issue_data["attributes_values"] = {str(data.private_issue_ca1.pk): "test"}
    issue_data = json.dumps(issue_data)
    results = helper_test_http_method(client, 'put', private_url1, issue_data, users)
    assert results == [401, 403, 403, 200, 200]

    issue_data = serializers.IssueCustomAttributesValuesSerializer(data.private_issue_cav2).data
    issue_data["attributes_values"] = {str(data.private_issue_ca2.pk): "test"}
    issue_data = json.dumps(issue_data)
    results = helper_test_http_method(client, 'put', private_url2, issue_data, users)
    assert results == [401, 403, 403, 200, 200]

    issue_data = serializers.IssueCustomAttributesValuesSerializer(data.blocked_issue_cav).data
    issue_data["attributes_values"] = {str(data.blocked_issue_ca.pk): "test"}
    issue_data = json.dumps(issue_data)
    results = helper_test_http_method(client, 'put', blocked_url, issue_data, users)
    assert results == [401, 403, 403, 451, 451]


def test_issue_custom_attributes_values_patch(client, data):
    public_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.public_issue.pk})
    private_url1 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue1.pk})
    private_url2 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue2.pk})
    blocked_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.blocked_issue.pk})

    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner
    ]

    patch_data = json.dumps({"attributes_values": {str(data.public_issue_ca.pk): "test"},
                             "version": data.public_issue.version})
    results = helper_test_http_method(client, 'patch', public_url, patch_data, users)
    assert results == [401, 403, 403, 200, 200]

    patch_data = json.dumps({"attributes_values": {str(data.private_issue_ca1.pk): "test"},
                             "version": data.private_issue1.version})
    results = helper_test_http_method(client, 'patch', private_url1, patch_data, users)
    assert results == [401, 403, 403, 200, 200]

    patch_data = json.dumps({"attributes_values": {str(data.private_issue_ca2.pk): "test"},
                             "version": data.private_issue2.version})
    results = helper_test_http_method(client, 'patch', private_url2, patch_data, users)
    assert results == [401, 403, 403, 200, 200]

    patch_data = json.dumps({"attributes_values": {str(data.blocked_issue_ca.pk): "test"},
                             "version": data.blocked_issue.version})
    results = helper_test_http_method(client, 'patch', blocked_url, patch_data, users)
    assert results == [401, 403, 403, 451, 451]
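The permission matrices above read one status code per entry in `users`, in order, with None meaning an anonymous request. A hedged sketch of the helper's contract follows; the real `helper_test_http_method` lives in tests/utils.py and may differ in details.

# Hedged sketch of helper_test_http_method's contract (the real helper
# is in tests/utils.py): for each user, authenticate (None = anonymous),
# perform the request, and collect the resulting status codes.
def helper_test_http_method_sketch(client, method, url, data, users):
    results = []
    for user in users:
        client.logout()
        if user is not None:
            client.login(user)
        if data is None:
            response = getattr(client.json, method)(url)
        else:
            response = getattr(client.json, method)(url, data)
        results.append(response.status_code)
    return results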
taigaio/taiga-back
tests/integration/resources_permissions/test_issues_custom_attributes_resource.py
taiga/projects/userstories/migrations/0007_userstory_external_reference.py
# Generated by Django 2.2.12 on 2020-06-15 08:11

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('notifications', '0008_auto_20181010_1124'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notifypolicy',
            name='web_notify_level',
            field=models.BooleanField(blank=True, default=True),
        ),
    ]
taigaio/taiga-back
tests/integration/resources_permissions/test_issues_custom_attributes_resource.py
taiga/projects/notifications/migrations/0009_auto_20200615_0811.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import datetime

from django.db import connection, migrations, models
from django.utils.timezone import utc


def update_totals(apps, schema_editor):
    model = apps.get_model("projects", "Project")
    content_type = apps.get_model("contenttypes", "ContentType").objects.get_for_model(model)
    sql = """
UPDATE projects_project
SET totals_updated_datetime = totals.totals_updated_datetime,
    total_fans = totals.total_fans,
    total_fans_last_week = totals.total_fans_last_week,
    total_fans_last_month = totals.total_fans_last_month,
    total_fans_last_year = totals.total_fans_last_year,
    total_activity = totals.total_activity,
    total_activity_last_week = totals.total_activity_last_week,
    total_activity_last_month = totals.total_activity_last_month,
    total_activity_last_year = totals.total_activity_last_year
FROM (
    WITH totals_activity AS (
        SELECT split_part(timeline_timeline.namespace, ':', 2)::integer AS project_id,
               count(timeline_timeline.namespace) total_activity,
               MAX(created) updated_datetime
        FROM timeline_timeline
        WHERE namespace LIKE 'project:%'
        GROUP BY namespace),
    totals_activity_week AS (
        SELECT split_part(timeline_timeline.namespace, ':', 2)::integer AS project_id,
               count(timeline_timeline.namespace) total_activity_last_week
        FROM timeline_timeline
        WHERE namespace LIKE 'project:%'
          AND timeline_timeline.created > current_date - interval '7' day
        GROUP BY namespace),
    totals_activity_month AS (
        SELECT split_part(timeline_timeline.namespace, ':', 2)::integer AS project_id,
               count(timeline_timeline.namespace) total_activity_last_month
        FROM timeline_timeline
        WHERE namespace LIKE 'project:%'
          AND timeline_timeline.created > current_date - interval '30' day
        GROUP BY namespace),
    totals_activity_year AS (
        SELECT split_part(timeline_timeline.namespace, ':', 2)::integer AS project_id,
               count(timeline_timeline.namespace) total_activity_last_year
        FROM timeline_timeline
        WHERE namespace LIKE 'project:%'
          AND timeline_timeline.created > current_date - interval '365' day
        GROUP BY namespace),
    totals_fans AS (
        SELECT object_id AS project_id,
               COUNT(likes_like.object_id) total_fans,
               MAX(created_date) updated_datetime
        FROM likes_like
        WHERE content_type_id = {type_id}
        GROUP BY object_id),
    totals_fans_week AS (
        SELECT object_id AS project_id,
               COUNT(likes_like.object_id) total_fans_last_week
        FROM likes_like
        WHERE content_type_id = {type_id}
          AND likes_like.created_date > current_date - interval '7' day
        GROUP BY object_id),
    totals_fans_month AS (
        SELECT object_id AS project_id,
               COUNT(likes_like.object_id) total_fans_last_month
        FROM likes_like
        WHERE content_type_id = {type_id}
          AND likes_like.created_date > current_date - interval '30' day
        GROUP BY object_id),
    totals_fans_year AS (
        SELECT object_id AS project_id,
               COUNT(likes_like.object_id) total_fans_last_year
        FROM likes_like
        WHERE content_type_id = {type_id}
          AND likes_like.created_date > current_date - interval '365' day
        GROUP BY object_id)
    SELECT totals_activity.project_id,
           COALESCE(total_activity, 0) total_activity,
           COALESCE(total_activity_last_week, 0) total_activity_last_week,
           COALESCE(total_activity_last_month, 0) total_activity_last_month,
           COALESCE(total_activity_last_year, 0) total_activity_last_year,
           COALESCE(total_fans, 0) total_fans,
           COALESCE(total_fans_last_week, 0) total_fans_last_week,
           COALESCE(total_fans_last_month, 0) total_fans_last_month,
           COALESCE(total_fans_last_year, 0) total_fans_last_year,
           totals_activity.updated_datetime totals_updated_datetime
    FROM totals_activity
    LEFT JOIN totals_fans ON totals_activity.project_id = totals_fans.project_id
    LEFT JOIN totals_fans_week ON totals_activity.project_id = totals_fans_week.project_id
    LEFT JOIN totals_fans_month ON totals_activity.project_id = totals_fans_month.project_id
    LEFT JOIN totals_fans_year ON totals_activity.project_id = totals_fans_year.project_id
    LEFT JOIN totals_activity_week ON totals_activity.project_id = totals_activity_week.project_id
    LEFT JOIN totals_activity_month ON totals_activity.project_id = totals_activity_month.project_id
    LEFT JOIN totals_activity_year ON totals_activity.project_id = totals_activity_year.project_id
) totals
WHERE projects_project.id = totals.project_id
""".format(type_id=content_type.id)
    cursor = connection.cursor()
    cursor.execute(sql)


class Migration(migrations.Migration):

    dependencies = [
        ('projects', '0029_project_is_looking_for_people'),
        ('likes', '0001_initial'),
        ('timeline', '0004_auto_20150603_1312'),
    ]

    operations = [
        migrations.AddField(
            model_name='project',
            name='total_activity',
            field=models.PositiveIntegerField(default=0, verbose_name='count', db_index=True),
        ),
        migrations.AddField(
            model_name='project',
            name='total_activity_last_month',
            field=models.PositiveIntegerField(default=0, verbose_name='activity last month', db_index=True),
        ),
        migrations.AddField(
            model_name='project',
            name='total_activity_last_week',
            field=models.PositiveIntegerField(default=0, verbose_name='activity last week', db_index=True),
        ),
        migrations.AddField(
            model_name='project',
            name='total_activity_last_year',
            field=models.PositiveIntegerField(default=0, verbose_name='activity last year', db_index=True),
        ),
        migrations.AddField(
            model_name='project',
            name='total_fans',
            field=models.PositiveIntegerField(default=0, verbose_name='count', db_index=True),
        ),
        migrations.AddField(
            model_name='project',
            name='total_fans_last_month',
            field=models.PositiveIntegerField(default=0, verbose_name='fans last month', db_index=True),
        ),
        migrations.AddField(
            model_name='project',
            name='total_fans_last_week',
            field=models.PositiveIntegerField(default=0, verbose_name='fans last week', db_index=True),
        ),
        migrations.AddField(
            model_name='project',
            name='total_fans_last_year',
            field=models.PositiveIntegerField(default=0, verbose_name='fans last year', db_index=True),
        ),
        migrations.AddField(
            model_name='project',
            name='totals_updated_datetime',
            field=models.DateTimeField(default=datetime.datetime(2015, 11, 28, 7, 57, 11, 743976, tzinfo=utc),
                                       auto_now_add=True, verbose_name='updated date time', db_index=True),
            preserve_default=False,
        ),
        migrations.RunPython(update_totals),
    ]
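One refinement worth noting, offered as a suggestion rather than as part of the original file: `update_totals` only recomputes derived counters, so the `RunPython` step can be made reversible with Django's built-in no-op.

# Suggested variant (not the original code): a no-op reverse function
# lets the migration be unapplied cleanly; the totals need no undo.
migrations.RunPython(update_totals, migrations.RunPython.noop)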
# -*- coding: utf-8 -*- # Copyright (C) 2014-present Taiga Agile LLC # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.urls import reverse from taiga.base.utils import json from taiga.projects import choices as project_choices from taiga.projects.custom_attributes import serializers from taiga.permissions.choices import (MEMBERS_PERMISSIONS, ANON_PERMISSIONS) from tests import factories as f from tests.utils import helper_test_http_method import pytest pytestmark = pytest.mark.django_db @pytest.fixture def data(): m = type("Models", (object,), {}) m.registered_user = f.UserFactory.create() m.project_member_with_perms = f.UserFactory.create() m.project_member_without_perms = f.UserFactory.create() m.project_owner = f.UserFactory.create() m.other_user = f.UserFactory.create() m.superuser = f.UserFactory.create(is_superuser=True) m.public_project = f.ProjectFactory(is_private=False, anon_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)), public_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)), owner=m.project_owner) m.private_project1 = f.ProjectFactory(is_private=True, anon_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)), public_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)), owner=m.project_owner) m.private_project2 = f.ProjectFactory(is_private=True, anon_permissions=[], public_permissions=[], owner=m.project_owner) m.blocked_project = f.ProjectFactory(is_private=True, anon_permissions=[], public_permissions=[], owner=m.project_owner, blocked_code=project_choices.BLOCKED_BY_STAFF) m.public_membership = f.MembershipFactory(project=m.public_project, user=m.project_member_with_perms, email=m.project_member_with_perms.email, role__project=m.public_project, role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS))) m.private_membership1 = f.MembershipFactory(project=m.private_project1, user=m.project_member_with_perms, email=m.project_member_with_perms.email, role__project=m.private_project1, role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS))) f.MembershipFactory(project=m.private_project1, user=m.project_member_without_perms, email=m.project_member_without_perms.email, role__project=m.private_project1, role__permissions=[]) m.private_membership2 = f.MembershipFactory(project=m.private_project2, user=m.project_member_with_perms, email=m.project_member_with_perms.email, role__project=m.private_project2, role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS))) f.MembershipFactory(project=m.private_project2, user=m.project_member_without_perms, email=m.project_member_without_perms.email, role__project=m.private_project2, role__permissions=[]) m.blocked_membership = f.MembershipFactory(project=m.blocked_project, user=m.project_member_with_perms, email=m.project_member_with_perms.email, role__project=m.blocked_project, role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS))) f.MembershipFactory(project=m.blocked_project, 
user=m.project_member_without_perms, email=m.project_member_without_perms.email, role__project=m.blocked_project, role__permissions=[]) f.MembershipFactory(project=m.public_project, user=m.project_owner, is_admin=True) f.MembershipFactory(project=m.private_project1, user=m.project_owner, is_admin=True) f.MembershipFactory(project=m.private_project2, user=m.project_owner, is_admin=True) f.MembershipFactory(project=m.blocked_project, user=m.project_owner, is_admin=True) m.public_issue_ca = f.IssueCustomAttributeFactory(project=m.public_project) m.private_issue_ca1 = f.IssueCustomAttributeFactory(project=m.private_project1) m.private_issue_ca2 = f.IssueCustomAttributeFactory(project=m.private_project2) m.blocked_issue_ca = f.IssueCustomAttributeFactory(project=m.blocked_project) m.public_issue = f.IssueFactory(project=m.public_project, status__project=m.public_project, severity__project=m.public_project, priority__project=m.public_project, type__project=m.public_project, milestone__project=m.public_project) m.private_issue1 = f.IssueFactory(project=m.private_project1, status__project=m.private_project1, severity__project=m.private_project1, priority__project=m.private_project1, type__project=m.private_project1, milestone__project=m.private_project1) m.private_issue2 = f.IssueFactory(project=m.private_project2, status__project=m.private_project2, severity__project=m.private_project2, priority__project=m.private_project2, type__project=m.private_project2, milestone__project=m.private_project2) m.blocked_issue = f.IssueFactory(project=m.blocked_project, status__project=m.blocked_project, severity__project=m.blocked_project, priority__project=m.blocked_project, type__project=m.blocked_project, milestone__project=m.blocked_project) m.public_issue_cav = m.public_issue.custom_attributes_values m.private_issue_cav1 = m.private_issue1.custom_attributes_values m.private_issue_cav2 = m.private_issue2.custom_attributes_values m.blocked_issue_cav = m.blocked_issue.custom_attributes_values return m ######################################################### # Issue Custom Attribute ######################################################### def test_issue_custom_attribute_retrieve(client, data): public_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.public_issue_ca.pk}) private1_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca1.pk}) private2_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca2.pk}) blocked_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.blocked_issue_ca.pk}) users = [ None, data.registered_user, data.project_member_without_perms, data.project_member_with_perms, data.project_owner ] results = helper_test_http_method(client, 'get', public_url, None, users) assert results == [200, 200, 200, 200, 200] results = helper_test_http_method(client, 'get', private1_url, None, users) assert results == [200, 200, 200, 200, 200] results = helper_test_http_method(client, 'get', private2_url, None, users) assert results == [401, 403, 403, 200, 200] results = helper_test_http_method(client, 'get', blocked_url, None, users) assert results == [401, 403, 403, 200, 200] def test_issue_custom_attribute_create(client, data): public_url = reverse('issue-custom-attributes-list') private1_url = reverse('issue-custom-attributes-list') private2_url = reverse('issue-custom-attributes-list') blocked_url = reverse('issue-custom-attributes-list') users = [ None, data.registered_user, data.project_member_without_perms, 
data.project_member_with_perms, data.project_owner ] issue_ca_data = {"name": "test-new", "project": data.public_project.id} issue_ca_data = json.dumps(issue_ca_data) results = helper_test_http_method(client, 'post', public_url, issue_ca_data, users) assert results == [401, 403, 403, 403, 201] issue_ca_data = {"name": "test-new", "project": data.private_project1.id} issue_ca_data = json.dumps(issue_ca_data) results = helper_test_http_method(client, 'post', private1_url, issue_ca_data, users) assert results == [401, 403, 403, 403, 201] issue_ca_data = {"name": "test-new", "project": data.private_project2.id} issue_ca_data = json.dumps(issue_ca_data) results = helper_test_http_method(client, 'post', private2_url, issue_ca_data, users) assert results == [401, 403, 403, 403, 201] issue_ca_data = {"name": "test-new", "project": data.blocked_project.id} issue_ca_data = json.dumps(issue_ca_data) results = helper_test_http_method(client, 'post', private2_url, issue_ca_data, users) assert results == [401, 403, 403, 403, 451] def test_issue_custom_attribute_update(client, data): public_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.public_issue_ca.pk}) private1_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca1.pk}) private2_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca2.pk}) blocked_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.blocked_issue_ca.pk}) users = [ None, data.registered_user, data.project_member_without_perms, data.project_member_with_perms, data.project_owner ] issue_ca_data = serializers.IssueCustomAttributeSerializer(data.public_issue_ca).data issue_ca_data["name"] = "test" issue_ca_data = json.dumps(issue_ca_data) results = helper_test_http_method(client, 'put', public_url, issue_ca_data, users) assert results == [401, 403, 403, 403, 200] issue_ca_data = serializers.IssueCustomAttributeSerializer(data.private_issue_ca1).data issue_ca_data["name"] = "test" issue_ca_data = json.dumps(issue_ca_data) results = helper_test_http_method(client, 'put', private1_url, issue_ca_data, users) assert results == [401, 403, 403, 403, 200] issue_ca_data = serializers.IssueCustomAttributeSerializer(data.private_issue_ca2).data issue_ca_data["name"] = "test" issue_ca_data = json.dumps(issue_ca_data) results = helper_test_http_method(client, 'put', private2_url, issue_ca_data, users) assert results == [401, 403, 403, 403, 200] issue_ca_data = serializers.IssueCustomAttributeSerializer(data.blocked_issue_ca).data issue_ca_data["name"] = "test" issue_ca_data = json.dumps(issue_ca_data) results = helper_test_http_method(client, 'put', blocked_url, issue_ca_data, users) assert results == [401, 403, 403, 403, 451] def test_issue_custom_attribute_delete(client, data): public_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.public_issue_ca.pk}) private1_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca1.pk}) private2_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca2.pk}) blocked_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.blocked_issue_ca.pk}) users = [ None, data.registered_user, data.project_member_without_perms, data.project_member_with_perms, data.project_owner ] results = helper_test_http_method(client, 'delete', public_url, None, users) assert results == [401, 403, 403, 403, 204] results = helper_test_http_method(client, 'delete', private1_url, None, users) assert results == [401, 403, 
403, 403, 204] results = helper_test_http_method(client, 'delete', private2_url, None, users) assert results == [401, 403, 403, 403, 204] results = helper_test_http_method(client, 'delete', blocked_url, None, users) assert results == [401, 403, 403, 403, 451] def test_issue_custom_attribute_list(client, data): url = reverse('issue-custom-attributes-list') response = client.json.get(url) assert len(response.data) == 2 assert response.status_code == 200 client.login(data.registered_user) response = client.json.get(url) assert len(response.data) == 2 assert response.status_code == 200 client.login(data.project_member_without_perms) response = client.json.get(url) assert len(response.data) == 2 assert response.status_code == 200 client.login(data.project_member_with_perms) response = client.json.get(url) assert len(response.data) == 4 assert response.status_code == 200 client.login(data.project_owner) response = client.json.get(url) assert len(response.data) == 4 assert response.status_code == 200 def test_issue_custom_attribute_patch(client, data): public_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.public_issue_ca.pk}) private1_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca1.pk}) private2_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.private_issue_ca2.pk}) blocked_url = reverse('issue-custom-attributes-detail', kwargs={"pk": data.blocked_issue_ca.pk}) users = [ None, data.registered_user, data.project_member_without_perms, data.project_member_with_perms, data.project_owner ] results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users) assert results == [401, 403, 403, 403, 200] results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users) assert results == [401, 403, 403, 403, 200] results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users) assert results == [401, 403, 403, 403, 200] results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users) assert results == [401, 403, 403, 403, 451] def test_issue_custom_attribute_action_bulk_update_order(client, data): url = reverse('issue-custom-attributes-bulk-update-order') users = [ None, data.registered_user, data.project_member_without_perms, data.project_member_with_perms, data.project_owner ] post_data = json.dumps({ "bulk_issue_custom_attributes": [(1,2)], "project": data.public_project.pk }) results = helper_test_http_method(client, 'post', url, post_data, users) assert results == [401, 403, 403, 403, 204] post_data = json.dumps({ "bulk_issue_custom_attributes": [(1,2)], "project": data.private_project1.pk }) results = helper_test_http_method(client, 'post', url, post_data, users) assert results == [401, 403, 403, 403, 204] post_data = json.dumps({ "bulk_issue_custom_attributes": [(1,2)], "project": data.private_project2.pk }) results = helper_test_http_method(client, 'post', url, post_data, users) assert results == [401, 403, 403, 403, 204] post_data = json.dumps({ "bulk_issue_custom_attributes": [(1,2)], "project": data.blocked_project.pk }) results = helper_test_http_method(client, 'post', url, post_data, users) assert results == [401, 403, 403, 403, 451] ######################################################### # Issue Custom Attribute ######################################################### def test_issue_custom_attributes_values_retrieve(client, data): public_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": 
data.public_issue.pk}) private_url1 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue1.pk}) private_url2 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue2.pk}) blocked_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.blocked_issue.pk}) users = [ None, data.registered_user, data.project_member_without_perms, data.project_member_with_perms, data.project_owner ] results = helper_test_http_method(client, 'get', public_url, None, users) assert results == [200, 200, 200, 200, 200] results = helper_test_http_method(client, 'get', private_url1, None, users) assert results == [200, 200, 200, 200, 200] results = helper_test_http_method(client, 'get', private_url2, None, users) assert results == [401, 403, 403, 200, 200] results = helper_test_http_method(client, 'get', blocked_url, None, users) assert results == [401, 403, 403, 200, 200] def test_issue_custom_attributes_values_update(client, data): public_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.public_issue.pk}) private_url1 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue1.pk}) private_url2 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue2.pk}) blocked_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.blocked_issue.pk}) users = [ None, data.registered_user, data.project_member_without_perms, data.project_member_with_perms, data.project_owner ] issue_data = serializers.IssueCustomAttributesValuesSerializer(data.public_issue_cav).data issue_data["attributes_values"] = {str(data.public_issue_ca.pk): "test"} issue_data = json.dumps(issue_data) results = helper_test_http_method(client, 'put', public_url, issue_data, users) assert results == [401, 403, 403, 200, 200] issue_data = serializers.IssueCustomAttributesValuesSerializer(data.private_issue_cav1).data issue_data["attributes_values"] = {str(data.private_issue_ca1.pk): "test"} issue_data = json.dumps(issue_data) results = helper_test_http_method(client, 'put', private_url1, issue_data, users) assert results == [401, 403, 403, 200, 200] issue_data = serializers.IssueCustomAttributesValuesSerializer(data.private_issue_cav2).data issue_data["attributes_values"] = {str(data.private_issue_ca2.pk): "test"} issue_data = json.dumps(issue_data) results = helper_test_http_method(client, 'put', private_url2, issue_data, users) assert results == [401, 403, 403, 200, 200] issue_data = serializers.IssueCustomAttributesValuesSerializer(data.blocked_issue_cav).data issue_data["attributes_values"] = {str(data.blocked_issue_ca.pk): "test"} issue_data = json.dumps(issue_data) results = helper_test_http_method(client, 'put', blocked_url, issue_data, users) assert results == [401, 403, 403, 451, 451] def test_issue_custom_attributes_values_patch(client, data): public_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.public_issue.pk}) private_url1 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue1.pk}) private_url2 = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.private_issue2.pk}) blocked_url = reverse('issue-custom-attributes-values-detail', kwargs={"issue_id": data.blocked_issue.pk}) users = [ None, data.registered_user, data.project_member_without_perms, data.project_member_with_perms, data.project_owner ] patch_data = json.dumps({"attributes_values": 
{str(data.public_issue_ca.pk): "test"}, "version": data.public_issue.version}) results = helper_test_http_method(client, 'patch', public_url, patch_data, users) assert results == [401, 403, 403, 200, 200] patch_data = json.dumps({"attributes_values": {str(data.private_issue_ca1.pk): "test"}, "version": data.private_issue1.version}) results = helper_test_http_method(client, 'patch', private_url1, patch_data, users) assert results == [401, 403, 403, 200, 200] patch_data = json.dumps({"attributes_values": {str(data.private_issue_ca2.pk): "test"}, "version": data.private_issue2.version}) results = helper_test_http_method(client, 'patch', private_url2, patch_data, users) assert results == [401, 403, 403, 200, 200] patch_data = json.dumps({"attributes_values": {str(data.blocked_issue_ca.pk): "test"}, "version": data.blocked_issue.version}) results = helper_test_http_method(client, 'patch', blocked_url, patch_data, users) assert results == [401, 403, 403, 451, 451]
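# ---------------------------------------------------------------------
# Illustrative sketch (hypothetical, not the project's real helper): the
# tests above all funnel through helper_test_http_method, which logs in
# as each user in turn (None meaning anonymous), performs the request and
# collects the status codes that are asserted against the expected
# permission outcomes (401 anonymous, 403 forbidden, 200/204 allowed,
# 451 project blocked). A minimal version could look like this:
def helper_test_http_method_sketch(client, method, url, data, users):
    results = []
    for user in users:
        client.logout()
        if user is not None:
            client.login(user)
        http_method = getattr(client.json, method)
        response = http_method(url, data) if data is not None else http_method(url)
        results.append(response.status_code)
    client.logout()
    return results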
taigaio/taiga-back
tests/integration/resources_permissions/test_issues_custom_attributes_resource.py
taiga/projects/migrations/0030_auto_20151128_0757.py
"""Helpers for filesystem-dependent tests. """ import os import socket import subprocess import sys from functools import partial from itertools import chain from .path import Path def make_socket_file(path): # Socket paths are limited to 108 characters (sometimes less) so we # chdir before creating it and use a relative path name. cwd = os.getcwd() os.chdir(os.path.dirname(path)) try: sock = socket.socket(socket.AF_UNIX) sock.bind(os.path.basename(path)) finally: os.chdir(cwd) def make_unreadable_file(path): Path(path).touch() os.chmod(path, 0o000) if sys.platform == "win32": # Once we drop PY2 we can use `os.getlogin()` instead. username = os.environ["USERNAME"] # Remove "Read Data/List Directory" permission for current user, but # leave everything else. args = ["icacls", path, "/deny", username + ":(RD)"] subprocess.check_call(args) def get_filelist(base): def join(dirpath, dirnames, filenames): relative_dirpath = os.path.relpath(dirpath, base) join_dirpath = partial(os.path.join, relative_dirpath) return chain( (join_dirpath(p) for p in dirnames), (join_dirpath(p) for p in filenames), ) return set(chain.from_iterable(join(*dirinfo) for dirinfo in os.walk(base)))
import pytest from pip._internal.vcs.subversion import Subversion from pip._internal.vcs.versioncontrol import RemoteNotFoundError from tests.lib import _create_svn_repo, need_svn @need_svn def test_get_remote_url__no_remote(script, tmpdir): repo_dir = tmpdir / 'temp-repo' repo_dir.mkdir() repo_dir = str(repo_dir) _create_svn_repo(script, repo_dir) with pytest.raises(RemoteNotFoundError): Subversion().get_remote_url(repo_dir) @need_svn def test_get_remote_url__no_remote_with_setup(script, tmpdir): repo_dir = tmpdir / 'temp-repo' repo_dir.mkdir() setup = repo_dir / "setup.py" setup.touch() repo_dir = str(repo_dir) _create_svn_repo(script, repo_dir) with pytest.raises(RemoteNotFoundError): Subversion().get_remote_url(repo_dir)
pfmoore/pip
tests/functional/test_vcs_subversion.py
tests/lib/filesystem.py
from threading import Event, Thread, current_thread
from time import time
from warnings import warn
import atexit

__all__ = ["TMonitor", "TqdmSynchronisationWarning"]


class TqdmSynchronisationWarning(RuntimeWarning):
    """tqdm multi-thread/-process errors which may cause incorrect nesting
    but otherwise no adverse effects"""
    pass


class TMonitor(Thread):
    """
    Monitoring thread for tqdm bars.
    Monitors if tqdm bars are taking too much time to display
    and readjusts miniters automatically if necessary.

    Parameters
    ----------
    tqdm_cls  : class
        tqdm class to use (can be core tqdm or a submodule).
    sleep_interval  : float
        Time to sleep between monitoring checks.
    """

    # internal vars for unit testing
    _time = None
    _event = None

    def __init__(self, tqdm_cls, sleep_interval):
        Thread.__init__(self)
        self.daemon = True  # kill thread when main killed (KeyboardInterrupt)
        self.was_killed = Event()
        self.woken = 0  # last time woken up, to sync with monitor
        self.tqdm_cls = tqdm_cls
        self.sleep_interval = sleep_interval
        if TMonitor._time is not None:
            self._time = TMonitor._time
        else:
            self._time = time
        if TMonitor._event is not None:
            self._event = TMonitor._event
        else:
            self._event = Event
        atexit.register(self.exit)
        self.start()

    def exit(self):
        self.was_killed.set()
        if self is not current_thread():
            self.join()
        return self.report()

    def get_instances(self):
        # returns a copy of started `tqdm_cls` instances
        return [i for i in self.tqdm_cls._instances.copy()
                # Avoid race by checking that the instance started
                if hasattr(i, 'start_t')]

    def run(self):
        cur_t = self._time()
        while True:
            # After processing and before sleeping, notify that we woke
            # Need to be done just before sleeping
            self.woken = cur_t
            # Sleep some time...
            self.was_killed.wait(self.sleep_interval)
            # Quit if killed
            if self.was_killed.is_set():
                return
            # Then monitor!
            # Acquire lock (to access _instances)
            with self.tqdm_cls.get_lock():
                cur_t = self._time()
                # Check tqdm instances are waiting too long to print
                instances = self.get_instances()
                for instance in instances:
                    # Check event in loop to reduce blocking time on exit
                    if self.was_killed.is_set():
                        return
                    # Only if mininterval > 1 (else iterations are just slow)
                    # and last refresh exceeded maxinterval
                    if instance.miniters > 1 and \
                            (cur_t - instance.last_print_t) >= \
                            instance.maxinterval:
                        # force bypassing miniters on next iteration
                        # (dynamic_miniters adjusts mininterval automatically)
                        instance.miniters = 1
                        # Refresh now! (works only for manual tqdm)
                        instance.refresh(nolock=True)
                if instances != self.get_instances():  # pragma: nocover
                    warn("Set changed size during iteration" +
                         " (see https://github.com/tqdm/tqdm/issues/481)",
                         TqdmSynchronisationWarning, stacklevel=2)

    def report(self):
        return not self.was_killed.is_set()
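# Illustrative sketch (an assumption, not part of this module): the class
# passed as ``tqdm_cls`` only needs an ``_instances`` collection and a
# ``get_lock()`` classmethod, so a minimal stand-in shows how the monitor
# thread is started and shut down.
if __name__ == "__main__":
    from threading import RLock

    class _DummyBar(object):
        _instances = set()
        _lock = RLock()

        @classmethod
        def get_lock(cls):
            return cls._lock

    monitor = TMonitor(_DummyBar, sleep_interval=0.1)  # thread starts itself
    assert monitor.report()  # still alive
    monitor.exit()           # sets the kill event and joins the thread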
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr> # Denis Engemann <denis.engemann@gmail.com> # Martin Luessi <mluessi@nmr.mgh.harvard.edu> # Eric Larson <larson.eric.d@gmail.com> # Robert Luke <mail@robertluke.net> # # License: Simplified BSD import os.path as op from functools import partial import numpy as np from numpy.testing import assert_array_equal, assert_equal import pytest import matplotlib import matplotlib.pyplot as plt from matplotlib.patches import Circle from mne import (read_evokeds, read_proj, make_fixed_length_events, Epochs, compute_proj_evoked, find_layout, pick_types, create_info, read_cov) from mne.io.proj import make_eeg_average_ref_proj, Projection from mne.io import read_raw_fif, read_info, RawArray from mne.io.constants import FIFF from mne.io.pick import pick_info, channel_indices_by_type from mne.io.compensator import get_current_comp from mne.channels import read_layout, make_dig_montage from mne.datasets import testing from mne.time_frequency.tfr import AverageTFR from mne.viz import plot_evoked_topomap, plot_projs_topomap, topomap from mne.viz.topomap import (_get_pos_outlines, _onselect, plot_topomap, plot_arrowmap, plot_psds_topomap) from mne.viz.utils import _find_peaks, _fake_click from mne.utils import requires_sklearn data_dir = testing.data_path(download=False) subjects_dir = op.join(data_dir, 'subjects') ecg_fname = op.join(data_dir, 'MEG', 'sample', 'sample_audvis_ecg-proj.fif') triux_fname = op.join(data_dir, 'SSS', 'TRIUX', 'triux_bmlhus_erm_raw.fif') base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data') evoked_fname = op.join(base_dir, 'test-ave.fif') raw_fname = op.join(base_dir, 'test_raw.fif') event_name = op.join(base_dir, 'test-eve.fif') ctf_fname = op.join(base_dir, 'test_ctf_comp_raw.fif') layout = read_layout('Vectorview-all') cov_fname = op.join(base_dir, 'test-cov.fif') def test_plot_topomap_interactive(): """Test interactive topomap projection plotting.""" evoked = read_evokeds(evoked_fname, baseline=(None, 0))[0] evoked.pick_types(meg='mag') evoked.info['projs'] = [] assert not evoked.proj evoked.add_proj(compute_proj_evoked(evoked, n_mag=1)) plt.close('all') fig = plt.figure() ax, canvas = fig.gca(), fig.canvas kwargs = dict(vmin=-240, vmax=240, times=[0.1], colorbar=False, axes=ax, res=8, time_unit='s') evoked.copy().plot_topomap(proj=False, **kwargs) canvas.draw() image_noproj = np.frombuffer(canvas.tostring_rgb(), dtype='uint8') assert len(plt.get_fignums()) == 1 ax.clear() evoked.copy().plot_topomap(proj=True, **kwargs) canvas.draw() image_proj = np.frombuffer(canvas.tostring_rgb(), dtype='uint8') assert not np.array_equal(image_noproj, image_proj) assert len(plt.get_fignums()) == 1 ax.clear() evoked.copy().plot_topomap(proj='interactive', **kwargs) canvas.draw() image_interactive = np.frombuffer(canvas.tostring_rgb(), dtype='uint8') assert_array_equal(image_noproj, image_interactive) assert not np.array_equal(image_proj, image_interactive) assert len(plt.get_fignums()) == 2 proj_fig = plt.figure(plt.get_fignums()[-1]) _fake_click(proj_fig, proj_fig.axes[0], [0.5, 0.5], xform='data') canvas.draw() image_interactive_click = np.frombuffer( canvas.tostring_rgb(), dtype='uint8') assert_array_equal(image_proj, image_interactive_click) assert not np.array_equal(image_noproj, image_interactive_click) _fake_click(proj_fig, proj_fig.axes[0], [0.5, 0.5], xform='data') canvas.draw() image_interactive_click = np.frombuffer( canvas.tostring_rgb(), dtype='uint8') assert_array_equal(image_noproj, 
image_interactive_click) assert not np.array_equal(image_proj, image_interactive_click) @testing.requires_testing_data def test_plot_projs_topomap(): """Test plot_projs_topomap.""" projs = read_proj(ecg_fname) info = read_info(raw_fname) fast_test = {"res": 8, "contours": 0, "sensors": False} plot_projs_topomap(projs, info=info, colorbar=True, **fast_test) plt.close('all') ax = plt.subplot(111) projs[3].plot_topomap(info) plot_projs_topomap(projs[:1], info, axes=ax, **fast_test) # test axes plt.close('all') triux_info = read_info(triux_fname) plot_projs_topomap(triux_info['projs'][-1:], triux_info, **fast_test) plt.close('all') plot_projs_topomap(triux_info['projs'][:1], triux_info, **fast_test) plt.close('all') eeg_avg = make_eeg_average_ref_proj(info) eeg_avg.plot_topomap(info, **fast_test) plt.close('all') # test vlims for vlim in ('joint', (-1, 1), (None, 0.5), (0.5, None), (None, None)): plot_projs_topomap(projs[:-1], info, vlim=vlim, colorbar=True) plt.close('all') eeg_proj = make_eeg_average_ref_proj(info) info_meg = pick_info(info, pick_types(info, meg=True, eeg=False)) with pytest.raises(ValueError, match='No channel names in info match p'): plot_projs_topomap([eeg_proj], info_meg) def test_plot_topomap_animation(capsys): """Test topomap plotting.""" # evoked evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0)) # Test animation _, anim = evoked.animate_topomap(ch_type='grad', times=[0, 0.1], butterfly=False, time_unit='s', verbose='debug') anim._func(1) # _animate has to be tested separately on 'Agg' backend. out, _ = capsys.readouterr() assert 'Interpolation mode local to 0' in out plt.close('all') @pytest.mark.filterwarnings('ignore:.*No contour levels.*:UserWarning') def test_plot_topomap_animation_nirs(fnirs_evoked, capsys): """Test topomap plotting for nirs data.""" fig, anim = fnirs_evoked.animate_topomap(ch_type='hbo', verbose='debug') anim._func(1) # _animate has to be tested separately on 'Agg' backend. 
out, _ = capsys.readouterr() assert 'Interpolation mode head to 0' in out assert len(fig.axes) == 2 plt.close('all') @pytest.mark.slowtest def test_plot_topomap_basic(monkeypatch): """Test basics of topomap plotting.""" evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0)) res = 8 fast_test = dict(res=res, contours=0, sensors=False, time_unit='s') fast_test_noscale = dict(res=res, contours=0, sensors=False) ev_bad = evoked.copy().pick_types(meg=False, eeg=True) ev_bad.pick_channels(ev_bad.ch_names[:2]) plt_topomap = partial(ev_bad.plot_topomap, **fast_test) plt_topomap(times=ev_bad.times[:2] - 1e-6) # auto, plots EEG pytest.raises(ValueError, plt_topomap, ch_type='mag') pytest.raises(ValueError, plt_topomap, times=[-100]) # bad time pytest.raises(ValueError, plt_topomap, times=[[0]]) # bad time evoked.plot_topomap([0.1], ch_type='eeg', scalings=1, res=res, contours=[-100, 0, 100], time_unit='ms') # extrapolation to the edges of the convex hull or the head circle evoked.plot_topomap([0.1], ch_type='eeg', scalings=1, res=res, contours=[-100, 0, 100], time_unit='ms', extrapolate='local') evoked.plot_topomap([0.1], ch_type='eeg', scalings=1, res=res, contours=[-100, 0, 100], time_unit='ms', extrapolate='head') evoked.plot_topomap([0.1], ch_type='eeg', scalings=1, res=res, contours=[-100, 0, 100], time_unit='ms', extrapolate='head', outlines='skirt') # extrapolation options when < 4 channels: temp_data = np.random.random(3) picks = channel_indices_by_type(evoked.info)['mag'][:3] info_sel = pick_info(evoked.info, picks) plot_topomap(temp_data, info_sel, extrapolate='local', res=res) plot_topomap(temp_data, info_sel, extrapolate='head', res=res) # make sure extrapolation works for 3 channels with border='mean' # (if extra points are placed incorrectly some of them have only # other extra points as neighbours and border='mean' fails) plot_topomap(temp_data, info_sel, extrapolate='local', border='mean', res=res) # border=0 and border='mean': # --------------------------- ch_pos = np.array(sum(([[0, 0, r], [r, 0, 0], [-r, 0, 0], [0, -r, 0], [0, r, 0]] for r in np.linspace(0.2, 1.0, 5)), [])) rng = np.random.RandomState(23) data = np.full(len(ch_pos), 5) + rng.randn(len(ch_pos)) info = create_info(len(ch_pos), 250, 'eeg') ch_pos_dict = {name: pos for name, pos in zip(info['ch_names'], ch_pos)} dig = make_dig_montage(ch_pos_dict, coord_frame='head') info.set_montage(dig) # border=0 ax, _ = plot_topomap(data, info, extrapolate='head', border=0, sphere=1) img_data = ax.get_array().data assert np.abs(img_data[31, 31] - data[0]) < 0.12 assert np.abs(img_data[0, 0]) < 1.5 # border='mean' ax, _ = plot_topomap(data, info, extrapolate='head', border='mean', sphere=1) img_data = ax.get_array().data assert np.abs(img_data[31, 31] - data[0]) < 0.12 assert img_data[0, 0] > 5 # error when not numeric or str: error_msg = 'border must be an instance of numeric or str' with pytest.raises(TypeError, match=error_msg): plot_topomap(data, info, extrapolate='head', border=[1, 2, 3]) # error when str is not 'mean': error_msg = "The only allowed value is 'mean', but got 'fancy' instead." 
with pytest.raises(ValueError, match=error_msg): plot_topomap(data, info, extrapolate='head', border='fancy') # test channel placement when only 'grad' are picked: # --------------------------------------------------- info_grad = evoked.copy().pick('grad').info n_grads = len(info_grad['ch_names']) data = np.random.randn(n_grads) img, _ = plot_topomap(data, info_grad) # check that channels are scattered around x == 0 pos = img.axes.collections[-1].get_offsets() prop_channels_on_the_right = (pos[:, 0] > 0).mean() assert prop_channels_on_the_right < 0.6 # other: # ------ plt_topomap = partial(evoked.plot_topomap, **fast_test) plt.close('all') axes = [plt.subplot(221), plt.subplot(222)] plt_topomap(axes=axes, colorbar=False) plt.close('all') plt_topomap(times=[-0.1, 0.2]) plt.close('all') evoked_grad = evoked.copy().crop(0, 0).pick_types(meg='grad') mask = np.zeros((204, 1), bool) mask[[0, 3, 5, 6]] = True names = [] def proc_names(x): names.append(x) return x[4:] evoked_grad.plot_topomap(ch_type='grad', times=[0], mask=mask, show_names=proc_names, **fast_test) assert_equal(sorted(names), ['MEG 011x', 'MEG 012x', 'MEG 013x', 'MEG 014x']) mask = np.zeros_like(evoked.data, dtype=bool) mask[[1, 5], :] = True plt_topomap(ch_type='mag', outlines=None) times = [0.1] plt_topomap(times, ch_type='grad', mask=mask) plt_topomap(times, ch_type='planar1') plt_topomap(times, ch_type='planar2') plt_topomap(times, ch_type='grad', mask=mask, show_names=True, mask_params={'marker': 'x'}) plt.close('all') with pytest.raises(ValueError, match='number of seconds; got -'): plt_topomap(times, ch_type='eeg', average=-1e3) with pytest.raises(TypeError, match='number of seconds; got type'): plt_topomap(times, ch_type='eeg', average='x') p = plt_topomap(times, ch_type='grad', image_interp='bilinear', show_names=lambda x: x.replace('MEG', '')) subplot = [x for x in p.get_children() if 'Subplot' in str(type(x))] assert len(subplot) >= 1, [type(x) for x in p.get_children()] subplot = subplot[0] have_all = all('MEG' not in x.get_text() for x in subplot.get_children() if isinstance(x, matplotlib.text.Text)) assert have_all # Plot array for ch_type in ('mag', 'grad'): evoked_ = evoked.copy().pick_types(eeg=False, meg=ch_type) plot_topomap(evoked_.data[:, 0], evoked_.info, **fast_test_noscale) # fail with multiple channel types pytest.raises(ValueError, plot_topomap, evoked.data[0, :], evoked.info) # Test title def get_texts(p): return [x.get_text() for x in p.get_children() if isinstance(x, matplotlib.text.Text)] p = plt_topomap(times, ch_type='eeg', average=0.01) assert_equal(len(get_texts(p)), 0) p = plt_topomap(times, ch_type='eeg', title='Custom') texts = get_texts(p) assert_equal(len(texts), 1) assert_equal(texts[0], 'Custom') plt.close('all') # delaunay triangulation warning plt_topomap(times, ch_type='mag') # projs have already been applied pytest.raises(RuntimeError, plot_evoked_topomap, evoked, 0.1, 'mag', proj='interactive', time_unit='s') # change to no-proj mode evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0), proj=False) fig1 = evoked.plot_topomap('interactive', 'mag', proj='interactive', **fast_test) _fake_click(fig1, fig1.axes[1], (0.5, 0.5)) # click slider data_max = np.max(fig1.axes[0].images[0]._A) fig2 = plt.gcf() _fake_click(fig2, fig2.axes[0], (0.075, 0.775)) # toggle projector # make sure projector gets toggled assert (np.max(fig1.axes[0].images[0]._A) != data_max) with monkeypatch.context() as m: # speed it up by not actually plotting m.setattr(topomap, '_plot_topomap', lambda 
*args, **kwargs: (None, None, None)) with pytest.warns(RuntimeWarning, match='More than 25 topomaps plots'): plot_evoked_topomap(evoked, [0.1] * 26, colorbar=False) pytest.raises(ValueError, plot_evoked_topomap, evoked, [-3e12, 15e6], time_unit='s') for ch in evoked.info['chs']: if ch['coil_type'] == FIFF.FIFFV_COIL_EEG: ch['loc'].fill(0) # Remove extra digitization point, so EEG digitization points # correspond with the EEG electrodes del evoked.info['dig'][85] # Plot skirt evoked.plot_topomap(times, ch_type='eeg', outlines='skirt', **fast_test) # Pass custom outlines without patch eeg_picks = pick_types(evoked.info, meg=False, eeg=True) pos, outlines = _get_pos_outlines(evoked.info, eeg_picks, 0.1) evoked.plot_topomap(times, ch_type='eeg', outlines=outlines, **fast_test) plt.close('all') # Test interactive cmap fig = plot_evoked_topomap(evoked, times=[0., 0.1], ch_type='eeg', cmap=('Reds', True), title='title', **fast_test) fig.canvas.key_press_event('up') fig.canvas.key_press_event(' ') fig.canvas.key_press_event('down') cbar = fig.get_axes()[0].CB # Fake dragging with mouse. ax = cbar.cbar.ax _fake_click(fig, ax, (0.1, 0.1)) _fake_click(fig, ax, (0.1, 0.2), kind='motion') _fake_click(fig, ax, (0.1, 0.3), kind='release') _fake_click(fig, ax, (0.1, 0.1), button=3) _fake_click(fig, ax, (0.1, 0.2), button=3, kind='motion') _fake_click(fig, ax, (0.1, 0.3), kind='release') fig.canvas.scroll_event(0.5, 0.5, -0.5) # scroll down fig.canvas.scroll_event(0.5, 0.5, 0.5) # scroll up plt.close('all') # Pass custom outlines with patch callable def patch(): return Circle((0.5, 0.4687), radius=.46, clip_on=True, transform=plt.gca().transAxes) outlines['patch'] = patch plot_evoked_topomap(evoked, times, ch_type='eeg', outlines=outlines, **fast_test) # Remove digitization points. Now topomap should fail evoked.info['dig'] = None pytest.raises(RuntimeError, plot_evoked_topomap, evoked, times, ch_type='eeg', time_unit='s') plt.close('all') # Error for missing names n_channels = len(pos) data = np.ones(n_channels) pytest.raises(ValueError, plot_topomap, data, pos, show_names=True) # Test error messages for invalid pos parameter pos_1d = np.zeros(n_channels) pos_3d = np.zeros((n_channels, 2, 2)) pytest.raises(ValueError, plot_topomap, data, pos_1d) pytest.raises(ValueError, plot_topomap, data, pos_3d) pytest.raises(ValueError, plot_topomap, data, pos[:3, :]) pos_x = pos[:, :1] pos_xyz = np.c_[pos, np.zeros(n_channels)[:, np.newaxis]] pytest.raises(ValueError, plot_topomap, data, pos_x) pytest.raises(ValueError, plot_topomap, data, pos_xyz) # An #channels x 4 matrix should work though. In this case (x, y, width, # height) is assumed. 
pos_xywh = np.c_[pos, np.zeros((n_channels, 2))] plot_topomap(data, pos_xywh) plt.close('all') # Test peak finder axes = [plt.subplot(131), plt.subplot(132)] evoked.plot_topomap(times='peaks', axes=axes, **fast_test) plt.close('all') evoked.data = np.zeros(evoked.data.shape) evoked.data[50][1] = 1 assert_array_equal(_find_peaks(evoked, 10), evoked.times[1]) evoked.data[80][100] = 1 assert_array_equal(_find_peaks(evoked, 10), evoked.times[[1, 100]]) evoked.data[2][95] = 2 assert_array_equal(_find_peaks(evoked, 10), evoked.times[[1, 95]]) assert_array_equal(_find_peaks(evoked, 1), evoked.times[95]) # Test excluding bads channels evoked_grad.info['bads'] += [evoked_grad.info['ch_names'][0]] orig_bads = evoked_grad.info['bads'] evoked_grad.plot_topomap(ch_type='grad', times=[0], time_unit='ms') assert_array_equal(evoked_grad.info['bads'], orig_bads) plt.close('all') def test_plot_tfr_topomap(): """Test plotting of TFR data.""" raw = read_raw_fif(raw_fname) times = np.linspace(-0.1, 0.1, 200) res = 8 n_freqs = 3 nave = 1 rng = np.random.RandomState(42) picks = [93, 94, 96, 97, 21, 22, 24, 25, 129, 130, 315, 316, 2, 5, 8, 11] info = pick_info(raw.info, picks) data = rng.randn(len(picks), n_freqs, len(times)) tfr = AverageTFR(info, data, times, np.arange(n_freqs), nave) tfr.plot_topomap(ch_type='mag', tmin=0.05, tmax=0.150, fmin=0, fmax=10, res=res, contours=0) eclick = matplotlib.backend_bases.MouseEvent( 'button_press_event', plt.gcf().canvas, 0, 0, 1) eclick.xdata = eclick.ydata = 0.1 eclick.inaxes = plt.gca() erelease = matplotlib.backend_bases.MouseEvent( 'button_release_event', plt.gcf().canvas, 0.9, 0.9, 1) erelease.xdata = 0.3 erelease.ydata = 0.2 pos = np.array([[0.11, 0.11], [0.25, 0.5], [0.0, 0.2], [0.2, 0.39]]) _onselect(eclick, erelease, tfr, pos, 'grad', 1, 3, 1, 3, 'RdBu_r', list()) _onselect(eclick, erelease, tfr, pos, 'mag', 1, 3, 1, 3, 'RdBu_r', list()) eclick.xdata = eclick.ydata = 0. 
erelease.xdata = erelease.ydata = 0.9 tfr._onselect(eclick, erelease, None, 'mean', None) plt.close('all') # test plot_psds_topomap info = raw.info.copy() chan_inds = channel_indices_by_type(info) info = pick_info(info, chan_inds['grad'][:4]) fig, axes = plt.subplots() freqs = np.arange(3., 9.5) bands = [(4, 8, 'Theta')] psd = np.random.rand(len(info['ch_names']), freqs.shape[0]) plot_psds_topomap(psd, freqs, info, bands=bands, axes=[axes]) def test_ctf_plotting(): """Test CTF topomap plotting.""" raw = read_raw_fif(ctf_fname, preload=True) assert raw.compensation_grade == 3 events = make_fixed_length_events(raw, duration=0.01) assert len(events) > 10 evoked = Epochs(raw, events, tmin=0, tmax=0.01, baseline=None).average() assert get_current_comp(evoked.info) == 3 # smoke test that compensation does not matter evoked.plot_topomap(time_unit='s') # better test that topomaps can still be used without plotting ref evoked.pick_types(meg=True, ref_meg=False) evoked.plot_topomap() @pytest.mark.slowtest # can be slow on OSX @testing.requires_testing_data def test_plot_arrowmap(): """Test arrowmap plotting.""" evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0)) with pytest.raises(ValueError, match='Multiple channel types'): plot_arrowmap(evoked.data[:, 0], evoked.info) evoked_eeg = evoked.copy().pick_types(meg=False, eeg=True) with pytest.raises(ValueError, match='Multiple channel types'): plot_arrowmap(evoked_eeg.data[:, 0], evoked.info) evoked_mag = evoked.copy().pick_types(meg='mag') evoked_grad = evoked.copy().pick_types(meg='grad') plot_arrowmap(evoked_mag.data[:, 0], evoked_mag.info) plot_arrowmap(evoked_grad.data[:, 0], evoked_grad.info, info_to=evoked_mag.info) @testing.requires_testing_data def test_plot_topomap_neuromag122(): """Test topomap plotting.""" res = 8 fast_test = dict(res=res, contours=0, sensors=False) evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0)) evoked.pick_types(meg='grad') evoked.pick_channels(evoked.ch_names[:122]) ch_names = ['MEG %03d' % k for k in range(1, 123)] for c in evoked.info['chs']: c['coil_type'] = FIFF.FIFFV_COIL_NM_122 evoked.rename_channels({c_old: c_new for (c_old, c_new) in zip(evoked.ch_names, ch_names)}) layout = find_layout(evoked.info) assert layout.kind.startswith('Neuromag_122') evoked.plot_topomap(times=[0.1], **fast_test) proj = Projection(active=False, desc="test", kind=1, data=dict(nrow=1, ncol=122, row_names=None, col_names=evoked.ch_names, data=np.ones(122)), explained_var=0.5) plot_projs_topomap([proj], evoked.info, **fast_test) def test_plot_topomap_bads(): """Test plotting topomap with bad channels (gh-7213).""" import matplotlib.pyplot as plt data = np.random.RandomState(0).randn(3, 1000) raw = RawArray(data, create_info(3, 1000., 'eeg')) ch_pos_dict = {name: pos for name, pos in zip(raw.ch_names, np.eye(3))} raw.info.set_montage(make_dig_montage(ch_pos_dict, coord_frame='head')) for count in range(3): raw.info['bads'] = raw.ch_names[:count] raw.info._check_consistency() plot_topomap(data[:, 0], raw.info) plt.close('all') def test_plot_topomap_bads_grad(): """Test plotting topomap with bad gradiometer channels (gh-8802).""" import matplotlib.pyplot as plt data = np.random.RandomState(0).randn(203) info = read_info(evoked_fname) info['bads'] = ['MEG 2242'] picks = pick_types(info, meg='grad') info = pick_info(info, picks) assert len(info['chs']) == 203 plot_topomap(data, info, res=8) plt.close('all') def test_plot_topomap_nirs_overlap(fnirs_epochs): """Test plotting nirs topomap with 
overlapping channels (gh-7414).""" fig = fnirs_epochs['A'].average(picks='hbo').plot_topomap() assert len(fig.axes) == 5 plt.close('all') @requires_sklearn def test_plot_topomap_nirs_ica(fnirs_epochs): """Test plotting nirs ica topomap.""" from mne.preprocessing import ICA fnirs_epochs = fnirs_epochs.load_data().pick(picks='hbo') fnirs_epochs = fnirs_epochs.pick(picks=range(30)) ica = ICA().fit(fnirs_epochs) fig = ica.plot_components() assert len(fig[0].axes) == 20 plt.close('all') def test_plot_cov_topomap(): """Test plotting a covariance topomap.""" cov = read_cov(cov_fname) info = read_info(evoked_fname) cov.plot_topomap(info) cov.plot_topomap(info, noise_cov=cov) plt.close('all')
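# Standalone sketch of the plot_topomap() call pattern exercised above
# (an illustrative addition mirroring test_plot_topomap_bads: three EEG
# channels placed on the unit axes; ``res=8`` keeps it fast, as in the
# fast_test settings used throughout this file).
def _example_plot_topomap_pattern():
    rng = np.random.RandomState(0)
    info = create_info(3, 1000., 'eeg')
    ch_pos = {name: pos for name, pos in zip(info['ch_names'], np.eye(3))}
    info.set_montage(make_dig_montage(ch_pos, coord_frame='head'))
    im, contours = plot_topomap(rng.randn(3), info, res=8)
    plt.close('all')
    return im, contours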
wmvanvliet/mne-python
mne/viz/tests/test_topomap.py
mne/externals/tqdm/_tqdm/_monitor.py
# Authors: Joan Massich <mailsik@gmail.com> # Alexandre Gramfort <alexandre.gramfort@inria.fr> # # License: BSD (3-clause) from collections import OrderedDict import os.path as op import numpy as np from functools import partial import xml.etree.ElementTree as ElementTree from .montage import make_dig_montage from ..transforms import _sph_to_cart from ..utils import warn, _pl from . import __file__ as _CHANNELS_INIT_FILE MONTAGE_PATH = op.join(op.dirname(_CHANNELS_INIT_FILE), 'data', 'montages') _str = 'U100' # In standard_1020, T9=LPA, T10=RPA, Nasion is the same as Iz with a # sign-flipped Y value def _egi_256(head_size): fname = op.join(MONTAGE_PATH, 'EGI_256.csd') montage = _read_csd(fname, head_size) ch_pos = montage._get_ch_pos() # For this cap, the Nasion is the frontmost electrode, # LPA/RPA we approximate by putting 75% of the way (toward the front) # between the two electrodes that are halfway down the ear holes nasion = ch_pos['E31'] lpa = 0.75 * ch_pos['E67'] + 0.25 * ch_pos['E94'] rpa = 0.75 * ch_pos['E219'] + 0.25 * ch_pos['E190'] fids_montage = make_dig_montage( coord_frame='unknown', nasion=nasion, lpa=lpa, rpa=rpa, ) montage += fids_montage # add fiducials to montage return montage def _easycap(basename, head_size): fname = op.join(MONTAGE_PATH, basename) montage = _read_theta_phi_in_degrees(fname, head_size, add_fiducials=True) return montage def _hydrocel(basename, head_size): fname = op.join(MONTAGE_PATH, basename) return _read_sfp(fname, head_size) def _str_names(ch_names): return [str(ch_name) for ch_name in ch_names] def _safe_np_loadtxt(fname, **kwargs): out = np.genfromtxt(fname, **kwargs) ch_names = _str_names(out['f0']) others = tuple(out['f%d' % ii] for ii in range(1, len(out.dtype.fields))) return (ch_names,) + others def _biosemi(basename, head_size): fname = op.join(MONTAGE_PATH, basename) fid_names = ('Nz', 'LPA', 'RPA') return _read_theta_phi_in_degrees(fname, head_size, fid_names) def _mgh_or_standard(basename, head_size): fid_names = ('Nz', 'LPA', 'RPA') fname = op.join(MONTAGE_PATH, basename) ch_names_, pos = [], [] with open(fname) as fid: # Ignore units as we will scale later using the norms anyway for line in fid: if 'Positions\n' in line: break pos = [] for line in fid: if 'Labels\n' in line: break pos.append(list(map(float, line.split()))) for line in fid: if not line or not set(line) - {' '}: break ch_names_.append(line.strip(' ').strip('\n')) pos = np.array(pos) ch_pos = _check_dupes_odict(ch_names_, pos) nasion, lpa, rpa = [ch_pos.pop(n) for n in fid_names] scale = head_size / np.median(np.linalg.norm(pos, axis=1)) for value in ch_pos.values(): value *= scale nasion *= scale lpa *= scale rpa *= scale return make_dig_montage(ch_pos=ch_pos, coord_frame='unknown', nasion=nasion, lpa=lpa, rpa=rpa) standard_montage_look_up_table = { 'EGI_256': _egi_256, 'easycap-M1': partial(_easycap, basename='easycap-M1.txt'), 'easycap-M10': partial(_easycap, basename='easycap-M10.txt'), 'GSN-HydroCel-128': partial(_hydrocel, basename='GSN-HydroCel-128.sfp'), 'GSN-HydroCel-129': partial(_hydrocel, basename='GSN-HydroCel-129.sfp'), 'GSN-HydroCel-256': partial(_hydrocel, basename='GSN-HydroCel-256.sfp'), 'GSN-HydroCel-257': partial(_hydrocel, basename='GSN-HydroCel-257.sfp'), 'GSN-HydroCel-32': partial(_hydrocel, basename='GSN-HydroCel-32.sfp'), 'GSN-HydroCel-64_1.0': partial(_hydrocel, basename='GSN-HydroCel-64_1.0.sfp'), 'GSN-HydroCel-65_1.0': partial(_hydrocel, basename='GSN-HydroCel-65_1.0.sfp'), 'biosemi128': partial(_biosemi, basename='biosemi128.txt'), 
'biosemi16': partial(_biosemi, basename='biosemi16.txt'), 'biosemi160': partial(_biosemi, basename='biosemi160.txt'), 'biosemi256': partial(_biosemi, basename='biosemi256.txt'), 'biosemi32': partial(_biosemi, basename='biosemi32.txt'), 'biosemi64': partial(_biosemi, basename='biosemi64.txt'), 'mgh60': partial(_mgh_or_standard, basename='mgh60.elc'), 'mgh70': partial(_mgh_or_standard, basename='mgh70.elc'), 'standard_1005': partial(_mgh_or_standard, basename='standard_1005.elc'), 'standard_1020': partial(_mgh_or_standard, basename='standard_1020.elc'), 'standard_alphabetic': partial(_mgh_or_standard, basename='standard_alphabetic.elc'), 'standard_postfixed': partial(_mgh_or_standard, basename='standard_postfixed.elc'), 'standard_prefixed': partial(_mgh_or_standard, basename='standard_prefixed.elc'), 'standard_primed': partial(_mgh_or_standard, basename='standard_primed.elc'), } def _read_sfp(fname, head_size): """Read .sfp BESA/EGI files.""" # fname has been already checked fid_names = ('FidNz', 'FidT9', 'FidT10') options = dict(dtype=(_str, 'f4', 'f4', 'f4')) ch_names, xs, ys, zs = _safe_np_loadtxt(fname, **options) # deal with "headshape" mask = np.array([ch_name == 'headshape' for ch_name in ch_names], bool) hsp = np.stack([xs[mask], ys[mask], zs[mask]], axis=-1) mask = ~mask pos = np.stack([xs[mask], ys[mask], zs[mask]], axis=-1) ch_names = [ch_name for ch_name, m in zip(ch_names, mask) if m] ch_pos = _check_dupes_odict(ch_names, pos) del xs, ys, zs, ch_names # no one grants that fid names are there. nasion, lpa, rpa = [ch_pos.pop(n, None) for n in fid_names] if head_size is not None: scale = head_size / np.median(np.linalg.norm(pos, axis=-1)) for value in ch_pos.values(): value *= scale nasion = nasion * scale if nasion is not None else None lpa = lpa * scale if lpa is not None else None rpa = rpa * scale if rpa is not None else None return make_dig_montage(ch_pos=ch_pos, coord_frame='unknown', nasion=nasion, rpa=rpa, lpa=lpa, hsp=hsp) def _read_csd(fname, head_size): # Label, Theta, Phi, Radius, X, Y, Z, off sphere surface options = dict(comments='//', dtype=(_str, 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4')) ch_names, _, _, _, xs, ys, zs, _ = _safe_np_loadtxt(fname, **options) pos = np.stack([xs, ys, zs], axis=-1) if head_size is not None: pos *= head_size / np.median(np.linalg.norm(pos, axis=1)) return make_dig_montage(ch_pos=_check_dupes_odict(ch_names, pos)) def _check_dupes_odict(ch_names, pos): """Warn if there are duplicates, then turn to ordered dict.""" ch_names = list(ch_names) dups = OrderedDict((ch_name, ch_names.count(ch_name)) for ch_name in ch_names) dups = OrderedDict((ch_name, count) for ch_name, count in dups.items() if count > 1) n = len(dups) if n: dups = ', '.join( f'{ch_name} ({count})' for ch_name, count in dups.items()) warn(f'Duplicate channel position{_pl(n)} found, the last will be ' f'used for {dups}') return OrderedDict(zip(ch_names, pos)) def _read_elc(fname, head_size): """Read .elc files. Parameters ---------- fname : str File extension is expected to be '.elc'. head_size : float | None The size of the head in [m]. If none, returns the values read from the file with no modification. Returns ------- montage : instance of DigMontage The montage in [m]. """ fid_names = ('Nz', 'LPA', 'RPA') ch_names_, pos = [], [] with open(fname) as fid: # _read_elc does require to detect the units. 
(see _mgh_or_standard) for line in fid: if 'UnitPosition' in line: units = line.split()[1] scale = dict(m=1., mm=1e-3)[units] break else: raise RuntimeError('Could not detect units in file %s' % fname) for line in fid: if 'Positions\n' in line: break pos = [] for line in fid: if 'Labels\n' in line: break pos.append(list(map(float, line.split()))) for line in fid: if not line or not set(line) - {' '}: break ch_names_.append(line.strip(' ').strip('\n')) pos = np.array(pos) * scale if head_size is not None: pos *= head_size / np.median(np.linalg.norm(pos, axis=1)) ch_pos = _check_dupes_odict(ch_names_, pos) nasion, lpa, rpa = [ch_pos.pop(n, None) for n in fid_names] return make_dig_montage(ch_pos=ch_pos, coord_frame='unknown', nasion=nasion, lpa=lpa, rpa=rpa) def _read_theta_phi_in_degrees(fname, head_size, fid_names=None, add_fiducials=False): ch_names, theta, phi = _safe_np_loadtxt(fname, skip_header=1, dtype=(_str, 'i4', 'i4')) if add_fiducials: # Add fiducials based on 10/20 spherical coordinate definitions # http://chgd.umich.edu/wp-content/uploads/2014/06/ # 10-20_system_positioning.pdf # extrapolated from other sensor coordinates in the Easycap layouts # https://www.easycap.de/wp-content/uploads/2018/02/ # Easycap-Equidistant-Layouts.pdf assert fid_names is None fid_names = ['Nasion', 'LPA', 'RPA'] ch_names.extend(fid_names) theta = np.append(theta, [115, -115, 115]) phi = np.append(phi, [90, 0, 0]) radii = np.full(len(phi), head_size) pos = _sph_to_cart(np.array([radii, np.deg2rad(phi), np.deg2rad(theta)]).T) ch_pos = _check_dupes_odict(ch_names, pos) nasion, lpa, rpa = None, None, None if fid_names is not None: nasion, lpa, rpa = [ch_pos.pop(n, None) for n in fid_names] return make_dig_montage(ch_pos=ch_pos, coord_frame='unknown', nasion=nasion, lpa=lpa, rpa=rpa) def _read_elp_besa(fname, head_size): # This .elp is not the same as polhemus elp. see _read_isotrak_elp_points dtype = np.dtype('S8, S8, f8, f8, f8') try: data = np.loadtxt(fname, dtype=dtype, skip_header=1) except TypeError: data = np.loadtxt(fname, dtype=dtype, skiprows=1) ch_names = data['f1'].astype(str).tolist() az = data['f2'] horiz = data['f3'] radius = np.abs(az / 180.) az = np.deg2rad(np.array([h if a >= 0. else 180 + h for h, a in zip(horiz, az)])) pol = radius * np.pi rad = data['f4'] / 100 pos = _sph_to_cart(np.array([rad, az, pol]).T) if head_size is not None: pos *= head_size / np.median(np.linalg.norm(pos, axis=1)) ch_pos = _check_dupes_odict(ch_names, pos) fid_names = ('Nz', 'LPA', 'RPA') # No one grants that the fid names actually exist. 
nasion, lpa, rpa = [ch_pos.pop(n, None) for n in fid_names] return make_dig_montage(ch_pos=ch_pos, nasion=nasion, lpa=lpa, rpa=rpa) def _read_brainvision(fname, head_size): # 'BrainVision Electrodes File' format # Based on BrainVision Analyzer coordinate system: Defined between # standard electrode positions: X-axis from T7 to T8, Y-axis from Oz to # Fpz, Z-axis orthogonal from XY-plane through Cz, fit to a sphere if # idealized (when radius=1), specified in millimeters root = ElementTree.parse(fname).getroot() ch_names = [s.text for s in root.findall("./Electrode/Name")] theta = [float(s.text) for s in root.findall("./Electrode/Theta")] pol = np.deg2rad(np.array(theta)) phi = [float(s.text) for s in root.findall("./Electrode/Phi")] az = np.deg2rad(np.array(phi)) rad = [float(s.text) for s in root.findall("./Electrode/Radius")] rad = np.array(rad) # specified in mm pos = _sph_to_cart(np.array([rad, az, pol]).T) if head_size is not None: pos *= head_size / np.median(np.linalg.norm(pos, axis=1)) return make_dig_montage(ch_pos=_check_dupes_odict(ch_names, pos))
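# Worked example (an assumption-flagged sketch, not module code): the
# readers above stack coordinates as (radius, azimuth, polar-angle)
# columns before calling _sph_to_cart. Under that convention the
# conversion is the standard spherical-to-cartesian formula, spelled out
# here for the Nasion fiducial row added in _read_theta_phi_in_degrees
# (theta=115 deg, phi=90 deg).
def _example_sph_to_cart_by_hand(head_size=0.095):
    rad, az, pol = head_size, np.deg2rad(90.), np.deg2rad(115.)
    x = rad * np.sin(pol) * np.cos(az)  # ~0: on the mid-sagittal plane
    y = rad * np.sin(pol) * np.sin(az)  # positive: toward the front
    z = rad * np.cos(pol)               # negative: just below the equator
    return np.array([x, y, z])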
repo_name: wmvanvliet/mne-python
test_path: mne/viz/tests/test_topomap.py
code_path: mne/channels/_standard_montage_utils.py
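The remaining rows pair this same test module with further source files from the repository. As a rough sketch (assuming a development checkout of mne-python with pytest and the MNE testing dataset available; nothing here is prescribed by the dataset itself), the module can be exercised on its own:

import pytest

# Run only the topomap test module; -x stops at the first failure.
pytest.main(['mne/viz/tests/test_topomap.py', '-x'])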
# -*- coding: utf-8 -*-
# Authors: Eric Larson <larson.eric.d@gmail.com>
# License: BSD Style.

import os
import os.path as op

from ..utils import _manifest_check_download, _get_path
from ...utils import (verbose, get_subjects_dir, set_config)

FSAVERAGE_MANIFEST_PATH = op.dirname(__file__)


@verbose
def fetch_fsaverage(subjects_dir=None, verbose=None):
    """Fetch and update fsaverage.

    Parameters
    ----------
    subjects_dir : str | None
        The path to use as the subjects directory in the MNE-Python
        config file. None will use the existing config variable (i.e.,
        will not change anything), and if it does not exist, will use
        ``~/mne_data/MNE-fsaverage-data``.
    %(verbose)s

    Returns
    -------
    fs_dir : str
        The fsaverage directory
        (essentially ``subjects_dir + '/fsaverage'``).

    Notes
    -----
    This function is designed to provide

    1. All modern (FreeSurfer 6) fsaverage subject files
    2. All MNE fsaverage parcellations
    3. fsaverage head surface, fiducials, head<->MRI trans, 1- and 3-layer
       BEMs (and surfaces)

    This function will compare the contents of ``subjects_dir/fsaverage``
    to the ones provided in the remote zip file. If any are missing, the
    zip file is downloaded and files are updated. No files will be
    overwritten.

    .. versionadded:: 0.18
    """
    # Code used to create the BEM (other files taken from MNE-sample-data):
    #
    # $ mne watershed_bem -s fsaverage -d $PWD --verbose info --copy
    # $ python
    # >>> bem = mne.make_bem_model('fsaverage', subjects_dir='.', verbose=True)
    # >>> mne.write_bem_surfaces(
    # ...     'fsaverage/bem/fsaverage-5120-5120-5120-bem.fif', bem)
    # >>> sol = mne.make_bem_solution(bem, verbose=True)
    # >>> mne.write_bem_solution(
    # ...     'fsaverage/bem/fsaverage-5120-5120-5120-bem-sol.fif', sol)
    # >>> import os
    # >>> import os.path as op
    # >>> names = sorted(op.join(r, f)
    # ...                for r, d, files in os.walk('fsaverage')
    # ...                for f in files)
    # with open('fsaverage.txt', 'w') as fid:
    #     fid.write('\n'.join(names))
    subjects_dir = _set_montage_coreg_path(subjects_dir)
    subjects_dir = op.abspath(subjects_dir)
    fs_dir = op.join(subjects_dir, 'fsaverage')
    os.makedirs(fs_dir, exist_ok=True)
    _manifest_check_download(
        manifest_path=op.join(FSAVERAGE_MANIFEST_PATH, 'root.txt'),
        destination=op.join(subjects_dir),
        url='https://osf.io/3bxqt/download?revision=2',
        hash_='5133fe92b7b8f03ae19219d5f46e4177',
    )
    _manifest_check_download(
        manifest_path=op.join(FSAVERAGE_MANIFEST_PATH, 'bem.txt'),
        destination=op.join(subjects_dir, 'fsaverage'),
        url='https://osf.io/7ve8g/download?revision=4',
        hash_='b31509cdcf7908af6a83dc5ee8f49fb1',
    )
    return fs_dir


def _get_create_subjects_dir(subjects_dir):
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=False)
    if subjects_dir is None:
        subjects_dir = _get_path(None, 'MNE_DATA', 'montage coregistration')
        subjects_dir = op.join(subjects_dir, 'MNE-fsaverage-data')
    os.makedirs(subjects_dir, exist_ok=True)
    return subjects_dir


def _set_montage_coreg_path(subjects_dir=None):
    """Set a subject directory suitable for montage(-only) coregistration.

    Parameters
    ----------
    subjects_dir : str | None
        The path to use as the subjects directory in the MNE-Python
        config file. None will use the existing config variable (i.e.,
        will not change anything), and if it does not exist, will use
        ``~/mne_data/MNE-fsaverage-data``.

    Returns
    -------
    subjects_dir : str
        The subjects directory that was used.

    See Also
    --------
    mne.datasets.fetch_fsaverage
    mne.get_config
    mne.set_config

    Notes
    -----
    If you plan to only do EEG-montage based coregistrations with fsaverage
    without any MRI warping, this function can facilitate the process.
    Essentially it sets the default value for ``subjects_dir`` in MNE
    functions to be ``~/mne_data/MNE-fsaverage-data`` (assuming it has not
    already been set to some other value).

    .. versionadded:: 0.18
    """
    subjects_dir = _get_create_subjects_dir(subjects_dir)
    old_subjects_dir = get_subjects_dir(None, raise_error=False)
    if old_subjects_dir is None:
        set_config('SUBJECTS_DIR', subjects_dir)
    return subjects_dir
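For orientation, a minimal usage sketch of fetch_fsaverage as defined above (assumptions: MNE-Python is installed and the OSF mirrors are reachable; none of this is mandated by the file itself):

import mne

# The first call downloads missing files into ~/mne_data/MNE-fsaverage-data;
# later calls only check the manifests and fill in anything missing.
fs_dir = mne.datasets.fetch_fsaverage(verbose=True)
print(fs_dir)  # .../MNE-fsaverage-data/fsaverage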
repo_name: wmvanvliet/mne-python
test_path: mne/viz/tests/test_topomap.py
code_path: mne/datasets/_fsaverage/base.py
"""Brainstorm datasets.""" from . import (bst_raw, bst_resting, bst_auditory, bst_phantom_ctf, bst_phantom_elekta)
repo_name: wmvanvliet/mne-python
test_path: mne/viz/tests/test_topomap.py
code_path: mne/datasets/brainstorm/__init__.py
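A hedged usage sketch for the brainstorm subpackage listed at code_path above: each submodule exposes a data_path() fetcher (this assumes network access and interactive acceptance of the Brainstorm license terms):

from mne.datasets.brainstorm import bst_raw

# Download the Brainstorm raw dataset if needed and return its local path.
path = bst_raw.data_path()
print(path)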
#!/usr/bin/env python
"""Compare FIFF files.

Examples
--------
.. code-block:: console

    $ mne compare_fiff test_raw.fif test_raw_sss.fif

"""

# Authors: Eric Larson, PhD

import sys

import mne


def run():
    """Run command."""
    parser = mne.commands.utils.get_optparser(
        __file__, usage='mne compare_fiff <file_a> <file_b>')
    options, args = parser.parse_args()
    if len(args) != 2:
        parser.print_help()
        sys.exit(1)
    mne.viz.compare_fiff(args[0], args[1])


mne.utils.run_command_if_main()
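The command-line wrapper above delegates to a public function; a minimal programmatic sketch (the two file names are placeholders, not files shipped with the package):

import mne

# Build and display a comparison of the tag contents of two FIFF files.
mne.viz.compare_fiff('test_raw.fif', 'test_raw_sss.fif')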
erelease.xdata = erelease.ydata = 0.9 tfr._onselect(eclick, erelease, None, 'mean', None) plt.close('all') # test plot_psds_topomap info = raw.info.copy() chan_inds = channel_indices_by_type(info) info = pick_info(info, chan_inds['grad'][:4]) fig, axes = plt.subplots() freqs = np.arange(3., 9.5) bands = [(4, 8, 'Theta')] psd = np.random.rand(len(info['ch_names']), freqs.shape[0]) plot_psds_topomap(psd, freqs, info, bands=bands, axes=[axes]) def test_ctf_plotting(): """Test CTF topomap plotting.""" raw = read_raw_fif(ctf_fname, preload=True) assert raw.compensation_grade == 3 events = make_fixed_length_events(raw, duration=0.01) assert len(events) > 10 evoked = Epochs(raw, events, tmin=0, tmax=0.01, baseline=None).average() assert get_current_comp(evoked.info) == 3 # smoke test that compensation does not matter evoked.plot_topomap(time_unit='s') # better test that topomaps can still be used without plotting ref evoked.pick_types(meg=True, ref_meg=False) evoked.plot_topomap() @pytest.mark.slowtest # can be slow on OSX @testing.requires_testing_data def test_plot_arrowmap(): """Test arrowmap plotting.""" evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0)) with pytest.raises(ValueError, match='Multiple channel types'): plot_arrowmap(evoked.data[:, 0], evoked.info) evoked_eeg = evoked.copy().pick_types(meg=False, eeg=True) with pytest.raises(ValueError, match='Multiple channel types'): plot_arrowmap(evoked_eeg.data[:, 0], evoked.info) evoked_mag = evoked.copy().pick_types(meg='mag') evoked_grad = evoked.copy().pick_types(meg='grad') plot_arrowmap(evoked_mag.data[:, 0], evoked_mag.info) plot_arrowmap(evoked_grad.data[:, 0], evoked_grad.info, info_to=evoked_mag.info) @testing.requires_testing_data def test_plot_topomap_neuromag122(): """Test topomap plotting.""" res = 8 fast_test = dict(res=res, contours=0, sensors=False) evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0)) evoked.pick_types(meg='grad') evoked.pick_channels(evoked.ch_names[:122]) ch_names = ['MEG %03d' % k for k in range(1, 123)] for c in evoked.info['chs']: c['coil_type'] = FIFF.FIFFV_COIL_NM_122 evoked.rename_channels({c_old: c_new for (c_old, c_new) in zip(evoked.ch_names, ch_names)}) layout = find_layout(evoked.info) assert layout.kind.startswith('Neuromag_122') evoked.plot_topomap(times=[0.1], **fast_test) proj = Projection(active=False, desc="test", kind=1, data=dict(nrow=1, ncol=122, row_names=None, col_names=evoked.ch_names, data=np.ones(122)), explained_var=0.5) plot_projs_topomap([proj], evoked.info, **fast_test) def test_plot_topomap_bads(): """Test plotting topomap with bad channels (gh-7213).""" import matplotlib.pyplot as plt data = np.random.RandomState(0).randn(3, 1000) raw = RawArray(data, create_info(3, 1000., 'eeg')) ch_pos_dict = {name: pos for name, pos in zip(raw.ch_names, np.eye(3))} raw.info.set_montage(make_dig_montage(ch_pos_dict, coord_frame='head')) for count in range(3): raw.info['bads'] = raw.ch_names[:count] raw.info._check_consistency() plot_topomap(data[:, 0], raw.info) plt.close('all') def test_plot_topomap_bads_grad(): """Test plotting topomap with bad gradiometer channels (gh-8802).""" import matplotlib.pyplot as plt data = np.random.RandomState(0).randn(203) info = read_info(evoked_fname) info['bads'] = ['MEG 2242'] picks = pick_types(info, meg='grad') info = pick_info(info, picks) assert len(info['chs']) == 203 plot_topomap(data, info, res=8) plt.close('all') def test_plot_topomap_nirs_overlap(fnirs_epochs): """Test plotting nirs topomap with 
overlapping channels (gh-7414).""" fig = fnirs_epochs['A'].average(picks='hbo').plot_topomap() assert len(fig.axes) == 5 plt.close('all') @requires_sklearn def test_plot_topomap_nirs_ica(fnirs_epochs): """Test plotting nirs ica topomap.""" from mne.preprocessing import ICA fnirs_epochs = fnirs_epochs.load_data().pick(picks='hbo') fnirs_epochs = fnirs_epochs.pick(picks=range(30)) ica = ICA().fit(fnirs_epochs) fig = ica.plot_components() assert len(fig[0].axes) == 20 plt.close('all') def test_plot_cov_topomap(): """Test plotting a covariance topomap.""" cov = read_cov(cov_fname) info = read_info(evoked_fname) cov.plot_topomap(info) cov.plot_topomap(info, noise_cov=cov) plt.close('all')
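
# The helper below is not part of the original test module; it is a minimal
# sketch of the bare ``plot_topomap`` pattern the tests above exercise
# (synthetic data and a made-up three-channel montage). It is deliberately
# not named ``test_*`` so pytest does not collect it.
def _example_plot_topomap_sketch():
    rng = np.random.RandomState(0)
    info = create_info(3, 1000., 'eeg')
    # place three hypothetical electrodes on the unit axes
    ch_pos = {name: pos for name, pos in zip(info['ch_names'], np.eye(3))}
    info.set_montage(make_dig_montage(ch_pos, coord_frame='head'))
    # plot_topomap returns the image and the contour set
    plot_topomap(rng.randn(3), info, res=8)
    plt.close('all')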
wmvanvliet/mne-python
mne/viz/tests/test_topomap.py
mne/commands/mne_compare_fiff.py
# -*- coding: utf-8 -*-
"""Coregistration between different coordinate frames."""

# Authors: Christian Brodbeck <christianbrodbeck@nyu.edu>
#
# License: BSD (3-clause)

import configparser
import fnmatch
from glob import glob, iglob
import os
import os.path as op
import stat
import sys
import re
import shutil
from functools import reduce

import numpy as np

from .io import read_fiducials, write_fiducials, read_info
from .io.constants import FIFF
from .label import read_label, Label
from .source_space import (add_source_space_distances, read_source_spaces,
                           write_source_spaces, read_talxfm, _read_mri_info)
from .surface import read_surface, write_surface, _normalize_vectors
from .bem import read_bem_surfaces, write_bem_surfaces
from .transforms import (rotation, rotation3d, scaling, translation,
                         Transform, _read_fs_xfm, _write_fs_xfm,
                         invert_transform, combine_transforms, apply_trans,
                         _quat_to_euler, _fit_matched_points)
from .utils import (get_config, get_subjects_dir, logger, pformat, verbose,
                    warn, has_nibabel)
from .viz._3d import _fiducial_coords

# some path templates
trans_fname = os.path.join('{raw_dir}', '{subject}-trans.fif')
subject_dirname = os.path.join('{subjects_dir}', '{subject}')
bem_dirname = os.path.join(subject_dirname, 'bem')
mri_dirname = os.path.join(subject_dirname, 'mri')
mri_transforms_dirname = os.path.join(subject_dirname, 'mri', 'transforms')
surf_dirname = os.path.join(subject_dirname, 'surf')
bem_fname = os.path.join(bem_dirname, "{subject}-{name}.fif")
head_bem_fname = pformat(bem_fname, name='head')
fid_fname = pformat(bem_fname, name='fiducials')
fid_fname_general = os.path.join(bem_dirname, "{head}-fiducials.fif")
src_fname = os.path.join(bem_dirname, '{subject}-{spacing}-src.fif')
_head_fnames = (os.path.join(bem_dirname, 'outer_skin.surf'),
                head_bem_fname)
_high_res_head_fnames = (os.path.join(bem_dirname,
                                      '{subject}-head-dense.fif'),
                         os.path.join(surf_dirname, 'lh.seghead'),
                         os.path.join(surf_dirname, 'lh.smseghead'))


def _make_writable(fname):
    """Make a file writable."""
    os.chmod(fname, stat.S_IMODE(os.lstat(fname)[stat.ST_MODE]) | 128)  # write


def _make_writable_recursive(path):
    """Recursively set writable."""
    if sys.platform.startswith('win'):
        return  # can't safely set perms
    for root, dirs, files in os.walk(path, topdown=False):
        for f in dirs + files:
            _make_writable(os.path.join(root, f))


def _find_head_bem(subject, subjects_dir, high_res=False):
    """Find a high resolution head."""
    # XXX this should be refactored with mne.surface.get_head_surf ...
    fnames = _high_res_head_fnames if high_res else _head_fnames
    for fname in fnames:
        path = fname.format(subjects_dir=subjects_dir, subject=subject)
        if os.path.exists(path):
            return path


def coregister_fiducials(info, fiducials, tol=0.01):
    """Create a head-MRI transform by aligning 3 fiducial points.

    Parameters
    ----------
    info : Info
        Measurement info object with fiducials in head coordinate space.
    fiducials : str | list of dict
        Fiducials in MRI coordinate space (either path to a
        ``*-fiducials.fif`` file or list of fiducials as returned by
        :func:`read_fiducials`).
    tol : float
        Error tolerance passed on to :func:`fit_matched_points`; a
        RuntimeError is raised if any matched-point distance exceeds it.

    Returns
    -------
    trans : Transform
        The head-MRI transform.
    """
    if isinstance(info, str):
        info = read_info(info)
    if isinstance(fiducials, str):
        fiducials, coord_frame_to = read_fiducials(fiducials)
    else:
        coord_frame_to = FIFF.FIFFV_COORD_MRI
    frames_from = {d['coord_frame'] for d in info['dig']}
    if len(frames_from) > 1:
        raise ValueError("info contains fiducials from different coordinate "
                         "frames")
    else:
        coord_frame_from = frames_from.pop()
    coords_from = _fiducial_coords(info['dig'])
    coords_to = _fiducial_coords(fiducials, coord_frame_to)
    trans = fit_matched_points(coords_from, coords_to, tol=tol)
    return Transform(coord_frame_from, coord_frame_to, trans)


@verbose
def create_default_subject(fs_home=None, update=False, subjects_dir=None,
                           verbose=None):
    """Create an average brain subject for subjects without structural MRI.

    Create a copy of fsaverage from the Freesurfer directory in subjects_dir
    and add auxiliary files from the mne package.

    Parameters
    ----------
    fs_home : None | str
        The freesurfer home directory (only needed if FREESURFER_HOME is not
        specified as environment variable).
    update : bool
        In cases where a copy of the fsaverage brain already exists in the
        subjects_dir, this option allows copying only the files that don't
        already exist in the fsaverage directory.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable
        (os.environ['SUBJECTS_DIR']) as destination for the new subject.
    %(verbose)s

    Notes
    -----
    When no structural MRI is available for a subject, an average brain can
    be substituted. Freesurfer comes with such an average brain model, and
    MNE comes with some auxiliary files which make coregistration easier.
    :py:func:`create_default_subject` copies the relevant files from
    Freesurfer into the current subjects_dir, and also adds the auxiliary
    files provided by MNE.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    if fs_home is None:
        fs_home = get_config('FREESURFER_HOME', fs_home)
        if fs_home is None:
            raise ValueError(
                "FREESURFER_HOME environment variable not found. Please "
                "specify the fs_home parameter in your call to "
                "create_default_subject().")

    # make sure freesurfer files exist
    fs_src = os.path.join(fs_home, 'subjects', 'fsaverage')
    if not os.path.exists(fs_src):
        raise IOError('fsaverage not found at %r. Is fs_home specified '
                      'correctly?' % fs_src)
    for name in ('label', 'mri', 'surf'):
        dirname = os.path.join(fs_src, name)
        if not os.path.isdir(dirname):
            raise IOError("Freesurfer fsaverage seems to be incomplete: No "
                          "directory named %s found in %s" % (name, fs_src))

    # make sure destination does not already exist
    dest = os.path.join(subjects_dir, 'fsaverage')
    if dest == fs_src:
        raise IOError(
            "Your subjects_dir points to the freesurfer subjects_dir (%r). "
            "The default subject cannot be created in the freesurfer "
            "installation directory; please specify a different "
            "subjects_dir." % subjects_dir)
    elif (not update) and os.path.exists(dest):
        raise IOError(
            "Cannot create fsaverage because %r already exists in "
            "subjects_dir %r. Delete or rename the existing fsaverage "
            "subject folder." % ('fsaverage', subjects_dir))

    # copy fsaverage from freesurfer
    logger.info("Copying fsaverage subject from freesurfer directory...")
    if (not update) or not os.path.exists(dest):
        shutil.copytree(fs_src, dest)
        _make_writable_recursive(dest)

    # copy files from mne
    source_fname = os.path.join(os.path.dirname(__file__), 'data',
                                'fsaverage', 'fsaverage-%s.fif')
    dest_bem = os.path.join(dest, 'bem')
    if not os.path.exists(dest_bem):
        os.mkdir(dest_bem)
    logger.info("Copying auxiliary fsaverage files from mne...")
    dest_fname = os.path.join(dest_bem, 'fsaverage-%s.fif')
    _make_writable_recursive(dest_bem)
    for name in ('fiducials', 'head', 'inner_skull-bem', 'trans'):
        if not os.path.exists(dest_fname % name):
            shutil.copy(source_fname % name, dest_bem)


def _decimate_points(pts, res=10):
    """Decimate the number of points using a voxel grid.

    Create a voxel grid with a specified resolution and retain at most one
    point per voxel. For each voxel, the point closest to its center is
    retained.

    Parameters
    ----------
    pts : array, shape (n_points, 3)
        The points making up the head shape.
    res : scalar
        The resolution of the voxel space (side length of each voxel).

    Returns
    -------
    pts : array, shape = (n_points, 3)
        The decimated points.
    """
    from scipy.spatial.distance import cdist
    pts = np.asarray(pts)

    # find the bin edges for the voxel space
    xmin, ymin, zmin = pts.min(0) - res / 2.
    xmax, ymax, zmax = pts.max(0) + res
    xax = np.arange(xmin, xmax, res)
    yax = np.arange(ymin, ymax, res)
    zax = np.arange(zmin, zmax, res)

    # find voxels containing one or more point (plain counts; the deprecated
    # ``normed`` argument is omitted for compatibility with newer numpy)
    H, _ = np.histogramdd(pts, bins=(xax, yax, zax))

    # for each voxel, select one point
    X, Y, Z = pts.T
    out = np.empty((np.sum(H > 0), 3))
    for i, (xbin, ybin, zbin) in enumerate(zip(*np.nonzero(H))):
        x = xax[xbin]
        y = yax[ybin]
        z = zax[zbin]
        xi = np.logical_and(X >= x, X < x + res)
        yi = np.logical_and(Y >= y, Y < y + res)
        zi = np.logical_and(Z >= z, Z < z + res)
        idx = np.logical_and(zi, np.logical_and(yi, xi))
        ipts = pts[idx]

        mid = np.array([x, y, z]) + res / 2.
        dist = cdist(ipts, [mid])
        i_min = np.argmin(dist)
        ipt = ipts[i_min]
        out[i] = ipt

    return out


def _trans_from_params(param_info, params):
    """Convert transformation parameters into a transformation matrix.

    Parameters
    ----------
    param_info : tuple, len = 3
        Tuple describing the parameters in x (do_rotate, do_translate,
        do_scale).
    params : tuple
        The transformation parameters.

    Returns
    -------
    trans : array, shape = (4, 4)
        Transformation matrix.
    """
    do_rotate, do_translate, do_scale = param_info
    i = 0
    trans = []

    if do_rotate:
        x, y, z = params[:3]
        trans.append(rotation(x, y, z))
        i += 3

    if do_translate:
        x, y, z = params[i:i + 3]
        trans.insert(0, translation(x, y, z))
        i += 3

    if do_scale == 1:
        s = params[i]
        trans.append(scaling(s, s, s))
    elif do_scale == 3:
        x, y, z = params[i:i + 3]
        trans.append(scaling(x, y, z))

    trans = reduce(np.dot, trans)
    return trans


_ALLOW_ANALITICAL = True


# XXX this function should be moved out of coreg as used elsewhere
def fit_matched_points(src_pts, tgt_pts, rotate=True, translate=True,
                       scale=False, tol=None, x0=None, out='trans',
                       weights=None):
    """Find a transform between matched sets of points.

    This minimizes the squared distance between two matching sets of points.

    An analytic solution is used for the common rigid case (optionally with a
    single scale factor); other parameter combinations fall back on
    :func:`scipy.optimize.leastsq` to find a transformation involving a
    combination of rotation, translation, and scaling (in that order).

    Parameters
    ----------
    src_pts : array, shape = (n, 3)
        Points to which the transform should be applied.
    tgt_pts : array, shape = (n, 3)
        Points to which src_pts should be fitted. Each point in tgt_pts
        should correspond to the point in src_pts with the same index.
    rotate : bool
        Allow rotation of the ``src_pts``.
    translate : bool
        Allow translation of the ``src_pts``.
    scale : bool | int
        Number of scaling parameters (0, 1, or 3). With False (or 0), points
        are not scaled. With True (or 1), points are scaled by the same
        factor along all axes; with 3, a separate factor is fitted per axis.
    tol : scalar | None
        The error tolerance. If the distance between any of the matched
        points exceeds this value in the solution, a RuntimeError is raised.
        With None, no error check is performed.
    x0 : None | tuple
        Initial values for the fit parameters.
    out : 'params' | 'trans'
        In what format to return the estimate: 'params' returns a tuple with
        the fit parameters; 'trans' returns a transformation matrix of shape
        (4, 4).
    weights : array, shape (n,) | None
        Weights applied to the point-wise errors (with None, all points are
        weighted equally).

    Returns
    -------
    trans : array, shape (4, 4)
        Transformation that, if applied to src_pts, minimizes the squared
        distance to tgt_pts. Only returned if out=='trans'.
    params : array, shape (n_params, )
        A single tuple containing the rotation, translation, and scaling
        parameters in that order (as applicable).
    """
    src_pts = np.atleast_2d(src_pts)
    tgt_pts = np.atleast_2d(tgt_pts)
    if src_pts.shape != tgt_pts.shape:
        raise ValueError("src_pts and tgt_pts must have same shape (got "
                         "{}, {})".format(src_pts.shape, tgt_pts.shape))
    if weights is not None:
        weights = np.asarray(weights, src_pts.dtype)
        if weights.ndim != 1 or weights.size not in (src_pts.shape[0], 1):
            raise ValueError("weights (shape=%s) must be None or have shape "
                             "(%s,)" % (weights.shape, src_pts.shape[0],))
        weights = weights[:, np.newaxis]

    param_info = (bool(rotate), bool(translate), int(scale))
    del rotate, translate, scale

    # very common use case, rigid transformation (maybe with one scale factor,
    # with or without weighted errors)
    if param_info in ((True, True, 0), (True, True, 1)) and _ALLOW_ANALITICAL:
        src_pts = np.asarray(src_pts, float)
        tgt_pts = np.asarray(tgt_pts, float)
        x, s = _fit_matched_points(
            src_pts, tgt_pts, weights, bool(param_info[2]))
        x[:3] = _quat_to_euler(x[:3])
        x = np.concatenate((x, [s])) if param_info[2] else x
    else:
        x = _generic_fit(src_pts, tgt_pts, param_info, weights, x0)

    # re-create the final transformation matrix
    if (tol is not None) or (out == 'trans'):
        trans = _trans_from_params(param_info, x)

    # assess the error of the solution
    if tol is not None:
        src_pts = np.hstack((src_pts, np.ones((len(src_pts), 1))))
        est_pts = np.dot(src_pts, trans.T)[:, :3]
        err = np.sqrt(np.sum((est_pts - tgt_pts) ** 2, axis=1))
        if np.any(err > tol):
            raise RuntimeError("Error exceeds tolerance. Error = %r" % err)

    if out == 'params':
        return x
    elif out == 'trans':
        return trans
    else:
        raise ValueError("Invalid out parameter: %r. Needs to be 'params' or "
                         "'trans'." % out)
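
# A minimal, self-contained sketch (not part of the public API) of how
# ``fit_matched_points`` is typically used: recover a known rigid transform
# from two matched point sets. The points and parameter values below are
# made up for illustration.
def _demo_fit_matched_points():
    rng = np.random.RandomState(0)
    src = rng.randn(10, 3)
    # build a known rotation + translation and apply it to the points
    trans_true = np.dot(translation(0.01, -0.02, 0.03),
                        rotation(0.1, -0.2, 0.05))
    tgt = apply_trans(trans_true, src)
    # recover the transform; tol raises if any point is off by more than 1 mm
    trans_est = fit_matched_points(src, tgt, tol=1e-3)
    return trans_true, trans_est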
def _generic_fit(src_pts, tgt_pts, param_info, weights, x0):
    from scipy.optimize import leastsq
    if param_info[1]:  # translate
        src_pts = np.hstack((src_pts, np.ones((len(src_pts), 1))))

    if param_info == (True, False, 0):
        def error(x):
            rx, ry, rz = x
            trans = rotation3d(rx, ry, rz)
            est = np.dot(src_pts, trans.T)
            d = tgt_pts - est
            if weights is not None:
                d *= weights
            return d.ravel()
        if x0 is None:
            x0 = (0, 0, 0)
    elif param_info == (True, True, 0):
        def error(x):
            rx, ry, rz, tx, ty, tz = x
            trans = np.dot(translation(tx, ty, tz), rotation(rx, ry, rz))
            est = np.dot(src_pts, trans.T)[:, :3]
            d = tgt_pts - est
            if weights is not None:
                d *= weights
            return d.ravel()
        if x0 is None:
            x0 = (0, 0, 0, 0, 0, 0)
    elif param_info == (True, True, 1):
        def error(x):
            rx, ry, rz, tx, ty, tz, s = x
            trans = reduce(np.dot, (translation(tx, ty, tz),
                                    rotation(rx, ry, rz),
                                    scaling(s, s, s)))
            est = np.dot(src_pts, trans.T)[:, :3]
            d = tgt_pts - est
            if weights is not None:
                d *= weights
            return d.ravel()
        if x0 is None:
            x0 = (0, 0, 0, 0, 0, 0, 1)
    elif param_info == (True, True, 3):
        def error(x):
            rx, ry, rz, tx, ty, tz, sx, sy, sz = x
            trans = reduce(np.dot, (translation(tx, ty, tz),
                                    rotation(rx, ry, rz),
                                    scaling(sx, sy, sz)))
            est = np.dot(src_pts, trans.T)[:, :3]
            d = tgt_pts - est
            if weights is not None:
                d *= weights
            return d.ravel()
        if x0 is None:
            x0 = (0, 0, 0, 0, 0, 0, 1, 1, 1)
    else:
        raise NotImplementedError(
            "The specified parameter combination is not implemented: "
            "rotate=%r, translate=%r, scale=%r" % param_info)

    x, _, _, _, _ = leastsq(error, x0, full_output=True)
    return x


def _find_label_paths(subject='fsaverage', pattern=None, subjects_dir=None):
    """Find paths to label files in a subject's label directory.

    Parameters
    ----------
    subject : str
        Name of the mri subject.
    pattern : str | None
        Pattern for finding the labels relative to the label directory in
        the MRI subject directory (e.g., "aparc/*.label" will find all
        labels in the "subject/label/aparc" directory). With None, find all
        labels.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable
        (os.environ['SUBJECTS_DIR']).

    Returns
    -------
    paths : list
        List of paths relative to the subject's label directory.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    subject_dir = os.path.join(subjects_dir, subject)
    lbl_dir = os.path.join(subject_dir, 'label')

    if pattern is None:
        paths = []
        for dirpath, _, filenames in os.walk(lbl_dir):
            rel_dir = os.path.relpath(dirpath, lbl_dir)
            for filename in fnmatch.filter(filenames, '*.label'):
                path = os.path.join(rel_dir, filename)
                paths.append(path)
    else:
        paths = [os.path.relpath(path, lbl_dir) for path in iglob(pattern)]

    return paths


def _find_mri_paths(subject, skip_fiducials, subjects_dir):
    """Find all files of an mri relevant for source transformation.

    Parameters
    ----------
    subject : str
        Name of the mri subject.
    skip_fiducials : bool
        Do not scale the MRI fiducials. If False, an IOError will be raised
        if no fiducials file can be found.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable
        (os.environ['SUBJECTS_DIR']).

    Returns
    -------
    paths : dict
        Dictionary whose keys are relevant file type names (str), and whose
        values are lists of paths.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    paths = {}

    # directories to create
    paths['dirs'] = [bem_dirname, surf_dirname]

    # surf/ files
    paths['surf'] = []
    surf_fname = os.path.join(surf_dirname, '{name}')
    surf_names = ('inflated', 'white', 'orig', 'orig_avg', 'inflated_avg',
                  'inflated_pre', 'pial', 'pial_avg', 'smoothwm', 'white_avg',
                  'seghead', 'smseghead')
    if os.getenv('_MNE_FEW_SURFACES', '') == 'true':  # for testing
        surf_names = surf_names[:4]
    for surf_name in surf_names:
        for hemi in ('lh.', 'rh.'):
            name = hemi + surf_name
            path = surf_fname.format(subjects_dir=subjects_dir,
                                     subject=subject, name=name)
            if os.path.exists(path):
                paths['surf'].append(pformat(surf_fname, name=name))
    surf_fname = os.path.join(bem_dirname, '{name}')
    surf_names = ('inner_skull.surf', 'outer_skull.surf', 'outer_skin.surf')
    for surf_name in surf_names:
        path = surf_fname.format(subjects_dir=subjects_dir,
                                 subject=subject, name=surf_name)
        if os.path.exists(path):
            paths['surf'].append(pformat(surf_fname, name=surf_name))
    del surf_names, surf_name, path, hemi

    # BEM files
    paths['bem'] = bem = []
    path = head_bem_fname.format(subjects_dir=subjects_dir, subject=subject)
    if os.path.exists(path):
        bem.append('head')
    bem_pattern = pformat(bem_fname, subjects_dir=subjects_dir,
                          subject=subject, name='*-bem')
    re_pattern = pformat(bem_fname, subjects_dir=subjects_dir,
                         subject=subject, name='(.+)').replace('\\', '\\\\')
    for path in iglob(bem_pattern):
        match = re.match(re_pattern, path)
        name = match.group(1)
        bem.append(name)
    del bem, path, bem_pattern, re_pattern

    # fiducials
    if skip_fiducials:
        paths['fid'] = []
    else:
        paths['fid'] = _find_fiducials_files(subject, subjects_dir)
        # check that we found at least one
        if len(paths['fid']) == 0:
            raise IOError("No fiducials file found for %s. The fiducials "
                          "file should be named "
                          "{subject}/bem/{subject}-fiducials.fif. In "
                          "order to scale an MRI without fiducials set "
                          "skip_fiducials=True." % subject)

    # duplicate files (curvature and some surfaces)
    paths['duplicate'] = []
    surf_fname = os.path.join(surf_dirname, '{name}')
    surf_dup_names = ('curv', 'sphere', 'sphere.reg', 'sphere.reg.avg')
    for surf_dup_name in surf_dup_names:
        for hemi in ('lh.', 'rh.'):
            name = hemi + surf_dup_name
            path = surf_fname.format(subjects_dir=subjects_dir,
                                     subject=subject, name=name)
            if os.path.exists(path):
                paths['duplicate'].append(pformat(surf_fname, name=name))
    del surf_dup_name, name, path, hemi

    # transform files (talairach)
    paths['transforms'] = []
    transform_fname = os.path.join(mri_transforms_dirname, 'talairach.xfm')
    path = transform_fname.format(subjects_dir=subjects_dir, subject=subject)
    if os.path.exists(path):
        paths['transforms'].append(transform_fname)
    del transform_fname, path

    # find source space files
    paths['src'] = src = []
    bem_dir = bem_dirname.format(subjects_dir=subjects_dir, subject=subject)
    fnames = fnmatch.filter(os.listdir(bem_dir), '*-src.fif')
    prefix = subject + '-'
    for fname in fnames:
        if fname.startswith(prefix):
            fname = "{subject}-%s" % fname[len(prefix):]
        path = os.path.join(bem_dirname, fname)
        src.append(path)

    # find MRIs
    mri_dir = mri_dirname.format(subjects_dir=subjects_dir, subject=subject)
    fnames = fnmatch.filter(os.listdir(mri_dir), '*.mgz')
    paths['mri'] = [os.path.join(mri_dir, f) for f in fnames]

    return paths


def _find_fiducials_files(subject, subjects_dir):
    """Find fiducial files."""
    fid = []
    # standard fiducials
    if os.path.exists(fid_fname.format(subjects_dir=subjects_dir,
                                       subject=subject)):
        fid.append(fid_fname)
    # fiducials with subject name
    pattern = pformat(fid_fname_general, subjects_dir=subjects_dir,
                      subject=subject, head='*')
    regex = pformat(fid_fname_general, subjects_dir=subjects_dir,
                    subject=subject, head='(.+)').replace('\\', '\\\\')
    for path in iglob(pattern):
        match = re.match(regex, path)
        head = match.group(1).replace(subject, '{subject}')
        fid.append(pformat(fid_fname_general, head=head))
    return fid


def _is_mri_subject(subject, subjects_dir=None):
    """Check whether a directory in subjects_dir is an mri subject directory.

    Parameters
    ----------
    subject : str
        Name of the potential subject/directory.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable.

    Returns
    -------
    is_mri_subject : bool
        Whether ``subject`` is an mri subject.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    return bool(_find_head_bem(subject, subjects_dir) or
                _find_head_bem(subject, subjects_dir, high_res=True))


def _is_scaled_mri_subject(subject, subjects_dir=None):
    """Check whether a directory in subjects_dir is a scaled mri subject.

    Parameters
    ----------
    subject : str
        Name of the potential subject/directory.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable.

    Returns
    -------
    is_scaled_mri_subject : bool
        Whether ``subject`` is a scaled mri subject.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    if not _is_mri_subject(subject, subjects_dir):
        return False
    fname = os.path.join(subjects_dir, subject, 'MRI scaling parameters.cfg')
    return os.path.exists(fname)


def _mri_subject_has_bem(subject, subjects_dir=None):
    """Check whether an mri subject has a file matching the bem pattern.

    Parameters
    ----------
    subject : str
        Name of the subject.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable.

    Returns
    -------
    has_bem_file : bool
        Whether ``subject`` has a bem file.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    pattern = bem_fname.format(subjects_dir=subjects_dir, subject=subject,
                               name='*-bem')
    fnames = glob(pattern)
    return bool(len(fnames))


def read_mri_cfg(subject, subjects_dir=None):
    """Read information from the cfg file of a scaled MRI brain.

    Parameters
    ----------
    subject : str
        Name of the scaled MRI subject.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable.

    Returns
    -------
    cfg : dict
        Dictionary with entries from the MRI's cfg file.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    fname = os.path.join(subjects_dir, subject, 'MRI scaling parameters.cfg')

    if not os.path.exists(fname):
        raise IOError("%r does not seem to be a scaled mri subject: %r does "
                      "not exist." % (subject, fname))

    logger.info("Reading MRI cfg file %s" % fname)
    config = configparser.RawConfigParser()
    config.read(fname)
    n_params = config.getint("MRI Scaling", 'n_params')
    if n_params == 1:
        scale = config.getfloat("MRI Scaling", 'scale')
    elif n_params == 3:
        scale_str = config.get("MRI Scaling", 'scale')
        scale = np.array([float(s) for s in scale_str.split()])
    else:
        raise ValueError("Invalid n_params value in MRI cfg: %i" % n_params)

    out = {'subject_from': config.get("MRI Scaling", 'subject_from'),
           'n_params': n_params, 'scale': scale}
    return out


def _write_mri_config(fname, subject_from, subject_to, scale):
    """Write the cfg file describing a scaled MRI subject.

    Parameters
    ----------
    fname : str
        Target file.
    subject_from : str
        Name of the source MRI subject.
    subject_to : str
        Name of the scaled MRI subject.
    scale : float | array_like, shape = (3,)
        The scaling parameter.
    """
    scale = np.asarray(scale)
    if np.isscalar(scale) or scale.shape == ():
        n_params = 1
    else:
        n_params = 3

    config = configparser.RawConfigParser()
    config.add_section("MRI Scaling")
    config.set("MRI Scaling", 'subject_from', subject_from)
    config.set("MRI Scaling", 'subject_to', subject_to)
    config.set("MRI Scaling", 'n_params', str(n_params))
    if n_params == 1:
        config.set("MRI Scaling", 'scale', str(scale))
    else:
        config.set("MRI Scaling", 'scale', ' '.join([str(s) for s in scale]))
    config.set("MRI Scaling", 'version', '1')
    with open(fname, 'w') as fid:
        config.write(fid)


def _scale_params(subject_to, subject_from, scale, subjects_dir):
    """Assemble parameters for scaling.

    Returns
    -------
    subjects_dir : str
        Subjects directory.
    subject_from : str
        Name of the source subject.
    scale : array
        Scaling factor, either shape=(1,) for uniform scaling or shape=(3,)
        for non-uniform scaling.
    uniform : bool
        Whether scaling is uniform.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    if (subject_from is None) != (scale is None):
        raise TypeError("Need to provide either both subject_from and scale "
                        "parameters, or neither.")

    if subject_from is None:
        cfg = read_mri_cfg(subject_to, subjects_dir)
        subject_from = cfg['subject_from']
        n_params = cfg['n_params']
        assert n_params in (1, 3)
        scale = cfg['scale']
    scale = np.atleast_1d(scale)
    if scale.ndim != 1 or scale.shape[0] not in (1, 3):
        raise ValueError("Invalid shape for scale parameter. Need scalar "
                         "or array of length 3. Got shape %s."
                         % (scale.shape,))
    n_params = len(scale)
    return subjects_dir, subject_from, scale, n_params == 1


@verbose
def scale_bem(subject_to, bem_name, subject_from=None, scale=None,
              subjects_dir=None, verbose=None):
    """Scale a bem file.

    Parameters
    ----------
    subject_to : str
        Name of the scaled MRI subject (the destination mri subject).
    bem_name : str
        Name of the bem file. For example, to scale
        ``fsaverage-inner_skull-bem.fif``, the bem_name would be
        "inner_skull-bem".
    subject_from : None | str
        The subject from which to read the source space. If None,
        subject_from is read from subject_to's config file.
    scale : None | float | array, shape = (3,)
        Scaling factor. Has to be specified if subject_from is specified,
        otherwise it is read from subject_to's config file.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable.
    %(verbose)s
    """
    subjects_dir, subject_from, scale, uniform = \
        _scale_params(subject_to, subject_from, scale, subjects_dir)

    src = bem_fname.format(subjects_dir=subjects_dir, subject=subject_from,
                           name=bem_name)
    dst = bem_fname.format(subjects_dir=subjects_dir, subject=subject_to,
                           name=bem_name)

    if os.path.exists(dst):
        raise IOError("File already exists: %s" % dst)

    surfs = read_bem_surfaces(src)
    for surf in surfs:
        surf['rr'] *= scale
        if not uniform:
            assert len(surf['nn']) > 0
            surf['nn'] /= scale
            _normalize_vectors(surf['nn'])
    write_bem_surfaces(dst, surfs)


def scale_labels(subject_to, pattern=None, overwrite=False, subject_from=None,
                 scale=None, subjects_dir=None):
    r"""Scale labels to match a brain that was previously created by scaling.

    Parameters
    ----------
    subject_to : str
        Name of the scaled MRI subject (the destination brain).
    pattern : str | None
        Pattern for finding the labels relative to the label directory in
        the MRI subject directory (e.g., "lh.BA3a.label" will scale
        "fsaverage/label/lh.BA3a.label"; "aparc/\*.label" will find all
        labels in the "fsaverage/label/aparc" directory). With None, scale
        all labels.
    overwrite : bool
        Overwrite any label file that already exists for subject_to
        (otherwise existing labels are skipped).
    subject_from : None | str
        Name of the original MRI subject (the brain that was scaled to
        create subject_to). If None, the value is read from subject_to's cfg
        file.
    scale : None | float | array_like, shape = (3,)
        Scaling parameter. If None, the value is read from subject_to's cfg
        file.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable.
    """
    subjects_dir, subject_from, scale, _ = _scale_params(
        subject_to, subject_from, scale, subjects_dir)

    # find labels
    paths = _find_label_paths(subject_from, pattern, subjects_dir)
    if not paths:
        return

    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    src_root = os.path.join(subjects_dir, subject_from, 'label')
    dst_root = os.path.join(subjects_dir, subject_to, 'label')

    # scale labels
    for fname in paths:
        dst = os.path.join(dst_root, fname)
        if not overwrite and os.path.exists(dst):
            continue

        dirname = os.path.dirname(dst)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        src = os.path.join(src_root, fname)
        l_old = read_label(src)
        pos = l_old.pos * scale
        l_new = Label(l_old.vertices, pos, l_old.values, l_old.hemi,
                      l_old.comment, subject=subject_to)
        l_new.save(dst)


@verbose
def scale_mri(subject_from, subject_to, scale, overwrite=False,
              subjects_dir=None, skip_fiducials=False, labels=True,
              annot=False, verbose=None):
    """Create a scaled copy of an MRI subject.

    Parameters
    ----------
    subject_from : str
        Name of the subject providing the MRI.
    subject_to : str
        New subject name for which to save the scaled MRI.
    scale : float | array_like, shape = (3,)
        The scaling factor (one or 3 parameters).
    overwrite : bool
        If an MRI already exists for subject_to, overwrite it.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable.
    skip_fiducials : bool
        Do not scale the MRI fiducials. If False (default), an IOError will
        be raised if no fiducials file can be found.
    labels : bool
        Also scale all labels (default True).
    annot : bool
        Copy ``*.annot`` files to the new location (default False).
    %(verbose)s

    See Also
    --------
    scale_labels : Add labels to a scaled MRI.
    scale_source_space : Add a source space to a scaled MRI.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    paths = _find_mri_paths(subject_from, skip_fiducials, subjects_dir)
    scale = np.atleast_1d(scale)
    if scale.shape == (3,):
        if np.isclose(scale[1], scale[0]) and np.isclose(scale[2], scale[0]):
            scale = scale[0]  # speed up scaling conditionals using a singleton
    elif scale.shape != (1,):
        raise ValueError('scale must have shape (3,) or (1,), got %s'
                         % (scale.shape,))

    # make sure we have an empty target directory
    dest = subject_dirname.format(subject=subject_to,
                                  subjects_dir=subjects_dir)
    if os.path.exists(dest):
        if not overwrite:
            raise IOError("Subject directory for %s already exists: %r"
                          % (subject_to, dest))
        shutil.rmtree(dest)

    logger.debug('create empty directory structure')
    for dirname in paths['dirs']:
        dir_ = dirname.format(subject=subject_to, subjects_dir=subjects_dir)
        os.makedirs(dir_)

    logger.debug('save MRI scaling parameters')
    fname = os.path.join(dest, 'MRI scaling parameters.cfg')
    _write_mri_config(fname, subject_from, subject_to, scale)

    logger.debug('surf files [in mm]')
    for fname in paths['surf']:
        src = fname.format(subject=subject_from, subjects_dir=subjects_dir)
        src = os.path.realpath(src)
        dest = fname.format(subject=subject_to, subjects_dir=subjects_dir)
        pts, tri = read_surface(src)
        write_surface(dest, pts * scale, tri)

    logger.debug('BEM files [in m]')
    for bem_name in paths['bem']:
        scale_bem(subject_to, bem_name, subject_from, scale, subjects_dir,
                  verbose=False)

    logger.debug('fiducials [in m]')
    for fname in paths['fid']:
        src = fname.format(subject=subject_from, subjects_dir=subjects_dir)
        src = os.path.realpath(src)
        pts, cframe = read_fiducials(src, verbose=False)
        for pt in pts:
            pt['r'] = pt['r'] * scale
        dest = fname.format(subject=subject_to, subjects_dir=subjects_dir)
        write_fiducials(dest, pts, cframe, verbose=False)

    logger.debug('MRIs [nibabel]')
    os.mkdir(mri_dirname.format(subjects_dir=subjects_dir,
                                subject=subject_to))
    for fname in paths['mri']:
        mri_name = os.path.basename(fname)
        _scale_mri(subject_to, mri_name, subject_from, scale, subjects_dir)

    logger.debug('Transforms')
    for mri_name in paths['mri']:
        if mri_name.endswith('T1.mgz'):
            os.mkdir(mri_transforms_dirname.format(subjects_dir=subjects_dir,
                                                   subject=subject_to))
            for fname in paths['transforms']:
                xfm_name = os.path.basename(fname)
                _scale_xfm(subject_to, xfm_name, mri_name,
                           subject_from, scale, subjects_dir)
            break

    logger.debug('duplicate files')
    for fname in paths['duplicate']:
        src = fname.format(subject=subject_from, subjects_dir=subjects_dir)
        dest = fname.format(subject=subject_to, subjects_dir=subjects_dir)
        shutil.copyfile(src, dest)

    logger.debug('source spaces')
    for fname in paths['src']:
        src_name = os.path.basename(fname)
        scale_source_space(subject_to, src_name, subject_from, scale,
                           subjects_dir, verbose=False)

    logger.debug('labels [in m]')
    os.mkdir(os.path.join(subjects_dir, subject_to, 'label'))
    if labels:
        scale_labels(subject_to, subject_from=subject_from, scale=scale,
                     subjects_dir=subjects_dir)

    logger.debug('copy *.annot files')
    # they don't contain scale-dependent information
    if annot:
        src_pattern = os.path.join(subjects_dir, subject_from, 'label',
                                   '*.annot')
        dst_dir = os.path.join(subjects_dir, subject_to, 'label')
        for src_file in iglob(src_pattern):
            shutil.copy(src_file, dst_dir)


@verbose
def scale_source_space(subject_to, src_name, subject_from=None, scale=None,
                       subjects_dir=None, n_jobs=1, verbose=None):
    """Scale a source space for an mri created with scale_mri().

    Parameters
    ----------
    subject_to : str
        Name of the scaled MRI subject (the destination mri subject).
    src_name : str
        Source space name. Can be a spacing parameter (e.g., ``'7'``,
        ``'ico4'``, ``'oct6'``) or a file name of a source space file
        relative to the bem directory; if the file name contains the subject
        name, it should be indicated as "{subject}" in ``src_name``
        (e.g., ``"{subject}-my_source_space-src.fif"``).
    subject_from : None | str
        The subject from which to read the source space. If None,
        subject_from is read from subject_to's config file.
    scale : None | float | array, shape = (3,)
        Scaling factor. Has to be specified if subject_from is specified,
        otherwise it is read from subject_to's config file.
    subjects_dir : None | str
        Override the SUBJECTS_DIR environment variable.
    n_jobs : int
        Number of jobs to run in parallel if recomputing distances (only
        applies if scale is an array of length 3, and will not use more
        cores than there are source spaces).
    %(verbose)s

    Notes
    -----
    When scaling volume source spaces, the source (vertex) locations are
    scaled, but the reference to the MRI volume is left unchanged. Transforms
    are updated so that source estimates can be plotted on the original MRI
    volume.
    """
    subjects_dir, subject_from, scale, uniform = \
        _scale_params(subject_to, subject_from, scale, subjects_dir)
    # if n_params==1 scale is a scalar; if n_params==3 scale is a (3,) array

    # find the source space file names
    if src_name.isdigit():
        spacing = src_name  # spacing in mm
        src_pattern = src_fname
    else:
        match = re.match(r"(oct|ico|vol)-?(\d+)$", src_name)
        if match:
            spacing = '-'.join(match.groups())
            src_pattern = src_fname
        else:
            spacing = None
            src_pattern = os.path.join(bem_dirname, src_name)

    src = src_pattern.format(subjects_dir=subjects_dir, subject=subject_from,
                             spacing=spacing)
    dst = src_pattern.format(subjects_dir=subjects_dir, subject=subject_to,
                             spacing=spacing)

    # read and scale the source space [in m]
    sss = read_source_spaces(src)
    logger.info("scaling source space %s: %s -> %s", spacing, subject_from,
                subject_to)
    logger.info("Scale factor: %s", scale)
    add_dist = False
    for ss in sss:
        ss['subject_his_id'] = subject_to
        ss['rr'] *= scale
        # additional tags for volume source spaces
        for key in ('vox_mri_t', 'src_mri_t'):
            # maintain transform to original MRI volume ss['mri_volume_name']
            if key in ss:
                ss[key]['trans'][:3] *= scale[:, np.newaxis]
        # distances and patch info
        if uniform:
            if ss['dist'] is not None:
                ss['dist'] *= scale[0]
                # Sometimes this is read-only due to how it's read
                ss['nearest_dist'] = ss['nearest_dist'] * scale
                ss['dist_limit'] = ss['dist_limit'] * scale
        else:  # non-uniform scaling
            ss['nn'] /= scale
            _normalize_vectors(ss['nn'])
            if ss['dist'] is not None:
                add_dist = True
                dist_limit = float(np.abs(sss[0]['dist_limit']))
            elif ss['nearest'] is not None:
                add_dist = True
                dist_limit = 0

    if add_dist:
        logger.info("Recomputing distances, this might take a while")
        add_source_space_distances(sss, dist_limit, n_jobs)

    write_source_spaces(dst, sss)


def _scale_mri(subject_to, mri_fname, subject_from, scale, subjects_dir):
    """Scale an MRI by setting its affine."""
    subjects_dir, subject_from, scale, _ = _scale_params(
        subject_to, subject_from, scale, subjects_dir)

    if not has_nibabel():
        warn('Skipping MRI scaling for %s, please install nibabel'
             % subject_to)
        return

    import nibabel
    fname_from = op.join(mri_dirname.format(
        subjects_dir=subjects_dir, subject=subject_from), mri_fname)
    fname_to = op.join(mri_dirname.format(
        subjects_dir=subjects_dir, subject=subject_to), mri_fname)
    img = nibabel.load(fname_from)
    zooms = np.array(img.header.get_zooms())
    zooms[[0, 2, 1]] *= scale
    img.header.set_zooms(zooms)
    # Hack to fix nibabel problems, see
    # https://github.com/nipy/nibabel/issues/619
    img._affine = img.header.get_affine()  # or could use None
    nibabel.save(img, fname_to)


def _scale_xfm(subject_to, xfm_fname, mri_name, subject_from, scale,
               subjects_dir):
    """Scale a transform."""
    subjects_dir, subject_from, scale, _ = _scale_params(
        subject_to, subject_from, scale, subjects_dir)

    # The nibabel warning should already be there in MRI step, if applicable,
    # as we only get here if T1.mgz is present (and thus a scaling was
    # attempted) so we can silently return here.
    if not has_nibabel():
        return

    fname_from = os.path.join(
        mri_transforms_dirname.format(
            subjects_dir=subjects_dir, subject=subject_from), xfm_fname)
    fname_to = op.join(
        mri_transforms_dirname.format(
            subjects_dir=subjects_dir, subject=subject_to), xfm_fname)
    assert op.isfile(fname_from), fname_from
    assert op.isdir(op.dirname(fname_to)), op.dirname(fname_to)
    # The "talairach.xfm" file stores the ras_mni transform.
    #
    # For "from" subj F, "to" subj T, F->T scaling S, some equivalent vertex
    # positions F_x and T_x in MRI (Freesurfer RAS) coords, knowing that
    # we have T_x = S @ F_x, we want to have the same MNI coords computed
    # for these vertices:
    #
    #              T_mri_mni @ T_x = F_mri_mni @ F_x
    #
    # We need to find the correct T_ras_mni (talairach.xfm file) that yields
    # this. So we derive (where ⁻¹ indicates inversion):
    #
    #          T_mri_mni @ S @ F_x = F_mri_mni @ F_x
    #                T_mri_mni @ S = F_mri_mni
    #    T_ras_mni @ T_mri_ras @ S = F_ras_mni @ F_mri_ras
    #        T_ras_mni @ T_mri_ras = F_ras_mni @ F_mri_ras @ S⁻¹
    #                    T_ras_mni = F_ras_mni @ F_mri_ras @ S⁻¹ @ T_ras_mri
    #

    # prepare the scale (S) transform
    scale = np.atleast_1d(scale)
    scale = np.tile(scale, 3) if len(scale) == 1 else scale
    S = Transform('mri', 'mri', scaling(*scale))  # F_mri->T_mri

    #
    # Get the necessary transforms of the "from" subject
    #
    xfm, kind = _read_fs_xfm(fname_from)
    assert kind == 'MNI Transform File', kind
    _, _, F_mri_ras, _, _ = _read_mri_info(mri_name, units='mm')
    F_ras_mni = Transform('ras', 'mni_tal', xfm)
    del xfm

    #
    # Get the necessary transforms of the "to" subject
    #
    mri_name = op.join(mri_dirname.format(
        subjects_dir=subjects_dir, subject=subject_to),
        op.basename(mri_name))
    _, _, T_mri_ras, _, _ = _read_mri_info(mri_name, units='mm')
    T_ras_mri = invert_transform(T_mri_ras)
    del mri_name, T_mri_ras

    # Finally we construct as above:
    #
    #    T_ras_mni = F_ras_mni @ F_mri_ras @ S⁻¹ @ T_ras_mri
    #
    # By moving right to left through the equation.
    T_ras_mni = \
        combine_transforms(
            combine_transforms(
                combine_transforms(
                    T_ras_mri, invert_transform(S), 'ras', 'mri'),
                F_mri_ras, 'ras', 'ras'),
            F_ras_mni, 'ras', 'mni_tal')
    _write_fs_xfm(fname_to, T_ras_mni['trans'], kind)


@verbose
def get_mni_fiducials(subject, subjects_dir=None, verbose=None):
    """Estimate fiducials for a subject.

    Parameters
    ----------
    %(subject)s
    %(subjects_dir)s
    %(verbose)s

    Returns
    -------
    fids_mri : list
        List of estimated fiducials (each point in a dict), in the order
        LPA, nasion, RPA.

    Notes
    -----
    This takes the ``fsaverage-fiducials.fif`` file included with MNE, which
    contains the LPA, nasion, and RPA for the ``fsaverage`` subject, and
    transforms the points to the given FreeSurfer subject's MRI space. Since
    the MRI of ``fsaverage`` is already in MNI Talairach space, the inverse
    of the given subject's MNI Talairach affine transformation
    (``$SUBJECTS_DIR/$SUBJECT/mri/transforms/talairach.xfm``) is applied to
    estimate the subject's fiducial locations.

    For more details about the coordinate systems and transformations
    involved, see https://surfer.nmr.mgh.harvard.edu/fswiki/CoordinateSystems
    and :ref:`plot_source_alignment`.
    """
    # Eventually we might want to allow using the MNI Talairach with-skull
    # transformation rather than the standard brain-based MNI Talairach
    # transformation, and/or project the points onto the head surface
    # (if available).
    fname_fids_fs = os.path.join(os.path.dirname(__file__), 'data',
                                 'fsaverage', 'fsaverage-fiducials.fif')

    # Read fsaverage fiducials file and subject Talairach.
    fids, coord_frame = read_fiducials(fname_fids_fs)
    assert coord_frame == FIFF.FIFFV_COORD_MRI
    if subject == 'fsaverage':
        return fids  # special short-circuit for fsaverage
    mni_mri_t = invert_transform(read_talxfm(subject, subjects_dir))
    for f in fids:
        f['r'] = apply_trans(mni_mri_t, f['r'])
    return fids
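
# A minimal usage sketch (not part of the original module): estimate
# fiducials for a FreeSurfer subject and write them out. The subject name
# and subjects_dir below are hypothetical placeholders.
def _demo_get_mni_fiducials():
    subjects_dir = '/path/to/subjects_dir'  # hypothetical
    fids = get_mni_fiducials('sample', subjects_dir=subjects_dir)
    # each entry is a dig-point dict whose 'r' key holds the (x, y, z)
    # position in MRI coordinates
    fname = os.path.join(subjects_dir, 'sample', 'bem',
                         'sample-fiducials.fif')
    write_fiducials(fname, fids, FIFF.FIFFV_COORD_MRI)
    return fids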
pos_xywh = np.c_[pos, np.zeros((n_channels, 2))] plot_topomap(data, pos_xywh) plt.close('all') # Test peak finder axes = [plt.subplot(131), plt.subplot(132)] evoked.plot_topomap(times='peaks', axes=axes, **fast_test) plt.close('all') evoked.data = np.zeros(evoked.data.shape) evoked.data[50][1] = 1 assert_array_equal(_find_peaks(evoked, 10), evoked.times[1]) evoked.data[80][100] = 1 assert_array_equal(_find_peaks(evoked, 10), evoked.times[[1, 100]]) evoked.data[2][95] = 2 assert_array_equal(_find_peaks(evoked, 10), evoked.times[[1, 95]]) assert_array_equal(_find_peaks(evoked, 1), evoked.times[95]) # Test excluding bads channels evoked_grad.info['bads'] += [evoked_grad.info['ch_names'][0]] orig_bads = evoked_grad.info['bads'] evoked_grad.plot_topomap(ch_type='grad', times=[0], time_unit='ms') assert_array_equal(evoked_grad.info['bads'], orig_bads) plt.close('all') def test_plot_tfr_topomap(): """Test plotting of TFR data.""" raw = read_raw_fif(raw_fname) times = np.linspace(-0.1, 0.1, 200) res = 8 n_freqs = 3 nave = 1 rng = np.random.RandomState(42) picks = [93, 94, 96, 97, 21, 22, 24, 25, 129, 130, 315, 316, 2, 5, 8, 11] info = pick_info(raw.info, picks) data = rng.randn(len(picks), n_freqs, len(times)) tfr = AverageTFR(info, data, times, np.arange(n_freqs), nave) tfr.plot_topomap(ch_type='mag', tmin=0.05, tmax=0.150, fmin=0, fmax=10, res=res, contours=0) eclick = matplotlib.backend_bases.MouseEvent( 'button_press_event', plt.gcf().canvas, 0, 0, 1) eclick.xdata = eclick.ydata = 0.1 eclick.inaxes = plt.gca() erelease = matplotlib.backend_bases.MouseEvent( 'button_release_event', plt.gcf().canvas, 0.9, 0.9, 1) erelease.xdata = 0.3 erelease.ydata = 0.2 pos = np.array([[0.11, 0.11], [0.25, 0.5], [0.0, 0.2], [0.2, 0.39]]) _onselect(eclick, erelease, tfr, pos, 'grad', 1, 3, 1, 3, 'RdBu_r', list()) _onselect(eclick, erelease, tfr, pos, 'mag', 1, 3, 1, 3, 'RdBu_r', list()) eclick.xdata = eclick.ydata = 0. 
erelease.xdata = erelease.ydata = 0.9 tfr._onselect(eclick, erelease, None, 'mean', None) plt.close('all') # test plot_psds_topomap info = raw.info.copy() chan_inds = channel_indices_by_type(info) info = pick_info(info, chan_inds['grad'][:4]) fig, axes = plt.subplots() freqs = np.arange(3., 9.5) bands = [(4, 8, 'Theta')] psd = np.random.rand(len(info['ch_names']), freqs.shape[0]) plot_psds_topomap(psd, freqs, info, bands=bands, axes=[axes]) def test_ctf_plotting(): """Test CTF topomap plotting.""" raw = read_raw_fif(ctf_fname, preload=True) assert raw.compensation_grade == 3 events = make_fixed_length_events(raw, duration=0.01) assert len(events) > 10 evoked = Epochs(raw, events, tmin=0, tmax=0.01, baseline=None).average() assert get_current_comp(evoked.info) == 3 # smoke test that compensation does not matter evoked.plot_topomap(time_unit='s') # better test that topomaps can still be used without plotting ref evoked.pick_types(meg=True, ref_meg=False) evoked.plot_topomap() @pytest.mark.slowtest # can be slow on OSX @testing.requires_testing_data def test_plot_arrowmap(): """Test arrowmap plotting.""" evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0)) with pytest.raises(ValueError, match='Multiple channel types'): plot_arrowmap(evoked.data[:, 0], evoked.info) evoked_eeg = evoked.copy().pick_types(meg=False, eeg=True) with pytest.raises(ValueError, match='Multiple channel types'): plot_arrowmap(evoked_eeg.data[:, 0], evoked.info) evoked_mag = evoked.copy().pick_types(meg='mag') evoked_grad = evoked.copy().pick_types(meg='grad') plot_arrowmap(evoked_mag.data[:, 0], evoked_mag.info) plot_arrowmap(evoked_grad.data[:, 0], evoked_grad.info, info_to=evoked_mag.info) @testing.requires_testing_data def test_plot_topomap_neuromag122(): """Test topomap plotting.""" res = 8 fast_test = dict(res=res, contours=0, sensors=False) evoked = read_evokeds(evoked_fname, 'Left Auditory', baseline=(None, 0)) evoked.pick_types(meg='grad') evoked.pick_channels(evoked.ch_names[:122]) ch_names = ['MEG %03d' % k for k in range(1, 123)] for c in evoked.info['chs']: c['coil_type'] = FIFF.FIFFV_COIL_NM_122 evoked.rename_channels({c_old: c_new for (c_old, c_new) in zip(evoked.ch_names, ch_names)}) layout = find_layout(evoked.info) assert layout.kind.startswith('Neuromag_122') evoked.plot_topomap(times=[0.1], **fast_test) proj = Projection(active=False, desc="test", kind=1, data=dict(nrow=1, ncol=122, row_names=None, col_names=evoked.ch_names, data=np.ones(122)), explained_var=0.5) plot_projs_topomap([proj], evoked.info, **fast_test) def test_plot_topomap_bads(): """Test plotting topomap with bad channels (gh-7213).""" import matplotlib.pyplot as plt data = np.random.RandomState(0).randn(3, 1000) raw = RawArray(data, create_info(3, 1000., 'eeg')) ch_pos_dict = {name: pos for name, pos in zip(raw.ch_names, np.eye(3))} raw.info.set_montage(make_dig_montage(ch_pos_dict, coord_frame='head')) for count in range(3): raw.info['bads'] = raw.ch_names[:count] raw.info._check_consistency() plot_topomap(data[:, 0], raw.info) plt.close('all') def test_plot_topomap_bads_grad(): """Test plotting topomap with bad gradiometer channels (gh-8802).""" import matplotlib.pyplot as plt data = np.random.RandomState(0).randn(203) info = read_info(evoked_fname) info['bads'] = ['MEG 2242'] picks = pick_types(info, meg='grad') info = pick_info(info, picks) assert len(info['chs']) == 203 plot_topomap(data, info, res=8) plt.close('all') def test_plot_topomap_nirs_overlap(fnirs_epochs): """Test plotting nirs topomap with 
overlapping channels (gh-7414).""" fig = fnirs_epochs['A'].average(picks='hbo').plot_topomap() assert len(fig.axes) == 5 plt.close('all') @requires_sklearn def test_plot_topomap_nirs_ica(fnirs_epochs): """Test plotting nirs ica topomap.""" from mne.preprocessing import ICA fnirs_epochs = fnirs_epochs.load_data().pick(picks='hbo') fnirs_epochs = fnirs_epochs.pick(picks=range(30)) ica = ICA().fit(fnirs_epochs) fig = ica.plot_components() assert len(fig[0].axes) == 20 plt.close('all') def test_plot_cov_topomap(): """Test plotting a covariance topomap.""" cov = read_cov(cov_fname) info = read_info(evoked_fname) cov.plot_topomap(info) cov.plot_topomap(info, noise_cov=cov) plt.close('all')
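A minimal, self-contained sketch of the plot_topomap pattern these tests exercise (mirroring test_plot_topomap_bads above); the channel positions and data here are synthetic, illustrative values, not taken from the test fixtures.

import numpy as np
import matplotlib.pyplot as plt
from mne import create_info
from mne.channels import make_dig_montage
from mne.viz import plot_topomap

# Three synthetic EEG channels at orthogonal unit positions (head frame).
rng = np.random.RandomState(0)
info = create_info(3, 1000., 'eeg')
ch_pos = {name: pos for name, pos in zip(info['ch_names'], np.eye(3))}
info.set_montage(make_dig_montage(ch_pos, coord_frame='head'))

# One value per channel; plot_topomap returns the image and contour handles.
im, cont = plot_topomap(rng.randn(3), info, res=8, contours=0, sensors=False)
plt.close('all')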
wmvanvliet/mne-python
mne/viz/tests/test_topomap.py
mne/coreg.py
"""Icon helper methods.""" from typing import Optional def icon_for_battery_level( battery_level: Optional[int] = None, charging: bool = False ) -> str: """Return a battery icon valid identifier.""" icon = "mdi:battery" if battery_level is None: return f"{icon}-unknown" if charging and battery_level > 10: icon += "-charging-{}".format(int(round(battery_level / 20 - 0.01)) * 20) elif charging: icon += "-outline" elif battery_level <= 5: icon += "-alert" elif 5 < battery_level < 95: icon += "-{}".format(int(round(battery_level / 10 - 0.01)) * 10) return icon def icon_for_signal_level(signal_level: Optional[int] = None) -> str: """Return a signal icon valid identifier.""" if signal_level is None or signal_level == 0: return "mdi:signal-cellular-outline" if signal_level > 70: return "mdi:signal-cellular-3" if signal_level > 30: return "mdi:signal-cellular-2" return "mdi:signal-cellular-1"
"""Tests for Intent component.""" import pytest from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.const import SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.helpers import intent from homeassistant.setup import async_setup_component from tests.common import async_mock_service async def test_http_handle_intent(hass, hass_client, hass_admin_user): """Test handle intent via HTTP API.""" class TestIntentHandler(intent.IntentHandler): """Test Intent Handler.""" intent_type = "OrderBeer" async def async_handle(self, intent): """Handle the intent.""" assert intent.context.user_id == hass_admin_user.id response = intent.create_response() response.async_set_speech( "I've ordered a {}!".format(intent.slots["type"]["value"]) ) response.async_set_card( "Beer ordered", "You chose a {}.".format(intent.slots["type"]["value"]) ) return response intent.async_register(hass, TestIntentHandler()) result = await async_setup_component(hass, "intent", {}) assert result client = await hass_client() resp = await client.post( "/api/intent/handle", json={"name": "OrderBeer", "data": {"type": "Belgian"}} ) assert resp.status == 200 data = await resp.json() assert data == { "card": { "simple": {"content": "You chose a Belgian.", "title": "Beer ordered"} }, "speech": {"plain": {"extra_data": None, "speech": "I've ordered a Belgian!"}}, } async def test_cover_intents_loading(hass): """Test Cover Intents Loading.""" assert await async_setup_component(hass, "intent", {}) with pytest.raises(intent.UnknownIntent): await intent.async_handle( hass, "test", "HassOpenCover", {"name": {"value": "garage door"}} ) assert await async_setup_component(hass, "cover", {}) hass.states.async_set("cover.garage_door", "closed") calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) response = await intent.async_handle( hass, "test", "HassOpenCover", {"name": {"value": "garage door"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Opened garage door" assert len(calls) == 1 call = calls[0] assert call.domain == "cover" assert call.service == "open_cover" assert call.data == {"entity_id": "cover.garage_door"} async def test_turn_on_intent(hass): """Test HassTurnOn intent.""" result = await async_setup_component(hass, "homeassistant", {}) result = await async_setup_component(hass, "intent", {}) assert result hass.states.async_set("light.test_light", "off") calls = async_mock_service(hass, "light", SERVICE_TURN_ON) response = await intent.async_handle( hass, "test", "HassTurnOn", {"name": {"value": "test light"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Turned test light on" assert len(calls) == 1 call = calls[0] assert call.domain == "light" assert call.service == "turn_on" assert call.data == {"entity_id": ["light.test_light"]} async def test_turn_off_intent(hass): """Test HassTurnOff intent.""" result = await async_setup_component(hass, "homeassistant", {}) result = await async_setup_component(hass, "intent", {}) assert result hass.states.async_set("light.test_light", "on") calls = async_mock_service(hass, "light", SERVICE_TURN_OFF) response = await intent.async_handle( hass, "test", "HassTurnOff", {"name": {"value": "test light"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Turned test light off" assert len(calls) == 1 call = calls[0] assert call.domain == "light" assert call.service == "turn_off" assert call.data == {"entity_id": ["light.test_light"]} async def 
test_toggle_intent(hass): """Test HassToggle intent.""" result = await async_setup_component(hass, "homeassistant", {}) result = await async_setup_component(hass, "intent", {}) assert result hass.states.async_set("light.test_light", "off") calls = async_mock_service(hass, "light", SERVICE_TOGGLE) response = await intent.async_handle( hass, "test", "HassToggle", {"name": {"value": "test light"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Toggled test light" assert len(calls) == 1 call = calls[0] assert call.domain == "light" assert call.service == "toggle" assert call.data == {"entity_id": ["light.test_light"]} async def test_turn_on_multiple_intent(hass): """Test HassTurnOn intent with multiple similar entities. This tests that matching finds the proper entity among similar names. """ result = await async_setup_component(hass, "homeassistant", {}) result = await async_setup_component(hass, "intent", {}) assert result hass.states.async_set("light.test_light", "off") hass.states.async_set("light.test_lights_2", "off") hass.states.async_set("light.test_lighter", "off") calls = async_mock_service(hass, "light", SERVICE_TURN_ON) response = await intent.async_handle( hass, "test", "HassTurnOn", {"name": {"value": "test lights"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Turned test lights 2 on" assert len(calls) == 1 call = calls[0] assert call.domain == "light" assert call.service == "turn_on" assert call.data == {"entity_id": ["light.test_lights_2"]}
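For reference, the same registration pattern as TestIntentHandler above, with a hypothetical "OrderPizza" intent and a slot schema; the intent name, slot, and handler here are illustrative and not part of the tests.

import voluptuous as vol
from homeassistant.helpers import intent

class OrderPizzaHandler(intent.IntentHandler):
    """Hypothetical handler mirroring TestIntentHandler above."""

    intent_type = "OrderPizza"
    slot_schema = {vol.Required("size"): str}  # slots validated before handling

    async def async_handle(self, intent_obj):
        """Respond with speech built from the validated slot value."""
        slots = self.async_validate_slots(intent_obj.slots)
        response = intent_obj.create_response()
        response.async_set_speech(f"Ordered a {slots['size']['value']} pizza!")
        return response

# Registered exactly as in test_http_handle_intent:
# intent.async_register(hass, OrderPizzaHandler())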
nkgilley/home-assistant
tests/components/intent/test_init.py
homeassistant/helpers/icon.py
"""Provide a registry to track entity IDs. The Entity Registry keeps a registry of entities. Entities are uniquely identified by their domain, platform and a unique id provided by that platform. The Entity Registry will persist itself 10 seconds after a new entity is registered. Registering a new entity while a timer is in progress resets the timer. """ from collections import OrderedDict import logging from typing import ( TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Tuple, cast, ) import attr from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_SUPPORTED_FEATURES, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, STATE_UNAVAILABLE, ) from homeassistant.core import Event, callback, split_entity_id, valid_entity_id from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED from homeassistant.util import slugify from homeassistant.util.yaml import load_yaml from .singleton import singleton from .typing import HomeAssistantType if TYPE_CHECKING: from homeassistant.config_entries import ConfigEntry # noqa: F401 # mypy: allow-untyped-defs, no-check-untyped-defs PATH_REGISTRY = "entity_registry.yaml" DATA_REGISTRY = "entity_registry" EVENT_ENTITY_REGISTRY_UPDATED = "entity_registry_updated" SAVE_DELAY = 10 _LOGGER = logging.getLogger(__name__) _UNDEF = object() DISABLED_CONFIG_ENTRY = "config_entry" DISABLED_HASS = "hass" DISABLED_USER = "user" DISABLED_INTEGRATION = "integration" ATTR_RESTORED = "restored" STORAGE_VERSION = 1 STORAGE_KEY = "core.entity_registry" # Attributes relevant to describing entity # to external services. ENTITY_DESCRIBING_ATTRIBUTES = { "entity_id", "name", "original_name", "capabilities", "supported_features", "device_class", "unit_of_measurement", } @attr.s(slots=True, frozen=True) class RegistryEntry: """Entity Registry Entry.""" entity_id = attr.ib(type=str) unique_id = attr.ib(type=str) platform = attr.ib(type=str) name = attr.ib(type=str, default=None) icon = attr.ib(type=str, default=None) device_id: Optional[str] = attr.ib(default=None) config_entry_id: Optional[str] = attr.ib(default=None) disabled_by = attr.ib( type=Optional[str], default=None, validator=attr.validators.in_( ( DISABLED_HASS, DISABLED_USER, DISABLED_INTEGRATION, DISABLED_CONFIG_ENTRY, None, ) ), ) capabilities: Optional[Dict[str, Any]] = attr.ib(default=None) supported_features: int = attr.ib(default=0) device_class: Optional[str] = attr.ib(default=None) unit_of_measurement: Optional[str] = attr.ib(default=None) # As set by integration original_name: Optional[str] = attr.ib(default=None) original_icon: Optional[str] = attr.ib(default=None) domain = attr.ib(type=str, init=False, repr=False) @domain.default def _domain_default(self) -> str: """Compute domain value.""" return split_entity_id(self.entity_id)[0] @property def disabled(self) -> bool: """Return if entry is disabled.""" return self.disabled_by is not None class EntityRegistry: """Class to hold a registry of entities.""" def __init__(self, hass: HomeAssistantType): """Initialize the registry.""" self.hass = hass self.entities: Dict[str, RegistryEntry] self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) self.hass.bus.async_listen( EVENT_DEVICE_REGISTRY_UPDATED, self.async_device_removed ) @callback def async_get_device_class_lookup(self, domain_device_classes: set) -> dict: """Return a lookup for the device class by domain.""" lookup: Dict[str, Dict[Tuple[Any, Any], str]] = {} for entity in self.entities.values(): if not entity.device_id: 
continue domain_device_class = (entity.domain, entity.device_class) if domain_device_class not in domain_device_classes: continue if entity.device_id not in lookup: lookup[entity.device_id] = {domain_device_class: entity.entity_id} else: lookup[entity.device_id][domain_device_class] = entity.entity_id return lookup @callback def async_is_registered(self, entity_id: str) -> bool: """Check if an entity_id is currently registered.""" return entity_id in self.entities @callback def async_get(self, entity_id: str) -> Optional[RegistryEntry]: """Get EntityEntry for an entity_id.""" return self.entities.get(entity_id) @callback def async_get_entity_id( self, domain: str, platform: str, unique_id: str ) -> Optional[str]: """Check if an entity_id is currently registered.""" for entity in self.entities.values(): if ( entity.domain == domain and entity.platform == platform and entity.unique_id == unique_id ): return entity.entity_id return None @callback def async_generate_entity_id( self, domain: str, suggested_object_id: str, known_object_ids: Optional[Iterable[str]] = None, ) -> str: """Generate an entity ID that does not conflict. Conflicts checked against registered and currently existing entities. """ preferred_string = f"{domain}.{slugify(suggested_object_id)}" test_string = preferred_string if not known_object_ids: known_object_ids = {} tries = 1 while ( test_string in self.entities or test_string in known_object_ids or self.hass.states.get(test_string) ): tries += 1 test_string = f"{preferred_string}_{tries}" return test_string @callback def async_get_or_create( self, domain: str, platform: str, unique_id: str, *, # To influence entity ID generation suggested_object_id: Optional[str] = None, known_object_ids: Optional[Iterable[str]] = None, # To disable an entity if it gets created disabled_by: Optional[str] = None, # Data that we want entry to have config_entry: Optional["ConfigEntry"] = None, device_id: Optional[str] = None, capabilities: Optional[Dict[str, Any]] = None, supported_features: Optional[int] = None, device_class: Optional[str] = None, unit_of_measurement: Optional[str] = None, original_name: Optional[str] = None, original_icon: Optional[str] = None, ) -> RegistryEntry: """Get entity. Create if it doesn't exist.""" config_entry_id = None if config_entry: config_entry_id = config_entry.entry_id entity_id = self.async_get_entity_id(domain, platform, unique_id) if entity_id: return self._async_update_entity( # type: ignore entity_id, config_entry_id=config_entry_id or _UNDEF, device_id=device_id or _UNDEF, capabilities=capabilities or _UNDEF, supported_features=supported_features or _UNDEF, device_class=device_class or _UNDEF, unit_of_measurement=unit_of_measurement or _UNDEF, original_name=original_name or _UNDEF, original_icon=original_icon or _UNDEF, # When we changed our slugify algorithm, we invalidated some # stored entity IDs with either a __ or ending in _. # Fix introduced in 0.86 (Jan 23, 2019). Next line can be # removed when we release 1.0 or in 2020. 
new_entity_id=".".join( slugify(part) for part in entity_id.split(".", 1) ), ) entity_id = self.async_generate_entity_id( domain, suggested_object_id or f"{platform}_{unique_id}", known_object_ids ) if ( disabled_by is None and config_entry and config_entry.system_options.disable_new_entities ): disabled_by = DISABLED_INTEGRATION entity = RegistryEntry( entity_id=entity_id, config_entry_id=config_entry_id, device_id=device_id, unique_id=unique_id, platform=platform, disabled_by=disabled_by, capabilities=capabilities, supported_features=supported_features or 0, device_class=device_class, unit_of_measurement=unit_of_measurement, original_name=original_name, original_icon=original_icon, ) self.entities[entity_id] = entity _LOGGER.info("Registered new %s.%s entity: %s", domain, platform, entity_id) self.async_schedule_save() self.hass.bus.async_fire( EVENT_ENTITY_REGISTRY_UPDATED, {"action": "create", "entity_id": entity_id} ) return entity @callback def async_remove(self, entity_id: str) -> None: """Remove an entity from registry.""" self.entities.pop(entity_id) self.hass.bus.async_fire( EVENT_ENTITY_REGISTRY_UPDATED, {"action": "remove", "entity_id": entity_id} ) self.async_schedule_save() @callback def async_device_removed(self, event: Event) -> None: """Handle the removal of a device. Remove entities from the registry that are associated to a device when the device is removed. """ if event.data["action"] != "remove": return entities = async_entries_for_device(self, event.data["device_id"]) for entity in entities: self.async_remove(entity.entity_id) @callback def async_update_entity( self, entity_id, *, name=_UNDEF, icon=_UNDEF, new_entity_id=_UNDEF, new_unique_id=_UNDEF, disabled_by=_UNDEF, ): """Update properties of an entity.""" return cast( # cast until we have _async_update_entity type hinted RegistryEntry, self._async_update_entity( entity_id, name=name, icon=icon, new_entity_id=new_entity_id, new_unique_id=new_unique_id, disabled_by=disabled_by, ), ) @callback def _async_update_entity( self, entity_id, *, name=_UNDEF, icon=_UNDEF, config_entry_id=_UNDEF, new_entity_id=_UNDEF, device_id=_UNDEF, new_unique_id=_UNDEF, disabled_by=_UNDEF, capabilities=_UNDEF, supported_features=_UNDEF, device_class=_UNDEF, unit_of_measurement=_UNDEF, original_name=_UNDEF, original_icon=_UNDEF, ): """Private facing update properties method.""" old = self.entities[entity_id] changes = {} for attr_name, value in ( ("name", name), ("icon", icon), ("config_entry_id", config_entry_id), ("device_id", device_id), ("disabled_by", disabled_by), ("capabilities", capabilities), ("supported_features", supported_features), ("device_class", device_class), ("unit_of_measurement", unit_of_measurement), ("original_name", original_name), ("original_icon", original_icon), ): if value is not _UNDEF and value != getattr(old, attr_name): changes[attr_name] = value if new_entity_id is not _UNDEF and new_entity_id != old.entity_id: if self.async_is_registered(new_entity_id): raise ValueError("Entity is already registered") if not valid_entity_id(new_entity_id): raise ValueError("Invalid entity ID") if split_entity_id(new_entity_id)[0] != split_entity_id(entity_id)[0]: raise ValueError("New entity ID should be same domain") self.entities.pop(entity_id) entity_id = changes["entity_id"] = new_entity_id if new_unique_id is not _UNDEF: conflict = next( ( entity for entity in self.entities.values() if entity.unique_id == new_unique_id and entity.domain == old.domain and entity.platform == old.platform ), None, ) if conflict: raise 
ValueError( f"Unique id '{new_unique_id}' is already in use by " f"'{conflict.entity_id}'" ) changes["unique_id"] = new_unique_id if not changes: return old new = self.entities[entity_id] = attr.evolve(old, **changes) self.async_schedule_save() data = {"action": "update", "entity_id": entity_id, "changes": list(changes)} if old.entity_id != entity_id: data["old_entity_id"] = old.entity_id self.hass.bus.async_fire(EVENT_ENTITY_REGISTRY_UPDATED, data) return new async def async_load(self) -> None: """Load the entity registry.""" async_setup_entity_restore(self.hass, self) data = await self.hass.helpers.storage.async_migrator( self.hass.config.path(PATH_REGISTRY), self._store, old_conf_load_func=load_yaml, old_conf_migrate_func=_async_migrate, ) entities: Dict[str, RegistryEntry] = OrderedDict() if data is not None: for entity in data["entities"]: # Some old installations can have some bad entities. # Filter them out as they cause errors down the line. # Can be removed in Jan 2021 if not valid_entity_id(entity["entity_id"]): continue entities[entity["entity_id"]] = RegistryEntry( entity_id=entity["entity_id"], config_entry_id=entity.get("config_entry_id"), device_id=entity.get("device_id"), unique_id=entity["unique_id"], platform=entity["platform"], name=entity.get("name"), icon=entity.get("icon"), disabled_by=entity.get("disabled_by"), capabilities=entity.get("capabilities") or {}, supported_features=entity.get("supported_features", 0), device_class=entity.get("device_class"), unit_of_measurement=entity.get("unit_of_measurement"), original_name=entity.get("original_name"), original_icon=entity.get("original_icon"), ) self.entities = entities @callback def async_schedule_save(self) -> None: """Schedule saving the entity registry.""" self._store.async_delay_save(self._data_to_save, SAVE_DELAY) @callback def _data_to_save(self) -> Dict[str, Any]: """Return data of entity registry to store in a file.""" data = {} data["entities"] = [ { "entity_id": entry.entity_id, "config_entry_id": entry.config_entry_id, "device_id": entry.device_id, "unique_id": entry.unique_id, "platform": entry.platform, "name": entry.name, "icon": entry.icon, "disabled_by": entry.disabled_by, "capabilities": entry.capabilities, "supported_features": entry.supported_features, "device_class": entry.device_class, "unit_of_measurement": entry.unit_of_measurement, "original_name": entry.original_name, "original_icon": entry.original_icon, } for entry in self.entities.values() ] return data @callback def async_clear_config_entry(self, config_entry: str) -> None: """Clear config entry from registry entries.""" for entity_id in [ entity_id for entity_id, entry in self.entities.items() if config_entry == entry.config_entry_id ]: self.async_remove(entity_id) @singleton(DATA_REGISTRY) async def async_get_registry(hass: HomeAssistantType) -> EntityRegistry: """Create entity registry.""" reg = EntityRegistry(hass) await reg.async_load() return reg @callback def async_entries_for_device( registry: EntityRegistry, device_id: str ) -> List[RegistryEntry]: """Return entries that match a device.""" return [ entry for entry in registry.entities.values() if entry.device_id == device_id ] @callback def async_entries_for_config_entry( registry: EntityRegistry, config_entry_id: str ) -> List[RegistryEntry]: """Return entries that match a config entry.""" return [ entry for entry in registry.entities.values() if entry.config_entry_id == config_entry_id ] async def _async_migrate(entities: Dict[str, Any]) -> Dict[str, List[Dict[str, Any]]]: 
"""Migrate the YAML config file to storage helper format.""" return { "entities": [ {"entity_id": entity_id, **info} for entity_id, info in entities.items() ] } @callback def async_setup_entity_restore( hass: HomeAssistantType, registry: EntityRegistry ) -> None: """Set up the entity restore mechanism.""" @callback def cleanup_restored_states(event: Event) -> None: """Clean up restored states.""" if event.data["action"] != "remove": return state = hass.states.get(event.data["entity_id"]) if state is None or not state.attributes.get(ATTR_RESTORED): return hass.states.async_remove(event.data["entity_id"], context=event.context) hass.bus.async_listen(EVENT_ENTITY_REGISTRY_UPDATED, cleanup_restored_states) if hass.is_running: return @callback def _write_unavailable_states(_: Event) -> None: """Make sure state machine contains entry for each registered entity.""" states = hass.states existing = set(states.async_entity_ids()) for entry in registry.entities.values(): if entry.entity_id in existing or entry.disabled: continue attrs: Dict[str, Any] = {ATTR_RESTORED: True} if entry.capabilities is not None: attrs.update(entry.capabilities) if entry.supported_features is not None: attrs[ATTR_SUPPORTED_FEATURES] = entry.supported_features if entry.device_class is not None: attrs[ATTR_DEVICE_CLASS] = entry.device_class if entry.unit_of_measurement is not None: attrs[ATTR_UNIT_OF_MEASUREMENT] = entry.unit_of_measurement name = entry.name or entry.original_name if name is not None: attrs[ATTR_FRIENDLY_NAME] = name icon = entry.icon or entry.original_icon if icon is not None: attrs[ATTR_ICON] = icon states.async_set(entry.entity_id, STATE_UNAVAILABLE, attrs) hass.bus.async_listen(EVENT_HOMEASSISTANT_START, _write_unavailable_states) async def async_migrate_entries( hass: HomeAssistantType, config_entry_id: str, entry_callback: Callable[[RegistryEntry], Optional[dict]], ) -> None: """Migrator of unique IDs.""" ent_reg = await async_get_registry(hass) for entry in ent_reg.entities.values(): if entry.config_entry_id != config_entry_id: continue updates = entry_callback(entry) if updates is not None: ent_reg.async_update_entity(entry.entity_id, **updates)
"""Tests for Intent component.""" import pytest from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.const import SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.helpers import intent from homeassistant.setup import async_setup_component from tests.common import async_mock_service async def test_http_handle_intent(hass, hass_client, hass_admin_user): """Test handle intent via HTTP API.""" class TestIntentHandler(intent.IntentHandler): """Test Intent Handler.""" intent_type = "OrderBeer" async def async_handle(self, intent): """Handle the intent.""" assert intent.context.user_id == hass_admin_user.id response = intent.create_response() response.async_set_speech( "I've ordered a {}!".format(intent.slots["type"]["value"]) ) response.async_set_card( "Beer ordered", "You chose a {}.".format(intent.slots["type"]["value"]) ) return response intent.async_register(hass, TestIntentHandler()) result = await async_setup_component(hass, "intent", {}) assert result client = await hass_client() resp = await client.post( "/api/intent/handle", json={"name": "OrderBeer", "data": {"type": "Belgian"}} ) assert resp.status == 200 data = await resp.json() assert data == { "card": { "simple": {"content": "You chose a Belgian.", "title": "Beer ordered"} }, "speech": {"plain": {"extra_data": None, "speech": "I've ordered a Belgian!"}}, } async def test_cover_intents_loading(hass): """Test Cover Intents Loading.""" assert await async_setup_component(hass, "intent", {}) with pytest.raises(intent.UnknownIntent): await intent.async_handle( hass, "test", "HassOpenCover", {"name": {"value": "garage door"}} ) assert await async_setup_component(hass, "cover", {}) hass.states.async_set("cover.garage_door", "closed") calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) response = await intent.async_handle( hass, "test", "HassOpenCover", {"name": {"value": "garage door"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Opened garage door" assert len(calls) == 1 call = calls[0] assert call.domain == "cover" assert call.service == "open_cover" assert call.data == {"entity_id": "cover.garage_door"} async def test_turn_on_intent(hass): """Test HassTurnOn intent.""" result = await async_setup_component(hass, "homeassistant", {}) result = await async_setup_component(hass, "intent", {}) assert result hass.states.async_set("light.test_light", "off") calls = async_mock_service(hass, "light", SERVICE_TURN_ON) response = await intent.async_handle( hass, "test", "HassTurnOn", {"name": {"value": "test light"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Turned test light on" assert len(calls) == 1 call = calls[0] assert call.domain == "light" assert call.service == "turn_on" assert call.data == {"entity_id": ["light.test_light"]} async def test_turn_off_intent(hass): """Test HassTurnOff intent.""" result = await async_setup_component(hass, "homeassistant", {}) result = await async_setup_component(hass, "intent", {}) assert result hass.states.async_set("light.test_light", "on") calls = async_mock_service(hass, "light", SERVICE_TURN_OFF) response = await intent.async_handle( hass, "test", "HassTurnOff", {"name": {"value": "test light"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Turned test light off" assert len(calls) == 1 call = calls[0] assert call.domain == "light" assert call.service == "turn_off" assert call.data == {"entity_id": ["light.test_light"]} async def 
test_toggle_intent(hass): """Test HassToggle intent.""" result = await async_setup_component(hass, "homeassistant", {}) result = await async_setup_component(hass, "intent", {}) assert result hass.states.async_set("light.test_light", "off") calls = async_mock_service(hass, "light", SERVICE_TOGGLE) response = await intent.async_handle( hass, "test", "HassToggle", {"name": {"value": "test light"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Toggled test light" assert len(calls) == 1 call = calls[0] assert call.domain == "light" assert call.service == "toggle" assert call.data == {"entity_id": ["light.test_light"]} async def test_turn_on_multiple_intent(hass): """Test HassTurnOn intent with multiple similar entities. This tests that matching finds the proper entity among similar names. """ result = await async_setup_component(hass, "homeassistant", {}) result = await async_setup_component(hass, "intent", {}) assert result hass.states.async_set("light.test_light", "off") hass.states.async_set("light.test_lights_2", "off") hass.states.async_set("light.test_lighter", "off") calls = async_mock_service(hass, "light", SERVICE_TURN_ON) response = await intent.async_handle( hass, "test", "HassTurnOn", {"name": {"value": "test lights"}} ) await hass.async_block_till_done() assert response.speech["plain"]["speech"] == "Turned test lights 2 on" assert len(calls) == 1 call = calls[0] assert call.domain == "light" assert call.service == "turn_on" assert call.data == {"entity_id": ["light.test_lights_2"]}
homeassistant/helpers/entity_registry.py
# flake8: noqa from pandas.core.reshape.concat import concat from pandas.core.reshape.melt import lreshape, melt, wide_to_long from pandas.core.reshape.merge import merge, merge_asof, merge_ordered from pandas.core.reshape.pivot import crosstab, pivot, pivot_table from pandas.core.reshape.reshape import get_dummies from pandas.core.reshape.tile import cut, qcut
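These re-exports back the corresponding top-level pandas functions; a quick round trip between wide and long form shows two of them together (the frame below is a made-up example).

import pandas as pd

df = pd.DataFrame({"id": [1, 2], "x": [10, 20], "y": [30, 40]})
# Wide -> long via melt ...
long_form = pd.melt(df, id_vars="id", value_vars=["x", "y"])
# ... and back via pivot_table (mean aggregation is a no-op on unique cells).
wide_again = pd.pivot_table(long_form, index="id", columns="variable", values="value")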
"""SQL io tests The SQL tests are broken down in different classes: - `PandasSQLTest`: base class with common methods for all test classes - Tests for the public API (only tests with sqlite3) - `_TestSQLApi` base class - `TestSQLApi`: test the public API with sqlalchemy engine - `TestSQLiteFallbackApi`: test the public API with a sqlite DBAPI connection - Tests for the different SQL flavors (flavor specific type conversions) - Tests for the sqlalchemy mode: `_TestSQLAlchemy` is the base class with common methods, `_TestSQLAlchemyConn` tests the API with a SQLAlchemy Connection object. The different tested flavors (sqlite3, MySQL, PostgreSQL) derive from the base class - Tests for the fallback mode (`TestSQLiteFallback`) """ import csv from datetime import date, datetime, time from io import StringIO import sqlite3 import warnings import numpy as np import pytest from pandas.core.dtypes.common import is_datetime64_dtype, is_datetime64tz_dtype import pandas as pd from pandas import ( DataFrame, Index, MultiIndex, Series, Timestamp, concat, date_range, isna, to_datetime, to_timedelta, ) import pandas._testing as tm import pandas.io.sql as sql from pandas.io.sql import read_sql_query, read_sql_table try: import sqlalchemy from sqlalchemy.ext import declarative from sqlalchemy.orm import session as sa_session import sqlalchemy.schema import sqlalchemy.sql.sqltypes as sqltypes SQLALCHEMY_INSTALLED = True except ImportError: SQLALCHEMY_INSTALLED = False SQL_STRINGS = { "create_iris": { "sqlite": """CREATE TABLE iris ( "SepalLength" REAL, "SepalWidth" REAL, "PetalLength" REAL, "PetalWidth" REAL, "Name" TEXT )""", "mysql": """CREATE TABLE iris ( `SepalLength` DOUBLE, `SepalWidth` DOUBLE, `PetalLength` DOUBLE, `PetalWidth` DOUBLE, `Name` VARCHAR(200) )""", "postgresql": """CREATE TABLE iris ( "SepalLength" DOUBLE PRECISION, "SepalWidth" DOUBLE PRECISION, "PetalLength" DOUBLE PRECISION, "PetalWidth" DOUBLE PRECISION, "Name" VARCHAR(200) )""", }, "insert_iris": { "sqlite": """INSERT INTO iris VALUES(?, ?, ?, ?, ?)""", "mysql": """INSERT INTO iris VALUES(%s, %s, %s, %s, "%s");""", "postgresql": """INSERT INTO iris VALUES(%s, %s, %s, %s, %s);""", }, "create_test_types": { "sqlite": """CREATE TABLE types_test_data ( "TextCol" TEXT, "DateCol" TEXT, "IntDateCol" INTEGER, "IntDateOnlyCol" INTEGER, "FloatCol" REAL, "IntCol" INTEGER, "BoolCol" INTEGER, "IntColWithNull" INTEGER, "BoolColWithNull" INTEGER )""", "mysql": """CREATE TABLE types_test_data ( `TextCol` TEXT, `DateCol` DATETIME, `IntDateCol` INTEGER, `IntDateOnlyCol` INTEGER, `FloatCol` DOUBLE, `IntCol` INTEGER, `BoolCol` BOOLEAN, `IntColWithNull` INTEGER, `BoolColWithNull` BOOLEAN )""", "postgresql": """CREATE TABLE types_test_data ( "TextCol" TEXT, "DateCol" TIMESTAMP, "DateColWithTz" TIMESTAMP WITH TIME ZONE, "IntDateCol" INTEGER, "IntDateOnlyCol" INTEGER, "FloatCol" DOUBLE PRECISION, "IntCol" INTEGER, "BoolCol" BOOLEAN, "IntColWithNull" INTEGER, "BoolColWithNull" BOOLEAN )""", }, "insert_test_types": { "sqlite": { "query": """ INSERT INTO types_test_data VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?) 
""", "fields": ( "TextCol", "DateCol", "IntDateCol", "IntDateOnlyCol", "FloatCol", "IntCol", "BoolCol", "IntColWithNull", "BoolColWithNull", ), }, "mysql": { "query": """ INSERT INTO types_test_data VALUES("%s", %s, %s, %s, %s, %s, %s, %s, %s) """, "fields": ( "TextCol", "DateCol", "IntDateCol", "IntDateOnlyCol", "FloatCol", "IntCol", "BoolCol", "IntColWithNull", "BoolColWithNull", ), }, "postgresql": { "query": """ INSERT INTO types_test_data VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) """, "fields": ( "TextCol", "DateCol", "DateColWithTz", "IntDateCol", "IntDateOnlyCol", "FloatCol", "IntCol", "BoolCol", "IntColWithNull", "BoolColWithNull", ), }, }, "read_parameters": { "sqlite": "SELECT * FROM iris WHERE Name=? AND SepalLength=?", "mysql": 'SELECT * FROM iris WHERE `Name`="%s" AND `SepalLength`=%s', "postgresql": 'SELECT * FROM iris WHERE "Name"=%s AND "SepalLength"=%s', }, "read_named_parameters": { "sqlite": """ SELECT * FROM iris WHERE Name=:name AND SepalLength=:length """, "mysql": """ SELECT * FROM iris WHERE `Name`="%(name)s" AND `SepalLength`=%(length)s """, "postgresql": """ SELECT * FROM iris WHERE "Name"=%(name)s AND "SepalLength"=%(length)s """, }, "read_no_parameters_with_percent": { "sqlite": "SELECT * FROM iris WHERE Name LIKE '%'", "mysql": "SELECT * FROM iris WHERE `Name` LIKE '%'", "postgresql": "SELECT * FROM iris WHERE \"Name\" LIKE '%'", }, "create_view": { "sqlite": """ CREATE VIEW iris_view AS SELECT * FROM iris """ }, } class MixInBase: def teardown_method(self, method): # if setup fails, there may not be a connection to close. if hasattr(self, "conn"): for tbl in self._get_all_tables(): self.drop_table(tbl) self._close_conn() class MySQLMixIn(MixInBase): def drop_table(self, table_name): cur = self.conn.cursor() cur.execute(f"DROP TABLE IF EXISTS {sql._get_valid_mysql_name(table_name)}") self.conn.commit() def _get_all_tables(self): cur = self.conn.cursor() cur.execute("SHOW TABLES") return [table[0] for table in cur.fetchall()] def _close_conn(self): from pymysql.err import Error try: self.conn.close() except Error: pass class SQLiteMixIn(MixInBase): def drop_table(self, table_name): self.conn.execute( f"DROP TABLE IF EXISTS {sql._get_valid_sqlite_name(table_name)}" ) self.conn.commit() def _get_all_tables(self): c = self.conn.execute("SELECT name FROM sqlite_master WHERE type='table'") return [table[0] for table in c.fetchall()] def _close_conn(self): self.conn.close() class SQLAlchemyMixIn(MixInBase): def drop_table(self, table_name): sql.SQLDatabase(self.conn).drop_table(table_name) def _get_all_tables(self): meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() table_list = meta.tables.keys() return table_list def _close_conn(self): # https://docs.sqlalchemy.org/en/13/core/connections.html#engine-disposal self.conn.dispose() class PandasSQLTest: """ Base class with common private methods for SQLAlchemy and fallback cases. 
""" def _get_exec(self): if hasattr(self.conn, "execute"): return self.conn else: return self.conn.cursor() @pytest.fixture(params=[("io", "data", "csv", "iris.csv")]) def load_iris_data(self, datapath, request): iris_csv_file = datapath(*request.param) if not hasattr(self, "conn"): self.setup_connect() self.drop_table("iris") self._get_exec().execute(SQL_STRINGS["create_iris"][self.flavor]) with open(iris_csv_file, mode="r", newline=None) as iris_csv: r = csv.reader(iris_csv) next(r) # skip header row ins = SQL_STRINGS["insert_iris"][self.flavor] for row in r: self._get_exec().execute(ins, row) def _load_iris_view(self): self.drop_table("iris_view") self._get_exec().execute(SQL_STRINGS["create_view"][self.flavor]) def _check_iris_loaded_frame(self, iris_frame): pytype = iris_frame.dtypes[0].type row = iris_frame.iloc[0] assert issubclass(pytype, np.floating) tm.equalContents(row.values, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"]) def _load_test1_data(self): columns = ["index", "A", "B", "C", "D"] data = [ ( "2000-01-03 00:00:00", 0.980268513777, 3.68573087906, -0.364216805298, -1.15973806169, ), ( "2000-01-04 00:00:00", 1.04791624281, -0.0412318367011, -0.16181208307, 0.212549316967, ), ( "2000-01-05 00:00:00", 0.498580885705, 0.731167677815, -0.537677223318, 1.34627041952, ), ( "2000-01-06 00:00:00", 1.12020151869, 1.56762092543, 0.00364077397681, 0.67525259227, ), ] self.test_frame1 = DataFrame(data, columns=columns) def _load_test2_data(self): df = DataFrame( { "A": [4, 1, 3, 6], "B": ["asd", "gsq", "ylt", "jkl"], "C": [1.1, 3.1, 6.9, 5.3], "D": [False, True, True, False], "E": ["1990-11-22", "1991-10-26", "1993-11-26", "1995-12-12"], } ) df["E"] = to_datetime(df["E"]) self.test_frame2 = df def _load_test3_data(self): columns = ["index", "A", "B"] data = [ ("2000-01-03 00:00:00", 2 ** 31 - 1, -1.987670), ("2000-01-04 00:00:00", -29, -0.0412318367011), ("2000-01-05 00:00:00", 20000, 0.731167677815), ("2000-01-06 00:00:00", -290867, 1.56762092543), ] self.test_frame3 = DataFrame(data, columns=columns) def _load_types_test_data(self, data): def _filter_to_flavor(flavor, df): flavor_dtypes = { "sqlite": { "TextCol": "str", "DateCol": "str", "IntDateCol": "int64", "IntDateOnlyCol": "int64", "FloatCol": "float", "IntCol": "int64", "BoolCol": "int64", "IntColWithNull": "float", "BoolColWithNull": "float", }, "mysql": { "TextCol": "str", "DateCol": "str", "IntDateCol": "int64", "IntDateOnlyCol": "int64", "FloatCol": "float", "IntCol": "int64", "BoolCol": "bool", "IntColWithNull": "float", "BoolColWithNull": "float", }, "postgresql": { "TextCol": "str", "DateCol": "str", "DateColWithTz": "str", "IntDateCol": "int64", "IntDateOnlyCol": "int64", "FloatCol": "float", "IntCol": "int64", "BoolCol": "bool", "IntColWithNull": "float", "BoolColWithNull": "float", }, } dtypes = flavor_dtypes[flavor] return df[dtypes.keys()].astype(dtypes) df = DataFrame(data) self.types_test = { flavor: _filter_to_flavor(flavor, df) for flavor in ("sqlite", "mysql", "postgresql") } def _load_raw_sql(self): self.drop_table("types_test_data") self._get_exec().execute(SQL_STRINGS["create_test_types"][self.flavor]) ins = SQL_STRINGS["insert_test_types"][self.flavor] data = [ { "TextCol": "first", "DateCol": "2000-01-03 00:00:00", "DateColWithTz": "2000-01-01 00:00:00-08:00", "IntDateCol": 535852800, "IntDateOnlyCol": 20101010, "FloatCol": 10.10, "IntCol": 1, "BoolCol": False, "IntColWithNull": 1, "BoolColWithNull": False, }, { "TextCol": "first", "DateCol": "2000-01-04 00:00:00", "DateColWithTz": "2000-06-01 00:00:00-07:00", 
"IntDateCol": 1356998400, "IntDateOnlyCol": 20101212, "FloatCol": 10.10, "IntCol": 1, "BoolCol": False, "IntColWithNull": None, "BoolColWithNull": None, }, ] for d in data: self._get_exec().execute( ins["query"], [d[field] for field in ins["fields"]] ) self._load_types_test_data(data) def _count_rows(self, table_name): result = ( self._get_exec() .execute(f"SELECT count(*) AS count_1 FROM {table_name}") .fetchone() ) return result[0] def _read_sql_iris(self): iris_frame = self.pandasSQL.read_query("SELECT * FROM iris") self._check_iris_loaded_frame(iris_frame) def _read_sql_iris_parameter(self): query = SQL_STRINGS["read_parameters"][self.flavor] params = ["Iris-setosa", 5.1] iris_frame = self.pandasSQL.read_query(query, params=params) self._check_iris_loaded_frame(iris_frame) def _read_sql_iris_named_parameter(self): query = SQL_STRINGS["read_named_parameters"][self.flavor] params = {"name": "Iris-setosa", "length": 5.1} iris_frame = self.pandasSQL.read_query(query, params=params) self._check_iris_loaded_frame(iris_frame) def _read_sql_iris_no_parameter_with_percent(self): query = SQL_STRINGS["read_no_parameters_with_percent"][self.flavor] iris_frame = self.pandasSQL.read_query(query, params=None) self._check_iris_loaded_frame(iris_frame) def _to_sql(self, method=None): self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", method=method) assert self.pandasSQL.has_table("test_frame1") num_entries = len(self.test_frame1) num_rows = self._count_rows("test_frame1") assert num_rows == num_entries # Nuke table self.drop_table("test_frame1") def _to_sql_empty(self): self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1.iloc[:0], "test_frame1") def _to_sql_fail(self): self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail") assert self.pandasSQL.has_table("test_frame1") msg = "Table 'test_frame1' already exists" with pytest.raises(ValueError, match=msg): self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail") self.drop_table("test_frame1") def _to_sql_replace(self): self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail") # Add to table again self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="replace") assert self.pandasSQL.has_table("test_frame1") num_entries = len(self.test_frame1) num_rows = self._count_rows("test_frame1") assert num_rows == num_entries self.drop_table("test_frame1") def _to_sql_append(self): # Nuke table just in case self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail") # Add to table again self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="append") assert self.pandasSQL.has_table("test_frame1") num_entries = 2 * len(self.test_frame1) num_rows = self._count_rows("test_frame1") assert num_rows == num_entries self.drop_table("test_frame1") def _to_sql_method_callable(self): check = [] # used to double check function below is really being used def sample(pd_table, conn, keys, data_iter): check.append(1) data = [dict(zip(keys, row)) for row in data_iter] conn.execute(pd_table.table.insert(), data) self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", method=sample) assert self.pandasSQL.has_table("test_frame1") assert check == [1] num_entries = len(self.test_frame1) num_rows = self._count_rows("test_frame1") assert num_rows == num_entries # Nuke table self.drop_table("test_frame1") def _roundtrip(self): 
self.drop_table("test_frame_roundtrip") self.pandasSQL.to_sql(self.test_frame1, "test_frame_roundtrip") result = self.pandasSQL.read_query("SELECT * FROM test_frame_roundtrip") result.set_index("level_0", inplace=True) # result.index.astype(int) result.index.name = None tm.assert_frame_equal(result, self.test_frame1) def _execute_sql(self): # drop_sql = "DROP TABLE IF EXISTS test" # should already be done iris_results = self.pandasSQL.execute("SELECT * FROM iris") row = iris_results.fetchone() tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"]) def _to_sql_save_index(self): df = DataFrame.from_records( [(1, 2.1, "line1"), (2, 1.5, "line2")], columns=["A", "B", "C"], index=["A"] ) self.pandasSQL.to_sql(df, "test_to_sql_saves_index") ix_cols = self._get_index_columns("test_to_sql_saves_index") assert ix_cols == [["A"]] def _transaction_test(self): with self.pandasSQL.run_transaction() as trans: trans.execute("CREATE TABLE test_trans (A INT, B TEXT)") class DummyException(Exception): pass # Make sure when transaction is rolled back, no rows get inserted ins_sql = "INSERT INTO test_trans (A,B) VALUES (1, 'blah')" try: with self.pandasSQL.run_transaction() as trans: trans.execute(ins_sql) raise DummyException("error") except DummyException: # ignore raised exception pass res = self.pandasSQL.read_query("SELECT * FROM test_trans") assert len(res) == 0 # Make sure when transaction is committed, rows do get inserted with self.pandasSQL.run_transaction() as trans: trans.execute(ins_sql) res2 = self.pandasSQL.read_query("SELECT * FROM test_trans") assert len(res2) == 1 # ----------------------------------------------------------------------------- # -- Testing the public API class _TestSQLApi(PandasSQLTest): """ Base class to test the public API. From this two classes are derived to run these tests for both the sqlalchemy mode (`TestSQLApi`) and the fallback mode (`TestSQLiteFallbackApi`). These tests are run with sqlite3. Specific tests for the different sql flavours are included in `_TestSQLAlchemy`. Notes: flavor can always be passed even in SQLAlchemy mode, should be correctly ignored. 
we don't use drop_table because that isn't part of the public api """ flavor = "sqlite" mode: str def setup_connect(self): self.conn = self.connect() @pytest.fixture(autouse=True) def setup_method(self, load_iris_data): self.load_test_data_and_sql() def load_test_data_and_sql(self): self._load_iris_view() self._load_test1_data() self._load_test2_data() self._load_test3_data() self._load_raw_sql() def test_read_sql_iris(self): iris_frame = sql.read_sql_query("SELECT * FROM iris", self.conn) self._check_iris_loaded_frame(iris_frame) def test_read_sql_view(self): iris_frame = sql.read_sql_query("SELECT * FROM iris_view", self.conn) self._check_iris_loaded_frame(iris_frame) def test_read_sql_with_chunksize_no_result(self): query = "SELECT * FROM iris_view WHERE SepalLength < 0.0" with_batch = sql.read_sql_query(query, self.conn, chunksize=5) without_batch = sql.read_sql_query(query, self.conn) tm.assert_frame_equal(pd.concat(with_batch), without_batch) def test_to_sql(self): sql.to_sql(self.test_frame1, "test_frame1", self.conn) assert sql.has_table("test_frame1", self.conn) def test_to_sql_fail(self): sql.to_sql(self.test_frame1, "test_frame2", self.conn, if_exists="fail") assert sql.has_table("test_frame2", self.conn) msg = "Table 'test_frame2' already exists" with pytest.raises(ValueError, match=msg): sql.to_sql(self.test_frame1, "test_frame2", self.conn, if_exists="fail") def test_to_sql_replace(self): sql.to_sql(self.test_frame1, "test_frame3", self.conn, if_exists="fail") # Add to table again sql.to_sql(self.test_frame1, "test_frame3", self.conn, if_exists="replace") assert sql.has_table("test_frame3", self.conn) num_entries = len(self.test_frame1) num_rows = self._count_rows("test_frame3") assert num_rows == num_entries def test_to_sql_append(self): sql.to_sql(self.test_frame1, "test_frame4", self.conn, if_exists="fail") # Add to table again sql.to_sql(self.test_frame1, "test_frame4", self.conn, if_exists="append") assert sql.has_table("test_frame4", self.conn) num_entries = 2 * len(self.test_frame1) num_rows = self._count_rows("test_frame4") assert num_rows == num_entries def test_to_sql_type_mapping(self): sql.to_sql(self.test_frame3, "test_frame5", self.conn, index=False) result = sql.read_sql("SELECT * FROM test_frame5", self.conn) tm.assert_frame_equal(self.test_frame3, result) def test_to_sql_series(self): s = Series(np.arange(5, dtype="int64"), name="series") sql.to_sql(s, "test_series", self.conn, index=False) s2 = sql.read_sql_query("SELECT * FROM test_series", self.conn) tm.assert_frame_equal(s.to_frame(), s2) def test_roundtrip(self): sql.to_sql(self.test_frame1, "test_frame_roundtrip", con=self.conn) result = sql.read_sql_query("SELECT * FROM test_frame_roundtrip", con=self.conn) # HACK! 
result.index = self.test_frame1.index result.set_index("level_0", inplace=True) result.index.astype(int) result.index.name = None tm.assert_frame_equal(result, self.test_frame1) def test_roundtrip_chunksize(self): sql.to_sql( self.test_frame1, "test_frame_roundtrip", con=self.conn, index=False, chunksize=2, ) result = sql.read_sql_query("SELECT * FROM test_frame_roundtrip", con=self.conn) tm.assert_frame_equal(result, self.test_frame1) def test_execute_sql(self): # drop_sql = "DROP TABLE IF EXISTS test" # should already be done iris_results = sql.execute("SELECT * FROM iris", con=self.conn) row = iris_results.fetchone() tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"]) def test_date_parsing(self): # Test date parsing in read_sql # No Parsing df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn) assert not issubclass(df.DateCol.dtype.type, np.datetime64) df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates=["DateCol"] ) assert issubclass(df.DateCol.dtype.type, np.datetime64) assert df.DateCol.tolist() == [ Timestamp(2000, 1, 3, 0, 0, 0), Timestamp(2000, 1, 4, 0, 0, 0), ] df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates={"DateCol": "%Y-%m-%d %H:%M:%S"}, ) assert issubclass(df.DateCol.dtype.type, np.datetime64) assert df.DateCol.tolist() == [ Timestamp(2000, 1, 3, 0, 0, 0), Timestamp(2000, 1, 4, 0, 0, 0), ] df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates=["IntDateCol"] ) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) assert df.IntDateCol.tolist() == [ Timestamp(1986, 12, 25, 0, 0, 0), Timestamp(2013, 1, 1, 0, 0, 0), ] df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates={"IntDateCol": "s"} ) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) assert df.IntDateCol.tolist() == [ Timestamp(1986, 12, 25, 0, 0, 0), Timestamp(2013, 1, 1, 0, 0, 0), ] df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates={"IntDateOnlyCol": "%Y%m%d"}, ) assert issubclass(df.IntDateOnlyCol.dtype.type, np.datetime64) assert df.IntDateOnlyCol.tolist() == [ Timestamp("2010-10-10"), Timestamp("2010-12-12"), ] @pytest.mark.parametrize("error", ["ignore", "raise", "coerce"]) @pytest.mark.parametrize( "read_sql, text, mode", [ (sql.read_sql, "SELECT * FROM types_test_data", ("sqlalchemy", "fallback")), (sql.read_sql, "types_test_data", ("sqlalchemy")), ( sql.read_sql_query, "SELECT * FROM types_test_data", ("sqlalchemy", "fallback"), ), (sql.read_sql_table, "types_test_data", ("sqlalchemy")), ], ) def test_custom_dateparsing_error(self, read_sql, text, mode, error): if self.mode in mode: expected = self.types_test[self.flavor].astype( {"DateCol": "datetime64[ns]"} ) result = read_sql( text, con=self.conn, parse_dates={ "DateCol": {"errors": error}, }, ) tm.assert_frame_equal(result, expected) def test_date_and_index(self): # Test case where same column appears in parse_date and index_col df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, index_col="DateCol", parse_dates=["DateCol", "IntDateCol"], ) assert issubclass(df.index.dtype.type, np.datetime64) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) def test_timedelta(self): # see #6921 df = to_timedelta(Series(["00:00:01", "00:00:03"], name="foo")).to_frame() with tm.assert_produces_warning(UserWarning): df.to_sql("test_timedelta", self.conn) result = sql.read_sql_query("SELECT * FROM test_timedelta", self.conn) tm.assert_series_equal(result["foo"], 
df["foo"].view("int64")) def test_complex_raises(self): df = DataFrame({"a": [1 + 1j, 2j]}) msg = "Complex datatypes not supported" with pytest.raises(ValueError, match=msg): df.to_sql("test_complex", self.conn) @pytest.mark.parametrize( "index_name,index_label,expected", [ # no index name, defaults to 'index' (None, None, "index"), # specifying index_label (None, "other_label", "other_label"), # using the index name ("index_name", None, "index_name"), # has index name, but specifying index_label ("index_name", "other_label", "other_label"), # index name is integer (0, None, "0"), # index name is None but index label is integer (None, 0, "0"), ], ) def test_to_sql_index_label(self, index_name, index_label, expected): temp_frame = DataFrame({"col1": range(4)}) temp_frame.index.name = index_name query = "SELECT * FROM test_index_label" sql.to_sql(temp_frame, "test_index_label", self.conn, index_label=index_label) frame = sql.read_sql_query(query, self.conn) assert frame.columns[0] == expected def test_to_sql_index_label_multiindex(self): temp_frame = DataFrame( {"col1": range(4)}, index=MultiIndex.from_product([("A0", "A1"), ("B0", "B1")]), ) # no index name, defaults to 'level_0' and 'level_1' sql.to_sql(temp_frame, "test_index_label", self.conn) frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn) assert frame.columns[0] == "level_0" assert frame.columns[1] == "level_1" # specifying index_label sql.to_sql( temp_frame, "test_index_label", self.conn, if_exists="replace", index_label=["A", "B"], ) frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn) assert frame.columns[:2].tolist() == ["A", "B"] # using the index name temp_frame.index.names = ["A", "B"] sql.to_sql(temp_frame, "test_index_label", self.conn, if_exists="replace") frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn) assert frame.columns[:2].tolist() == ["A", "B"] # has index name, but specifying index_label sql.to_sql( temp_frame, "test_index_label", self.conn, if_exists="replace", index_label=["C", "D"], ) frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn) assert frame.columns[:2].tolist() == ["C", "D"] msg = "Length of 'index_label' should match number of levels, which is 2" with pytest.raises(ValueError, match=msg): sql.to_sql( temp_frame, "test_index_label", self.conn, if_exists="replace", index_label="C", ) def test_multiindex_roundtrip(self): df = DataFrame.from_records( [(1, 2.1, "line1"), (2, 1.5, "line2")], columns=["A", "B", "C"], index=["A", "B"], ) df.to_sql("test_multiindex_roundtrip", self.conn) result = sql.read_sql_query( "SELECT * FROM test_multiindex_roundtrip", self.conn, index_col=["A", "B"] ) tm.assert_frame_equal(df, result, check_index_type=True) @pytest.mark.parametrize( "dtype", [ None, int, float, {"A": int, "B": float}, ], ) def test_dtype_argument(self, dtype): # GH10285 Add dtype argument to read_sql_query df = DataFrame([[1.2, 3.4], [5.6, 7.8]], columns=["A", "B"]) df.to_sql("test_dtype_argument", self.conn) expected = df.astype(dtype) result = sql.read_sql_query( "SELECT A, B FROM test_dtype_argument", con=self.conn, dtype=dtype ) tm.assert_frame_equal(result, expected) def test_integer_col_names(self): df = DataFrame([[1, 2], [3, 4]], columns=[0, 1]) sql.to_sql(df, "test_frame_integer_col_names", self.conn, if_exists="replace") def test_get_schema(self): create_sql = sql.get_schema(self.test_frame1, "test", con=self.conn) assert "CREATE" in create_sql def test_get_schema_with_schema(self): # GH28486 create_sql = 
sql.get_schema(
            self.test_frame1, "test", con=self.conn, schema="pypi"
        )
        assert "CREATE TABLE pypi." in create_sql

    def test_get_schema_dtypes(self):
        float_frame = DataFrame({"a": [1.1, 1.2], "b": [2.1, 2.2]})
        dtype = sqlalchemy.Integer if self.mode == "sqlalchemy" else "INTEGER"
        create_sql = sql.get_schema(
            float_frame, "test", con=self.conn, dtype={"b": dtype}
        )
        assert "CREATE" in create_sql
        assert "INTEGER" in create_sql

    def test_get_schema_keys(self):
        frame = DataFrame({"Col1": [1.1, 1.2], "Col2": [2.1, 2.2]})
        create_sql = sql.get_schema(frame, "test", con=self.conn, keys="Col1")
        constraint_sentence = 'CONSTRAINT test_pk PRIMARY KEY ("Col1")'
        assert constraint_sentence in create_sql

        # multiple columns as key (GH10385)
        create_sql = sql.get_schema(
            self.test_frame1, "test", con=self.conn, keys=["A", "B"]
        )
        constraint_sentence = 'CONSTRAINT test_pk PRIMARY KEY ("A", "B")'
        assert constraint_sentence in create_sql

    def test_chunksize_read(self):
        df = DataFrame(np.random.randn(22, 5), columns=list("abcde"))
        df.to_sql("test_chunksize", self.conn, index=False)

        # reading the query in one go
        res1 = sql.read_sql_query("select * from test_chunksize", self.conn)

        # reading the query in chunks with read_sql_query
        res2 = DataFrame()
        i = 0
        sizes = [5, 5, 5, 5, 2]

        for chunk in sql.read_sql_query(
            "select * from test_chunksize", self.conn, chunksize=5
        ):
            res2 = concat([res2, chunk], ignore_index=True)
            assert len(chunk) == sizes[i]
            i += 1

        tm.assert_frame_equal(res1, res2)

        # reading the table in chunks with read_sql_table
        if self.mode == "sqlalchemy":
            res3 = DataFrame()
            i = 0
            sizes = [5, 5, 5, 5, 2]

            for chunk in sql.read_sql_table("test_chunksize", self.conn, chunksize=5):
                res3 = concat([res3, chunk], ignore_index=True)
                assert len(chunk) == sizes[i]
                i += 1

            tm.assert_frame_equal(res1, res3)

    def test_categorical(self):
        # GH8624
        # test that categorical gets written correctly as dense column
        df = DataFrame(
            {
                "person_id": [1, 2, 3],
                "person_name": ["John P. Doe", "Jane Dove", "John P. Doe"],
            }
        )
        df2 = df.copy()
        df2["person_name"] = df2["person_name"].astype("category")

        df2.to_sql("test_categorical", self.conn, index=False)
        res = sql.read_sql_query("SELECT * FROM test_categorical", self.conn)

        tm.assert_frame_equal(res, df)

    def test_unicode_column_name(self):
        # GH 11431
        df = DataFrame([[1, 2], [3, 4]], columns=["\xe9", "b"])
        df.to_sql("test_unicode", self.conn, index=False)

    def test_escaped_table_name(self):
        # GH 13206
        df = DataFrame({"A": [0, 1, 2], "B": [0.2, np.nan, 5.6]})
        df.to_sql("d1187b08-4943-4c8d-a7f6", self.conn, index=False)

        res = sql.read_sql_query("SELECT * FROM `d1187b08-4943-4c8d-a7f6`", self.conn)

        tm.assert_frame_equal(res, df)


@pytest.mark.single
@pytest.mark.skipif(not SQLALCHEMY_INSTALLED, reason="SQLAlchemy not installed")
class TestSQLApi(SQLAlchemyMixIn, _TestSQLApi):
    """
    Test the public API as it would be used directly.

    Tests for `read_sql_table` are included here, as this is specific
    to the sqlalchemy mode.
""" flavor = "sqlite" mode = "sqlalchemy" def connect(self): return sqlalchemy.create_engine("sqlite:///:memory:") def test_read_table_columns(self): # test columns argument in read_table sql.to_sql(self.test_frame1, "test_frame", self.conn) cols = ["A", "B"] result = sql.read_sql_table("test_frame", self.conn, columns=cols) assert result.columns.tolist() == cols def test_read_table_index_col(self): # test columns argument in read_table sql.to_sql(self.test_frame1, "test_frame", self.conn) result = sql.read_sql_table("test_frame", self.conn, index_col="index") assert result.index.names == ["index"] result = sql.read_sql_table("test_frame", self.conn, index_col=["A", "B"]) assert result.index.names == ["A", "B"] result = sql.read_sql_table( "test_frame", self.conn, index_col=["A", "B"], columns=["C", "D"] ) assert result.index.names == ["A", "B"] assert result.columns.tolist() == ["C", "D"] def test_read_sql_delegate(self): iris_frame1 = sql.read_sql_query("SELECT * FROM iris", self.conn) iris_frame2 = sql.read_sql("SELECT * FROM iris", self.conn) tm.assert_frame_equal(iris_frame1, iris_frame2) iris_frame1 = sql.read_sql_table("iris", self.conn) iris_frame2 = sql.read_sql("iris", self.conn) tm.assert_frame_equal(iris_frame1, iris_frame2) def test_not_reflect_all_tables(self): # create invalid table qry = """CREATE TABLE invalid (x INTEGER, y UNKNOWN);""" self.conn.execute(qry) qry = """CREATE TABLE other_table (x INTEGER, y INTEGER);""" self.conn.execute(qry) with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. warnings.simplefilter("always") # Trigger a warning. sql.read_sql_table("other_table", self.conn) sql.read_sql_query("SELECT * FROM other_table", self.conn) # Verify some things assert len(w) == 0 def test_warning_case_insensitive_table_name(self): # see gh-7815 # # We can't test that this warning is triggered, a the database # configuration would have to be altered. But here we test that # the warning is certainly NOT triggered in a normal case. with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. 
            warnings.simplefilter("always")
            # This should not trigger a Warning
            self.test_frame1.to_sql("CaseSensitive", self.conn)
            # Verify that no warning was raised
            assert len(w) == 0

    def _get_index_columns(self, tbl_name):
        from sqlalchemy.engine import reflection

        insp = reflection.Inspector.from_engine(self.conn)
        ixs = insp.get_indexes(tbl_name)
        ixs = [i["column_names"] for i in ixs]
        return ixs

    def test_sqlalchemy_type_mapping(self):
        # Test Timestamp objects (no datetime64 because of timezone) (GH9085)
        df = DataFrame(
            {"time": to_datetime(["201412120154", "201412110254"], utc=True)}
        )
        db = sql.SQLDatabase(self.conn)
        table = sql.SQLTable("test_type", db, frame=df)
        # GH 9086: TIMESTAMP is the suggested type for datetimes with timezones
        assert isinstance(table.table.c["time"].type, sqltypes.TIMESTAMP)

    @pytest.mark.parametrize(
        "integer, expected",
        [
            ("int8", "SMALLINT"),
            ("Int8", "SMALLINT"),
            ("uint8", "SMALLINT"),
            ("UInt8", "SMALLINT"),
            ("int16", "SMALLINT"),
            ("Int16", "SMALLINT"),
            ("uint16", "INTEGER"),
            ("UInt16", "INTEGER"),
            ("int32", "INTEGER"),
            ("Int32", "INTEGER"),
            ("uint32", "BIGINT"),
            ("UInt32", "BIGINT"),
            ("int64", "BIGINT"),
            ("Int64", "BIGINT"),
            (int, "BIGINT" if np.dtype(int).name == "int64" else "INTEGER"),
        ],
    )
    def test_sqlalchemy_integer_mapping(self, integer, expected):
        # GH35076 Map pandas integer to optimal SQLAlchemy integer type
        df = DataFrame([0, 1], columns=["a"], dtype=integer)
        db = sql.SQLDatabase(self.conn)
        table = sql.SQLTable("test_type", db, frame=df)

        result = str(table.table.c.a.type)
        assert result == expected

    @pytest.mark.parametrize("integer", ["uint64", "UInt64"])
    def test_sqlalchemy_integer_overload_mapping(self, integer):
        # GH35076 Map pandas integer to optimal SQLAlchemy integer type
        df = DataFrame([0, 1], columns=["a"], dtype=integer)
        db = sql.SQLDatabase(self.conn)
        with pytest.raises(
            ValueError, match="Unsigned 64 bit integer datatype is not supported"
        ):
            sql.SQLTable("test_type", db, frame=df)

    def test_database_uri_string(self):
        # Test read_sql and .to_sql method with a database URI (GH10654)
        test_frame1 = self.test_frame1
        # db_uri = 'sqlite:///:memory:' # raises
        # sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) near
        # "iris": syntax error [SQL: 'iris']
        with tm.ensure_clean() as name:
            db_uri = "sqlite:///" + name
            table = "iris"
            test_frame1.to_sql(table, db_uri, if_exists="replace", index=False)
            test_frame2 = sql.read_sql(table, db_uri)
            test_frame3 = sql.read_sql_table(table, db_uri)
            query = "SELECT * FROM iris"
            test_frame4 = sql.read_sql_query(query, db_uri)
        tm.assert_frame_equal(test_frame1, test_frame2)
        tm.assert_frame_equal(test_frame1, test_frame3)
        tm.assert_frame_equal(test_frame1, test_frame4)

        # using a driver that will not be installed on Travis to trigger an
        # error in sqlalchemy.create_engine -> test passing of this error to user
        try:
            # the rest of this test depends on pg8000's being absent
            import pg8000  # noqa

            pytest.skip("pg8000 is installed")
        except ImportError:
            pass

        db_uri = "postgresql+pg8000://user:pass@host/dbname"
        with pytest.raises(ImportError, match="pg8000"):
            sql.read_sql("select * from table", db_uri)

    def _make_iris_table_metadata(self):
        sa = sqlalchemy
        metadata = sa.MetaData()
        iris = sa.Table(
            "iris",
            metadata,
            sa.Column("SepalLength", sa.REAL),
            sa.Column("SepalWidth", sa.REAL),
            sa.Column("PetalLength", sa.REAL),
            sa.Column("PetalWidth", sa.REAL),
            sa.Column("Name", sa.TEXT),
        )

        return iris

    def test_query_by_text_obj(self):
        # WIP : GH10846
        name_text = sqlalchemy.text("select * from iris where name=:name")
        iris_df = sql.read_sql(name_text,
self.conn, params={"name": "Iris-versicolor"}) all_names = set(iris_df["Name"]) assert all_names == {"Iris-versicolor"} def test_query_by_select_obj(self): # WIP : GH10846 iris = self._make_iris_table_metadata() name_select = sqlalchemy.select([iris]).where( iris.c.Name == sqlalchemy.bindparam("name") ) iris_df = sql.read_sql(name_select, self.conn, params={"name": "Iris-setosa"}) all_names = set(iris_df["Name"]) assert all_names == {"Iris-setosa"} def test_column_with_percentage(self): # GH 37157 df = DataFrame({"A": [0, 1, 2], "%_variation": [3, 4, 5]}) df.to_sql("test_column_percentage", self.conn, index=False) res = sql.read_sql_table("test_column_percentage", self.conn) tm.assert_frame_equal(res, df) class _EngineToConnMixin: """ A mixin that causes setup_connect to create a conn rather than an engine. """ @pytest.fixture(autouse=True) def setup_method(self, load_iris_data): super().load_test_data_and_sql() engine = self.conn conn = engine.connect() self.__tx = conn.begin() self.pandasSQL = sql.SQLDatabase(conn) self.__engine = engine self.conn = conn yield self.__tx.rollback() self.conn.close() self.conn = self.__engine self.pandasSQL = sql.SQLDatabase(self.__engine) @pytest.mark.single class TestSQLApiConn(_EngineToConnMixin, TestSQLApi): pass @pytest.mark.single class TestSQLiteFallbackApi(SQLiteMixIn, _TestSQLApi): """ Test the public sqlite connection fallback API """ flavor = "sqlite" mode = "fallback" def connect(self, database=":memory:"): return sqlite3.connect(database) def test_sql_open_close(self): # Test if the IO in the database still work if the connection closed # between the writing and reading (as in many real situations). with tm.ensure_clean() as name: conn = self.connect(name) sql.to_sql(self.test_frame3, "test_frame3_legacy", conn, index=False) conn.close() conn = self.connect(name) result = sql.read_sql_query("SELECT * FROM test_frame3_legacy;", conn) conn.close() tm.assert_frame_equal(self.test_frame3, result) @pytest.mark.skipif(SQLALCHEMY_INSTALLED, reason="SQLAlchemy is installed") def test_con_string_import_error(self): conn = "mysql://root@localhost/pandas" msg = "Using URI string without sqlalchemy installed" with pytest.raises(ImportError, match=msg): sql.read_sql("SELECT * FROM iris", conn) def test_read_sql_delegate(self): iris_frame1 = sql.read_sql_query("SELECT * FROM iris", self.conn) iris_frame2 = sql.read_sql("SELECT * FROM iris", self.conn) tm.assert_frame_equal(iris_frame1, iris_frame2) msg = "Execution failed on sql 'iris': near \"iris\": syntax error" with pytest.raises(sql.DatabaseError, match=msg): sql.read_sql("iris", self.conn) def test_safe_names_warning(self): # GH 6798 df = DataFrame([[1, 2], [3, 4]], columns=["a", "b "]) # has a space # warns on create table with spaces in names with tm.assert_produces_warning(): sql.to_sql(df, "test_frame3_legacy", self.conn, index=False) def test_get_schema2(self): # without providing a connection object (available for backwards comp) create_sql = sql.get_schema(self.test_frame1, "test") assert "CREATE" in create_sql def _get_sqlite_column_type(self, schema, column): for col in schema.split("\n"): if col.split()[0].strip('""') == column: return col.split()[1] raise ValueError(f"Column {column} not found") def test_sqlite_type_mapping(self): # Test Timestamp objects (no datetime64 because of timezone) (GH9085) df = DataFrame( {"time": to_datetime(["201412120154", "201412110254"], utc=True)} ) db = sql.SQLiteDatabase(self.conn) table = sql.SQLiteTable("test_type", db, frame=df) schema = 
table.sql_schema() assert self._get_sqlite_column_type(schema, "time") == "TIMESTAMP" # ----------------------------------------------------------------------------- # -- Database flavor specific tests class _TestSQLAlchemy(SQLAlchemyMixIn, PandasSQLTest): """ Base class for testing the sqlalchemy backend. Subclasses for specific database types are created below. Tests that deviate for each flavor are overwritten there. """ flavor: str @pytest.fixture(autouse=True, scope="class") def setup_class(cls): cls.setup_import() cls.setup_driver() conn = cls.conn = cls.connect() conn.connect() def load_test_data_and_sql(self): self._load_raw_sql() self._load_test1_data() @pytest.fixture(autouse=True) def setup_method(self, load_iris_data): self.load_test_data_and_sql() @classmethod def setup_import(cls): # Skip this test if SQLAlchemy not available if not SQLALCHEMY_INSTALLED: pytest.skip("SQLAlchemy not installed") @classmethod def setup_driver(cls): raise NotImplementedError() @classmethod def connect(cls): raise NotImplementedError() def setup_connect(self): try: self.conn = self.connect() self.pandasSQL = sql.SQLDatabase(self.conn) # to test if connection can be made: self.conn.connect() except sqlalchemy.exc.OperationalError: pytest.skip(f"Can't connect to {self.flavor} server") def test_read_sql(self): self._read_sql_iris() def test_read_sql_parameter(self): self._read_sql_iris_parameter() def test_read_sql_named_parameter(self): self._read_sql_iris_named_parameter() def test_to_sql(self): self._to_sql() def test_to_sql_empty(self): self._to_sql_empty() def test_to_sql_fail(self): self._to_sql_fail() def test_to_sql_replace(self): self._to_sql_replace() def test_to_sql_append(self): self._to_sql_append() def test_to_sql_method_multi(self): self._to_sql(method="multi") def test_to_sql_method_callable(self): self._to_sql_method_callable() def test_create_table(self): temp_conn = self.connect() temp_frame = DataFrame( {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]} ) pandasSQL = sql.SQLDatabase(temp_conn) pandasSQL.to_sql(temp_frame, "temp_frame") assert temp_conn.has_table("temp_frame") def test_drop_table(self): temp_conn = self.connect() temp_frame = DataFrame( {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]} ) pandasSQL = sql.SQLDatabase(temp_conn) pandasSQL.to_sql(temp_frame, "temp_frame") assert temp_conn.has_table("temp_frame") pandasSQL.drop_table("temp_frame") assert not temp_conn.has_table("temp_frame") def test_roundtrip(self): self._roundtrip() def test_execute_sql(self): self._execute_sql() def test_read_table(self): iris_frame = sql.read_sql_table("iris", con=self.conn) self._check_iris_loaded_frame(iris_frame) def test_read_table_columns(self): iris_frame = sql.read_sql_table( "iris", con=self.conn, columns=["SepalLength", "SepalLength"] ) tm.equalContents(iris_frame.columns.values, ["SepalLength", "SepalLength"]) def test_read_table_absent_raises(self): msg = "Table this_doesnt_exist not found" with pytest.raises(ValueError, match=msg): sql.read_sql_table("this_doesnt_exist", con=self.conn) def test_default_type_conversion(self): df = sql.read_sql_table("types_test_data", self.conn) assert issubclass(df.FloatCol.dtype.type, np.floating) assert issubclass(df.IntCol.dtype.type, np.integer) assert issubclass(df.BoolCol.dtype.type, np.bool_) # Int column with NA values stays as float assert issubclass(df.IntColWithNull.dtype.type, np.floating) # Bool column with NA values becomes object assert issubclass(df.BoolColWithNull.dtype.type, object) def 
test_bigint(self): # int64 should be converted to BigInteger, GH7433 df = DataFrame(data={"i64": [2 ** 62]}) df.to_sql("test_bigint", self.conn, index=False) result = sql.read_sql_table("test_bigint", self.conn) tm.assert_frame_equal(df, result) def test_default_date_load(self): df = sql.read_sql_table("types_test_data", self.conn) # IMPORTANT - sqlite has no native date type, so shouldn't parse, but # MySQL SHOULD be converted. assert issubclass(df.DateCol.dtype.type, np.datetime64) def test_datetime_with_timezone(self): # edge case that converts postgresql datetime with time zone types # to datetime64[ns,psycopg2.tz.FixedOffsetTimezone..], which is ok # but should be more natural, so coerce to datetime64[ns] for now def check(col): # check that a column is either datetime64[ns] # or datetime64[ns, UTC] if is_datetime64_dtype(col.dtype): # "2000-01-01 00:00:00-08:00" should convert to # "2000-01-01 08:00:00" assert col[0] == Timestamp("2000-01-01 08:00:00") # "2000-06-01 00:00:00-07:00" should convert to # "2000-06-01 07:00:00" assert col[1] == Timestamp("2000-06-01 07:00:00") elif is_datetime64tz_dtype(col.dtype): assert str(col.dt.tz) == "UTC" # "2000-01-01 00:00:00-08:00" should convert to # "2000-01-01 08:00:00" # "2000-06-01 00:00:00-07:00" should convert to # "2000-06-01 07:00:00" # GH 6415 expected_data = [ Timestamp("2000-01-01 08:00:00", tz="UTC"), Timestamp("2000-06-01 07:00:00", tz="UTC"), ] expected = Series(expected_data, name=col.name) tm.assert_series_equal(col, expected) else: raise AssertionError( f"DateCol loaded with incorrect type -> {col.dtype}" ) # GH11216 df = pd.read_sql_query("select * from types_test_data", self.conn) if not hasattr(df, "DateColWithTz"): pytest.skip("no column with datetime with time zone") # this is parsed on Travis (linux), but not on macosx for some reason # even with the same versions of psycopg2 & sqlalchemy, possibly a # Postgresql server version difference col = df.DateColWithTz assert is_datetime64tz_dtype(col.dtype) df = pd.read_sql_query( "select * from types_test_data", self.conn, parse_dates=["DateColWithTz"] ) if not hasattr(df, "DateColWithTz"): pytest.skip("no column with datetime with time zone") col = df.DateColWithTz assert is_datetime64tz_dtype(col.dtype) assert str(col.dt.tz) == "UTC" check(df.DateColWithTz) df = pd.concat( list( pd.read_sql_query( "select * from types_test_data", self.conn, chunksize=1 ) ), ignore_index=True, ) col = df.DateColWithTz assert is_datetime64tz_dtype(col.dtype) assert str(col.dt.tz) == "UTC" expected = sql.read_sql_table("types_test_data", self.conn) col = expected.DateColWithTz assert is_datetime64tz_dtype(col.dtype) tm.assert_series_equal(df.DateColWithTz, expected.DateColWithTz) # xref #7139 # this might or might not be converted depending on the postgres driver df = sql.read_sql_table("types_test_data", self.conn) check(df.DateColWithTz) def test_datetime_with_timezone_roundtrip(self): # GH 9086 # Write datetimetz data to a db and read it back # For dbs that support timestamps with timezones, should get back UTC # otherwise naive data should be returned expected = DataFrame( {"A": date_range("2013-01-01 09:00:00", periods=3, tz="US/Pacific")} ) expected.to_sql("test_datetime_tz", self.conn, index=False) if self.flavor == "postgresql": # SQLAlchemy "timezones" (i.e. 
offsets) are coerced to UTC
            expected["A"] = expected["A"].dt.tz_convert("UTC")
        else:
            # Otherwise, timestamps are returned as local, naive
            expected["A"] = expected["A"].dt.tz_localize(None)

        result = sql.read_sql_table("test_datetime_tz", self.conn)
        tm.assert_frame_equal(result, expected)

        result = sql.read_sql_query("SELECT * FROM test_datetime_tz", self.conn)
        if self.flavor == "sqlite":
            # read_sql_query does not return datetime type like read_sql_table
            assert isinstance(result.loc[0, "A"], str)
            result["A"] = to_datetime(result["A"])
        tm.assert_frame_equal(result, expected)

    def test_out_of_bounds_datetime(self):
        # GH 26761
        data = DataFrame({"date": datetime(9999, 1, 1)}, index=[0])
        data.to_sql("test_datetime_obb", self.conn, index=False)
        result = sql.read_sql_table("test_datetime_obb", self.conn)
        expected = DataFrame([pd.NaT], columns=["date"])
        tm.assert_frame_equal(result, expected)

    def test_naive_datetimeindex_roundtrip(self):
        # GH 23510
        # Ensure that a naive DatetimeIndex isn't converted to UTC
        dates = date_range("2018-01-01", periods=5, freq="6H")._with_freq(None)
        expected = DataFrame({"nums": range(5)}, index=dates)
        expected.to_sql("foo_table", self.conn, index_label="info_date")
        result = sql.read_sql_table("foo_table", self.conn, index_col="info_date")
        # the result index will gain a name from the set_index operation;
        # the expected index is unnamed, so skip the name check
        tm.assert_frame_equal(result, expected, check_names=False)

    def test_date_parsing(self):
        # No Parsing
        df = sql.read_sql_table("types_test_data", self.conn)
        expected_type = object if self.flavor == "sqlite" else np.datetime64
        assert issubclass(df.DateCol.dtype.type, expected_type)

        df = sql.read_sql_table("types_test_data", self.conn, parse_dates=["DateCol"])
        assert issubclass(df.DateCol.dtype.type, np.datetime64)

        df = sql.read_sql_table(
            "types_test_data", self.conn, parse_dates={"DateCol": "%Y-%m-%d %H:%M:%S"}
        )
        assert issubclass(df.DateCol.dtype.type, np.datetime64)

        df = sql.read_sql_table(
            "types_test_data",
            self.conn,
            parse_dates={"DateCol": {"format": "%Y-%m-%d %H:%M:%S"}},
        )
        assert issubclass(df.DateCol.dtype.type, np.datetime64)

        df = sql.read_sql_table(
            "types_test_data", self.conn, parse_dates=["IntDateCol"]
        )
        assert issubclass(df.IntDateCol.dtype.type, np.datetime64)

        df = sql.read_sql_table(
            "types_test_data", self.conn, parse_dates={"IntDateCol": "s"}
        )
        assert issubclass(df.IntDateCol.dtype.type, np.datetime64)

        df = sql.read_sql_table(
            "types_test_data", self.conn, parse_dates={"IntDateCol": {"unit": "s"}}
        )
        assert issubclass(df.IntDateCol.dtype.type, np.datetime64)

    def test_datetime(self):
        df = DataFrame(
            {"A": date_range("2013-01-01 09:00:00", periods=3), "B": np.arange(3.0)}
        )
        df.to_sql("test_datetime", self.conn)

        # with read_table -> type information from schema used
        result = sql.read_sql_table("test_datetime", self.conn)
        result = result.drop("index", axis=1)
        tm.assert_frame_equal(result, df)

        # with read_sql -> no type information -> sqlite has no native
        # datetime type, so values come back as strings
        result = sql.read_sql_query("SELECT * FROM test_datetime", self.conn)
        result = result.drop("index", axis=1)
        if self.flavor == "sqlite":
            assert isinstance(result.loc[0, "A"], str)
            result["A"] = to_datetime(result["A"])
            tm.assert_frame_equal(result, df)
        else:
            tm.assert_frame_equal(result, df)

    def test_datetime_NaT(self):
        df = DataFrame(
            {"A": date_range("2013-01-01 09:00:00", periods=3), "B": np.arange(3.0)}
        )
        df.loc[1, "A"] = np.nan
        df.to_sql("test_datetime", self.conn, index=False)

        # with read_table -> type information from schema used
        result = sql.read_sql_table("test_datetime", self.conn)
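        # read_sql_table reconstructs the datetime64 dtype from the table
        # schema, so the missing value stored above should come back as NaT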
tm.assert_frame_equal(result, df) # with read_sql -> no type information -> sqlite has no native result = sql.read_sql_query("SELECT * FROM test_datetime", self.conn) if self.flavor == "sqlite": assert isinstance(result.loc[0, "A"], str) result["A"] = to_datetime(result["A"], errors="coerce") tm.assert_frame_equal(result, df) else: tm.assert_frame_equal(result, df) def test_datetime_date(self): # test support for datetime.date df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"]) df.to_sql("test_date", self.conn, index=False) res = read_sql_table("test_date", self.conn) result = res["a"] expected = to_datetime(df["a"]) # comes back as datetime64 tm.assert_series_equal(result, expected) def test_datetime_time(self): # test support for datetime.time df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"]) df.to_sql("test_time", self.conn, index=False) res = read_sql_table("test_time", self.conn) tm.assert_frame_equal(res, df) # GH8341 # first, use the fallback to have the sqlite adapter put in place sqlite_conn = TestSQLiteFallback.connect() sql.to_sql(df, "test_time2", sqlite_conn, index=False) res = sql.read_sql_query("SELECT * FROM test_time2", sqlite_conn) ref = df.applymap(lambda _: _.strftime("%H:%M:%S.%f")) tm.assert_frame_equal(ref, res) # check if adapter is in place # then test if sqlalchemy is unaffected by the sqlite adapter sql.to_sql(df, "test_time3", self.conn, index=False) if self.flavor == "sqlite": res = sql.read_sql_query("SELECT * FROM test_time3", self.conn) ref = df.applymap(lambda _: _.strftime("%H:%M:%S.%f")) tm.assert_frame_equal(ref, res) res = sql.read_sql_table("test_time3", self.conn) tm.assert_frame_equal(df, res) def test_mixed_dtype_insert(self): # see GH6509 s1 = Series(2 ** 25 + 1, dtype=np.int32) s2 = Series(0.0, dtype=np.float32) df = DataFrame({"s1": s1, "s2": s2}) # write and read again df.to_sql("test_read_write", self.conn, index=False) df2 = sql.read_sql_table("test_read_write", self.conn) tm.assert_frame_equal(df, df2, check_dtype=False, check_exact=True) def test_nan_numeric(self): # NaNs in numeric float column df = DataFrame({"A": [0, 1, 2], "B": [0.2, np.nan, 5.6]}) df.to_sql("test_nan", self.conn, index=False) # with read_table result = sql.read_sql_table("test_nan", self.conn) tm.assert_frame_equal(result, df) # with read_sql result = sql.read_sql_query("SELECT * FROM test_nan", self.conn) tm.assert_frame_equal(result, df) def test_nan_fullcolumn(self): # full NaN column (numeric float column) df = DataFrame({"A": [0, 1, 2], "B": [np.nan, np.nan, np.nan]}) df.to_sql("test_nan", self.conn, index=False) # with read_table result = sql.read_sql_table("test_nan", self.conn) tm.assert_frame_equal(result, df) # with read_sql -> not type info from table -> stays None df["B"] = df["B"].astype("object") df["B"] = None result = sql.read_sql_query("SELECT * FROM test_nan", self.conn) tm.assert_frame_equal(result, df) def test_nan_string(self): # NaNs in string column df = DataFrame({"A": [0, 1, 2], "B": ["a", "b", np.nan]}) df.to_sql("test_nan", self.conn, index=False) # NaNs are coming back as None df.loc[2, "B"] = None # with read_table result = sql.read_sql_table("test_nan", self.conn) tm.assert_frame_equal(result, df) # with read_sql result = sql.read_sql_query("SELECT * FROM test_nan", self.conn) tm.assert_frame_equal(result, df) def _get_index_columns(self, tbl_name): from sqlalchemy.engine import reflection insp = reflection.Inspector.from_engine(self.conn) ixs = insp.get_indexes(tbl_name) ixs = [i["column_names"] for i in ixs] 
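        # each entry is the list of column names that make up one index on
        # the reflected table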
return ixs def test_to_sql_save_index(self): self._to_sql_save_index() def test_transactions(self): self._transaction_test() def test_get_schema_create_table(self): # Use a dataframe without a bool column, since MySQL converts bool to # TINYINT (which read_sql_table returns as an int and causes a dtype # mismatch) self._load_test3_data() tbl = "test_get_schema_create_table" create_sql = sql.get_schema(self.test_frame3, tbl, con=self.conn) blank_test_df = self.test_frame3.iloc[:0] self.drop_table(tbl) self.conn.execute(create_sql) returned_df = sql.read_sql_table(tbl, self.conn) tm.assert_frame_equal(returned_df, blank_test_df, check_index_type=False) self.drop_table(tbl) def test_dtype(self): cols = ["A", "B"] data = [(0.8, True), (0.9, None)] df = DataFrame(data, columns=cols) df.to_sql("dtype_test", self.conn) df.to_sql("dtype_test2", self.conn, dtype={"B": sqlalchemy.TEXT}) meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() sqltype = meta.tables["dtype_test2"].columns["B"].type assert isinstance(sqltype, sqlalchemy.TEXT) msg = "The type of B is not a SQLAlchemy type" with pytest.raises(ValueError, match=msg): df.to_sql("error", self.conn, dtype={"B": str}) # GH9083 df.to_sql("dtype_test3", self.conn, dtype={"B": sqlalchemy.String(10)}) meta.reflect() sqltype = meta.tables["dtype_test3"].columns["B"].type assert isinstance(sqltype, sqlalchemy.String) assert sqltype.length == 10 # single dtype df.to_sql("single_dtype_test", self.conn, dtype=sqlalchemy.TEXT) meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() sqltypea = meta.tables["single_dtype_test"].columns["A"].type sqltypeb = meta.tables["single_dtype_test"].columns["B"].type assert isinstance(sqltypea, sqlalchemy.TEXT) assert isinstance(sqltypeb, sqlalchemy.TEXT) def test_notna_dtype(self): cols = { "Bool": Series([True, None]), "Date": Series([datetime(2012, 5, 1), None]), "Int": Series([1, None], dtype="object"), "Float": Series([1.1, None]), } df = DataFrame(cols) tbl = "notna_dtype_test" df.to_sql(tbl, self.conn) returned_df = sql.read_sql_table(tbl, self.conn) # noqa meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() if self.flavor == "mysql": my_type = sqltypes.Integer else: my_type = sqltypes.Boolean col_dict = meta.tables[tbl].columns assert isinstance(col_dict["Bool"].type, my_type) assert isinstance(col_dict["Date"].type, sqltypes.DateTime) assert isinstance(col_dict["Int"].type, sqltypes.Integer) assert isinstance(col_dict["Float"].type, sqltypes.Float) def test_double_precision(self): V = 1.23456789101112131415 df = DataFrame( { "f32": Series([V], dtype="float32"), "f64": Series([V], dtype="float64"), "f64_as_f32": Series([V], dtype="float64"), "i32": Series([5], dtype="int32"), "i64": Series([5], dtype="int64"), } ) df.to_sql( "test_dtypes", self.conn, index=False, if_exists="replace", dtype={"f64_as_f32": sqlalchemy.Float(precision=23)}, ) res = sql.read_sql_table("test_dtypes", self.conn) # check precision of float64 assert np.round(df["f64"].iloc[0], 14) == np.round(res["f64"].iloc[0], 14) # check sql types meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() col_dict = meta.tables["test_dtypes"].columns assert str(col_dict["f32"].type) == str(col_dict["f64_as_f32"].type) assert isinstance(col_dict["f32"].type, sqltypes.Float) assert isinstance(col_dict["f64"].type, sqltypes.Float) assert isinstance(col_dict["i32"].type, sqltypes.Integer) assert isinstance(col_dict["i64"].type, sqltypes.BigInteger) def test_connectable_issue_example(self): # This tests the example 
raised in issue
        # https://github.com/pandas-dev/pandas/issues/10104
        def foo(connection):
            query = "SELECT test_foo_data FROM test_foo_data"
            return sql.read_sql_query(query, con=connection)

        def bar(connection, data):
            data.to_sql(name="test_foo_data", con=connection, if_exists="append")

        def main(connectable):
            with connectable.connect() as conn:
                with conn.begin():
                    foo_data = conn.run_callable(foo)
                    conn.run_callable(bar, foo_data)

        DataFrame({"test_foo_data": [0, 1, 2]}).to_sql("test_foo_data", self.conn)
        main(self.conn)

    @pytest.mark.parametrize(
        "input",
        [{"foo": [np.inf]}, {"foo": [-np.inf]}, {"foo": [-np.inf], "infe0": ["bar"]}],
    )
    def test_to_sql_with_negative_npinf(self, input):
        # GH 34431
        df = DataFrame(input)

        if self.flavor == "mysql":
            msg = "inf cannot be used with MySQL"
            with pytest.raises(ValueError, match=msg):
                df.to_sql("foobar", self.conn, index=False)
        else:
            df.to_sql("foobar", self.conn, index=False)
            res = sql.read_sql_table("foobar", self.conn)
            tm.assert_equal(df, res)

    def test_temporary_table(self):
        test_data = "Hello, World!"
        expected = DataFrame({"spam": [test_data]})
        Base = declarative.declarative_base()

        class Temporary(Base):
            __tablename__ = "temp_test"
            __table_args__ = {"prefixes": ["TEMPORARY"]}
            id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
            spam = sqlalchemy.Column(sqlalchemy.Unicode(30), nullable=False)

        Session = sa_session.sessionmaker(bind=self.conn)
        session = Session()
        with session.transaction:
            conn = session.connection()
            Temporary.__table__.create(conn)
            session.add(Temporary(spam=test_data))
            session.flush()
            df = sql.read_sql_query(sql=sqlalchemy.select([Temporary.spam]), con=conn)

        tm.assert_frame_equal(df, expected)


class _TestSQLAlchemyConn(_EngineToConnMixin, _TestSQLAlchemy):
    def test_transactions(self):
        pytest.skip("Nested transaction rollbacks don't work with Pandas")


class _TestSQLiteAlchemy:
    """
    Test the sqlalchemy backend against an in-memory sqlite database.

    """

    flavor = "sqlite"

    @classmethod
    def connect(cls):
        return sqlalchemy.create_engine("sqlite:///:memory:")

    @classmethod
    def setup_driver(cls):
        # sqlite3 is built-in
        cls.driver = None

    def test_default_type_conversion(self):
        df = sql.read_sql_table("types_test_data", self.conn)

        assert issubclass(df.FloatCol.dtype.type, np.floating)
        assert issubclass(df.IntCol.dtype.type, np.integer)

        # sqlite has no boolean type, so integer type is returned
        assert issubclass(df.BoolCol.dtype.type, np.integer)

        # Int column with NA values stays as float
        assert issubclass(df.IntColWithNull.dtype.type, np.floating)

        # Non-native Bool column with NA values stays as float
        assert issubclass(df.BoolColWithNull.dtype.type, np.floating)

    def test_default_date_load(self):
        df = sql.read_sql_table("types_test_data", self.conn)

        # IMPORTANT - sqlite has no native date type, so this column should
        # not be parsed
        assert not issubclass(df.DateCol.dtype.type, np.datetime64)

    def test_bigint_warning(self):
        # test that no warning is raised for BIGINT (which supports int64) (GH7433)
        df = DataFrame({"a": [1, 2]}, dtype="int64")
        df.to_sql("test_bigintwarning", self.conn, index=False)

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            sql.read_sql_table("test_bigintwarning", self.conn)
            assert len(w) == 0


class _TestMySQLAlchemy:
    """
    Test the sqlalchemy backend against a MySQL database.
""" flavor = "mysql" port = 3306 @classmethod def connect(cls): return sqlalchemy.create_engine( f"mysql+{cls.driver}://root@localhost:{cls.port}/pandas", connect_args=cls.connect_args, ) @classmethod def setup_driver(cls): pymysql = pytest.importorskip("pymysql") cls.driver = "pymysql" cls.connect_args = {"client_flag": pymysql.constants.CLIENT.MULTI_STATEMENTS} def test_default_type_conversion(self): df = sql.read_sql_table("types_test_data", self.conn) assert issubclass(df.FloatCol.dtype.type, np.floating) assert issubclass(df.IntCol.dtype.type, np.integer) # MySQL has no real BOOL type (it's an alias for TINYINT) assert issubclass(df.BoolCol.dtype.type, np.integer) # Int column with NA values stays as float assert issubclass(df.IntColWithNull.dtype.type, np.floating) # Bool column with NA = int column with NA values => becomes float assert issubclass(df.BoolColWithNull.dtype.type, np.floating) def test_read_procedure(self): import pymysql # see GH7324. Although it is more an api test, it is added to the # mysql tests as sqlite does not have stored procedures df = DataFrame({"a": [1, 2, 3], "b": [0.1, 0.2, 0.3]}) df.to_sql("test_procedure", self.conn, index=False) proc = """DROP PROCEDURE IF EXISTS get_testdb; CREATE PROCEDURE get_testdb () BEGIN SELECT * FROM test_procedure; END""" connection = self.conn.connect() trans = connection.begin() try: r1 = connection.execute(proc) # noqa trans.commit() except pymysql.Error: trans.rollback() raise res1 = sql.read_sql_query("CALL get_testdb();", self.conn) tm.assert_frame_equal(df, res1) # test delegation to read_sql_query res2 = sql.read_sql("CALL get_testdb();", self.conn) tm.assert_frame_equal(df, res2) class _TestPostgreSQLAlchemy: """ Test the sqlalchemy backend against an PostgreSQL database. """ flavor = "postgresql" port = 5432 @classmethod def connect(cls): return sqlalchemy.create_engine( f"postgresql+{cls.driver}://postgres:postgres@localhost:{cls.port}/pandas" ) @classmethod def setup_driver(cls): pytest.importorskip("psycopg2") cls.driver = "psycopg2" def test_schema_support(self): # only test this for postgresql (schema's not supported in # mysql/sqlite) df = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]}) # create a schema self.conn.execute("DROP SCHEMA IF EXISTS other CASCADE;") self.conn.execute("CREATE SCHEMA other;") # write dataframe to different schema's df.to_sql("test_schema_public", self.conn, index=False) df.to_sql( "test_schema_public_explicit", self.conn, index=False, schema="public" ) df.to_sql("test_schema_other", self.conn, index=False, schema="other") # read dataframes back in res1 = sql.read_sql_table("test_schema_public", self.conn) tm.assert_frame_equal(df, res1) res2 = sql.read_sql_table("test_schema_public_explicit", self.conn) tm.assert_frame_equal(df, res2) res3 = sql.read_sql_table( "test_schema_public_explicit", self.conn, schema="public" ) tm.assert_frame_equal(df, res3) res4 = sql.read_sql_table("test_schema_other", self.conn, schema="other") tm.assert_frame_equal(df, res4) msg = "Table test_schema_other not found" with pytest.raises(ValueError, match=msg): sql.read_sql_table("test_schema_other", self.conn, schema="public") # different if_exists options # create a schema self.conn.execute("DROP SCHEMA IF EXISTS other CASCADE;") self.conn.execute("CREATE SCHEMA other;") # write dataframe with different if_exists options df.to_sql("test_schema_other", self.conn, schema="other", index=False) df.to_sql( "test_schema_other", self.conn, schema="other", index=False, if_exists="replace", ) 
df.to_sql( "test_schema_other", self.conn, schema="other", index=False, if_exists="append", ) res = sql.read_sql_table("test_schema_other", self.conn, schema="other") tm.assert_frame_equal(concat([df, df], ignore_index=True), res) # specifying schema in user-provided meta # The schema won't be applied on another Connection # because of transactional schemas if isinstance(self.conn, sqlalchemy.engine.Engine): engine2 = self.connect() meta = sqlalchemy.MetaData(engine2, schema="other") pdsql = sql.SQLDatabase(engine2, meta=meta) pdsql.to_sql(df, "test_schema_other2", index=False) pdsql.to_sql(df, "test_schema_other2", index=False, if_exists="replace") pdsql.to_sql(df, "test_schema_other2", index=False, if_exists="append") res1 = sql.read_sql_table("test_schema_other2", self.conn, schema="other") res2 = pdsql.read_table("test_schema_other2") tm.assert_frame_equal(res1, res2) def test_copy_from_callable_insertion_method(self): # GH 8953 # Example in io.rst found under _io.sql.method # not available in sqlite, mysql def psql_insert_copy(table, conn, keys, data_iter): # gets a DBAPI connection that can provide a cursor dbapi_conn = conn.connection with dbapi_conn.cursor() as cur: s_buf = StringIO() writer = csv.writer(s_buf) writer.writerows(data_iter) s_buf.seek(0) columns = ", ".join(f'"{k}"' for k in keys) if table.schema: table_name = f"{table.schema}.{table.name}" else: table_name = table.name sql_query = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV" cur.copy_expert(sql=sql_query, file=s_buf) expected = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]}) expected.to_sql( "test_copy_insert", self.conn, index=False, method=psql_insert_copy ) result = sql.read_sql_table("test_copy_insert", self.conn) tm.assert_frame_equal(result, expected) @pytest.mark.single @pytest.mark.db class TestMySQLAlchemy(_TestMySQLAlchemy, _TestSQLAlchemy): pass @pytest.mark.single @pytest.mark.db class TestMySQLAlchemyConn(_TestMySQLAlchemy, _TestSQLAlchemyConn): pass @pytest.mark.single @pytest.mark.db class TestPostgreSQLAlchemy(_TestPostgreSQLAlchemy, _TestSQLAlchemy): pass @pytest.mark.single @pytest.mark.db class TestPostgreSQLAlchemyConn(_TestPostgreSQLAlchemy, _TestSQLAlchemyConn): pass @pytest.mark.single class TestSQLiteAlchemy(_TestSQLiteAlchemy, _TestSQLAlchemy): pass @pytest.mark.single class TestSQLiteAlchemyConn(_TestSQLiteAlchemy, _TestSQLAlchemyConn): pass # ----------------------------------------------------------------------------- # -- Test Sqlite / MySQL fallback @pytest.mark.single class TestSQLiteFallback(SQLiteMixIn, PandasSQLTest): """ Test the fallback mode against an in-memory sqlite database. 
""" flavor = "sqlite" @classmethod def connect(cls): return sqlite3.connect(":memory:") def setup_connect(self): self.conn = self.connect() def load_test_data_and_sql(self): self.pandasSQL = sql.SQLiteDatabase(self.conn) self._load_test1_data() @pytest.fixture(autouse=True) def setup_method(self, load_iris_data): self.load_test_data_and_sql() def test_read_sql(self): self._read_sql_iris() def test_read_sql_parameter(self): self._read_sql_iris_parameter() def test_read_sql_named_parameter(self): self._read_sql_iris_named_parameter() def test_to_sql(self): self._to_sql() def test_to_sql_empty(self): self._to_sql_empty() def test_to_sql_fail(self): self._to_sql_fail() def test_to_sql_replace(self): self._to_sql_replace() def test_to_sql_append(self): self._to_sql_append() def test_to_sql_method_multi(self): # GH 29921 self._to_sql(method="multi") def test_create_and_drop_table(self): temp_frame = DataFrame( {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]} ) self.pandasSQL.to_sql(temp_frame, "drop_test_frame") assert self.pandasSQL.has_table("drop_test_frame") self.pandasSQL.drop_table("drop_test_frame") assert not self.pandasSQL.has_table("drop_test_frame") def test_roundtrip(self): self._roundtrip() def test_execute_sql(self): self._execute_sql() def test_datetime_date(self): # test support for datetime.date df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"]) df.to_sql("test_date", self.conn, index=False) res = read_sql_query("SELECT * FROM test_date", self.conn) if self.flavor == "sqlite": # comes back as strings tm.assert_frame_equal(res, df.astype(str)) elif self.flavor == "mysql": tm.assert_frame_equal(res, df) def test_datetime_time(self): # test support for datetime.time, GH #8341 df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"]) df.to_sql("test_time", self.conn, index=False) res = read_sql_query("SELECT * FROM test_time", self.conn) if self.flavor == "sqlite": # comes back as strings expected = df.applymap(lambda _: _.strftime("%H:%M:%S.%f")) tm.assert_frame_equal(res, expected) def _get_index_columns(self, tbl_name): ixs = sql.read_sql_query( "SELECT * FROM sqlite_master WHERE type = 'index' " + f"AND tbl_name = '{tbl_name}'", self.conn, ) ix_cols = [] for ix_name in ixs.name: ix_info = sql.read_sql_query(f"PRAGMA index_info({ix_name})", self.conn) ix_cols.append(ix_info.name.tolist()) return ix_cols def test_to_sql_save_index(self): self._to_sql_save_index() def test_transactions(self): self._transaction_test() def _get_sqlite_column_type(self, table, column): recs = self.conn.execute(f"PRAGMA table_info({table})") for cid, name, ctype, not_null, default, pk in recs: if name == column: return ctype raise ValueError(f"Table {table}, column {column} not found") def test_dtype(self): if self.flavor == "mysql": pytest.skip("Not applicable to MySQL legacy") cols = ["A", "B"] data = [(0.8, True), (0.9, None)] df = DataFrame(data, columns=cols) df.to_sql("dtype_test", self.conn) df.to_sql("dtype_test2", self.conn, dtype={"B": "STRING"}) # sqlite stores Boolean values as INTEGER assert self._get_sqlite_column_type("dtype_test", "B") == "INTEGER" assert self._get_sqlite_column_type("dtype_test2", "B") == "STRING" msg = r"B \(<class 'bool'>\) not a string" with pytest.raises(ValueError, match=msg): df.to_sql("error", self.conn, dtype={"B": bool}) # single dtype df.to_sql("single_dtype_test", self.conn, dtype="STRING") assert self._get_sqlite_column_type("single_dtype_test", "A") == "STRING" assert self._get_sqlite_column_type("single_dtype_test", "B") == 
"STRING" def test_notna_dtype(self): if self.flavor == "mysql": pytest.skip("Not applicable to MySQL legacy") cols = { "Bool": Series([True, None]), "Date": Series([datetime(2012, 5, 1), None]), "Int": Series([1, None], dtype="object"), "Float": Series([1.1, None]), } df = DataFrame(cols) tbl = "notna_dtype_test" df.to_sql(tbl, self.conn) assert self._get_sqlite_column_type(tbl, "Bool") == "INTEGER" assert self._get_sqlite_column_type(tbl, "Date") == "TIMESTAMP" assert self._get_sqlite_column_type(tbl, "Int") == "INTEGER" assert self._get_sqlite_column_type(tbl, "Float") == "REAL" def test_illegal_names(self): # For sqlite, these should work fine df = DataFrame([[1, 2], [3, 4]], columns=["a", "b"]) msg = "Empty table or column name specified" with pytest.raises(ValueError, match=msg): df.to_sql("", self.conn) for ndx, weird_name in enumerate( [ "test_weird_name]", "test_weird_name[", "test_weird_name`", 'test_weird_name"', "test_weird_name'", "_b.test_weird_name_01-30", '"_b.test_weird_name_01-30"', "99beginswithnumber", "12345", "\xe9", ] ): df.to_sql(weird_name, self.conn) sql.table_exists(weird_name, self.conn) df2 = DataFrame([[1, 2], [3, 4]], columns=["a", weird_name]) c_tbl = f"test_weird_col_name{ndx:d}" df2.to_sql(c_tbl, self.conn) sql.table_exists(c_tbl, self.conn) # ----------------------------------------------------------------------------- # -- Old tests from 0.13.1 (before refactor using sqlalchemy) def date_format(dt): """Returns date in YYYYMMDD format.""" return dt.strftime("%Y%m%d") _formatters = { datetime: "'{}'".format, str: "'{}'".format, np.str_: "'{}'".format, bytes: "'{}'".format, float: "{:.8f}".format, int: "{:d}".format, type(None): lambda x: "NULL", np.float64: "{:.10f}".format, bool: "'{!s}'".format, } def format_query(sql, *args): processed_args = [] for arg in args: if isinstance(arg, float) and isna(arg): arg = None formatter = _formatters[type(arg)] processed_args.append(formatter(arg)) return sql % tuple(processed_args) def tquery(query, con=None, cur=None): """Replace removed sql.tquery function""" res = sql.execute(query, con=con, cur=cur).fetchall() if res is None: return None else: return list(res) @pytest.mark.single class TestXSQLite(SQLiteMixIn): @pytest.fixture(autouse=True) def setup_method(self, request, datapath): self.method = request.function self.conn = sqlite3.connect(":memory:") # In some test cases we may close db connection # Re-open conn here so we can perform cleanup in teardown yield self.method = request.function self.conn = sqlite3.connect(":memory:") def test_basic(self): frame = tm.makeTimeDataFrame() self._check_roundtrip(frame) def test_write_row_by_row(self): frame = tm.makeTimeDataFrame() frame.iloc[0, 0] = np.nan create_sql = sql.get_schema(frame, "test") cur = self.conn.cursor() cur.execute(create_sql) cur = self.conn.cursor() ins = "INSERT INTO test VALUES (%s, %s, %s, %s)" for idx, row in frame.iterrows(): fmt_sql = format_query(ins, *row) tquery(fmt_sql, cur=cur) self.conn.commit() result = sql.read_sql("select * from test", con=self.conn) result.index = frame.index tm.assert_frame_equal(result, frame, rtol=1e-3) def test_execute(self): frame = tm.makeTimeDataFrame() create_sql = sql.get_schema(frame, "test") cur = self.conn.cursor() cur.execute(create_sql) ins = "INSERT INTO test VALUES (?, ?, ?, ?)" row = frame.iloc[0] sql.execute(ins, self.conn, params=tuple(row)) self.conn.commit() result = sql.read_sql("select * from test", self.conn) result.index = frame.index[:1] tm.assert_frame_equal(result, frame[:1]) def 
test_schema(self): frame = tm.makeTimeDataFrame() create_sql = sql.get_schema(frame, "test") lines = create_sql.splitlines() for line in lines: tokens = line.split(" ") if len(tokens) == 2 and tokens[0] == "A": assert tokens[1] == "DATETIME" frame = tm.makeTimeDataFrame() create_sql = sql.get_schema(frame, "test", keys=["A", "B"]) lines = create_sql.splitlines() assert 'PRIMARY KEY ("A", "B")' in create_sql cur = self.conn.cursor() cur.execute(create_sql) def test_execute_fail(self): create_sql = """ CREATE TABLE test ( a TEXT, b TEXT, c REAL, PRIMARY KEY (a, b) ); """ cur = self.conn.cursor() cur.execute(create_sql) sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn) sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn) with pytest.raises(sql.DatabaseError, match="Execution failed on sql"): sql.execute('INSERT INTO test VALUES("foo", "bar", 7)', self.conn) def test_execute_closed_connection(self): create_sql = """ CREATE TABLE test ( a TEXT, b TEXT, c REAL, PRIMARY KEY (a, b) ); """ cur = self.conn.cursor() cur.execute(create_sql) sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn) self.conn.close() with tm.external_error_raised(sqlite3.ProgrammingError): tquery("select * from test", con=self.conn) def test_na_roundtrip(self): pass def _check_roundtrip(self, frame): sql.to_sql(frame, name="test_table", con=self.conn, index=False) result = sql.read_sql("select * from test_table", self.conn) # HACK! Change this once indexes are handled properly. result.index = frame.index expected = frame tm.assert_frame_equal(result, expected) frame["txt"] = ["a"] * len(frame) frame2 = frame.copy() new_idx = Index(np.arange(len(frame2))) + 10 frame2["Idx"] = new_idx.copy() sql.to_sql(frame2, name="test_table2", con=self.conn, index=False) result = sql.read_sql("select * from test_table2", self.conn, index_col="Idx") expected = frame.copy() expected.index = new_idx expected.index.name = "Idx" tm.assert_frame_equal(expected, result) def test_keyword_as_column_names(self): df = DataFrame({"From": np.ones(5)}) sql.to_sql(df, con=self.conn, name="testkeywords", index=False) def test_onecolumn_of_integer(self): # GH 3628 # a column_of_integers dataframe should transfer well to sql mono_df = DataFrame([1, 2], columns=["c0"]) sql.to_sql(mono_df, con=self.conn, name="mono_df", index=False) # computing the sum via sql con_x = self.conn the_sum = sum(my_c0[0] for my_c0 in con_x.execute("select * from mono_df")) # it should not fail, and gives 3 ( Issue #3628 ) assert the_sum == 3 result = sql.read_sql("select * from mono_df", con_x) tm.assert_frame_equal(result, mono_df) def test_if_exists(self): df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]}) df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]}) table_name = "table_if_exists" sql_select = f"SELECT * FROM {table_name}" def clean_up(test_table_to_drop): """ Drops tables created from individual tests so no dependencies arise from sequential tests """ self.drop_table(test_table_to_drop) msg = "'notvalidvalue' is not valid for if_exists" with pytest.raises(ValueError, match=msg): sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="notvalidvalue", ) clean_up(table_name) # test if_exists='fail' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail" ) msg = "Table 'table_if_exists' already exists" with pytest.raises(ValueError, match=msg): sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail" ) # test 
if_exists='replace' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="replace", index=False, ) assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")] sql.to_sql( frame=df_if_exists_2, con=self.conn, name=table_name, if_exists="replace", index=False, ) assert tquery(sql_select, con=self.conn) == [(3, "C"), (4, "D"), (5, "E")] clean_up(table_name) # test if_exists='append' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail", index=False, ) assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")] sql.to_sql( frame=df_if_exists_2, con=self.conn, name=table_name, if_exists="append", index=False, ) assert tquery(sql_select, con=self.conn) == [ (1, "A"), (2, "B"), (3, "C"), (4, "D"), (5, "E"), ] clean_up(table_name) @pytest.mark.single @pytest.mark.db @pytest.mark.skip( reason="gh-13611: there is no support for MySQL if SQLAlchemy is not installed" ) class TestXMySQL(MySQLMixIn): @pytest.fixture(autouse=True, scope="class") def setup_class(cls): pymysql = pytest.importorskip("pymysql") pymysql.connect(host="localhost", user="root", passwd="", db="pandas") try: pymysql.connect(read_default_group="pandas") except pymysql.ProgrammingError as err: raise RuntimeError( "Create a group of connection parameters under the heading " "[pandas] in your system's mysql default file, " "typically located at ~/.my.cnf or /etc/.my.cnf." ) from err except pymysql.Error as err: raise RuntimeError( "Cannot connect to database. " "Create a group of connection parameters under the heading " "[pandas] in your system's mysql default file, " "typically located at ~/.my.cnf or /etc/.my.cnf." ) from err @pytest.fixture(autouse=True) def setup_method(self, request, datapath): pymysql = pytest.importorskip("pymysql") pymysql.connect(host="localhost", user="root", passwd="", db="pandas") try: pymysql.connect(read_default_group="pandas") except pymysql.ProgrammingError as err: raise RuntimeError( "Create a group of connection parameters under the heading " "[pandas] in your system's mysql default file, " "typically located at ~/.my.cnf or /etc/.my.cnf." ) from err except pymysql.Error as err: raise RuntimeError( "Cannot connect to database. " "Create a group of connection parameters under the heading " "[pandas] in your system's mysql default file, " "typically located at ~/.my.cnf or /etc/.my.cnf." 
) from err self.method = request.function def test_basic(self): frame = tm.makeTimeDataFrame() self._check_roundtrip(frame) def test_write_row_by_row(self): frame = tm.makeTimeDataFrame() frame.iloc[0, 0] = np.nan drop_sql = "DROP TABLE IF EXISTS test" create_sql = sql.get_schema(frame, "test") cur = self.conn.cursor() cur.execute(drop_sql) cur.execute(create_sql) ins = "INSERT INTO test VALUES (%s, %s, %s, %s)" for idx, row in frame.iterrows(): fmt_sql = format_query(ins, *row) tquery(fmt_sql, cur=cur) self.conn.commit() result = sql.read_sql("select * from test", con=self.conn) result.index = frame.index tm.assert_frame_equal(result, frame, rtol=1e-3) # GH#32571 result comes back rounded to 6 digits in some builds; # no obvious pattern def test_chunksize_read_type(self): frame = tm.makeTimeDataFrame() frame.index.name = "index" drop_sql = "DROP TABLE IF EXISTS test" cur = self.conn.cursor() cur.execute(drop_sql) sql.to_sql(frame, name="test", con=self.conn) query = "select * from test" chunksize = 5 chunk_gen = pd.read_sql_query( sql=query, con=self.conn, chunksize=chunksize, index_col="index" ) chunk_df = next(chunk_gen) tm.assert_frame_equal(frame[:chunksize], chunk_df) def test_execute(self): frame = tm.makeTimeDataFrame() drop_sql = "DROP TABLE IF EXISTS test" create_sql = sql.get_schema(frame, "test") cur = self.conn.cursor() with warnings.catch_warnings(): warnings.filterwarnings("ignore", "Unknown table.*") cur.execute(drop_sql) cur.execute(create_sql) ins = "INSERT INTO test VALUES (%s, %s, %s, %s)" row = frame.iloc[0].values.tolist() sql.execute(ins, self.conn, params=tuple(row)) self.conn.commit() result = sql.read_sql("select * from test", self.conn) result.index = frame.index[:1] tm.assert_frame_equal(result, frame[:1]) def test_schema(self): frame = tm.makeTimeDataFrame() create_sql = sql.get_schema(frame, "test") lines = create_sql.splitlines() for line in lines: tokens = line.split(" ") if len(tokens) == 2 and tokens[0] == "A": assert tokens[1] == "DATETIME" frame = tm.makeTimeDataFrame() drop_sql = "DROP TABLE IF EXISTS test" create_sql = sql.get_schema(frame, "test", keys=["A", "B"]) lines = create_sql.splitlines() assert "PRIMARY KEY (`A`, `B`)" in create_sql cur = self.conn.cursor() cur.execute(drop_sql) cur.execute(create_sql) def test_execute_fail(self): drop_sql = "DROP TABLE IF EXISTS test" create_sql = """ CREATE TABLE test ( a TEXT, b TEXT, c REAL, PRIMARY KEY (a(5), b(5)) ); """ cur = self.conn.cursor() cur.execute(drop_sql) cur.execute(create_sql) sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn) sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn) with pytest.raises(Exception, match="<insert message here>"): sql.execute('INSERT INTO test VALUES("foo", "bar", 7)', self.conn) def test_execute_closed_connection(self, request, datapath): drop_sql = "DROP TABLE IF EXISTS test" create_sql = """ CREATE TABLE test ( a TEXT, b TEXT, c REAL, PRIMARY KEY (a(5), b(5)) ); """ cur = self.conn.cursor() cur.execute(drop_sql) cur.execute(create_sql) sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn) self.conn.close() with pytest.raises(Exception, match="<insert message here>"): tquery("select * from test", con=self.conn) # Initialize connection again (needed for tearDown) self.setup_method(request, datapath) def test_na_roundtrip(self): pass def _check_roundtrip(self, frame): drop_sql = "DROP TABLE IF EXISTS test_table" cur = self.conn.cursor() with warnings.catch_warnings(): warnings.filterwarnings("ignore", "Unknown 
table.*") cur.execute(drop_sql) sql.to_sql(frame, name="test_table", con=self.conn, index=False) result = sql.read_sql("select * from test_table", self.conn) # HACK! Change this once indexes are handled properly. result.index = frame.index result.index.name = frame.index.name expected = frame tm.assert_frame_equal(result, expected) frame["txt"] = ["a"] * len(frame) frame2 = frame.copy() index = Index(np.arange(len(frame2))) + 10 frame2["Idx"] = index drop_sql = "DROP TABLE IF EXISTS test_table2" cur = self.conn.cursor() with warnings.catch_warnings(): warnings.filterwarnings("ignore", "Unknown table.*") cur.execute(drop_sql) sql.to_sql(frame2, name="test_table2", con=self.conn, index=False) result = sql.read_sql("select * from test_table2", self.conn, index_col="Idx") expected = frame.copy() # HACK! Change this once indexes are handled properly. expected.index = index expected.index.names = result.index.names tm.assert_frame_equal(expected, result) def test_keyword_as_column_names(self): df = DataFrame({"From": np.ones(5)}) sql.to_sql( df, con=self.conn, name="testkeywords", if_exists="replace", index=False ) def test_if_exists(self): df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]}) df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]}) table_name = "table_if_exists" sql_select = f"SELECT * FROM {table_name}" def clean_up(test_table_to_drop): """ Drops tables created from individual tests so no dependencies arise from sequential tests """ self.drop_table(test_table_to_drop) # test if invalid value for if_exists raises appropriate error with pytest.raises(ValueError, match="<insert message here>"): sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="notvalidvalue", ) clean_up(table_name) # test if_exists='fail' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail", index=False, ) with pytest.raises(ValueError, match="<insert message here>"): sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail" ) # test if_exists='replace' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="replace", index=False, ) assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")] sql.to_sql( frame=df_if_exists_2, con=self.conn, name=table_name, if_exists="replace", index=False, ) assert tquery(sql_select, con=self.conn) == [(3, "C"), (4, "D"), (5, "E")] clean_up(table_name) # test if_exists='append' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail", index=False, ) assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")] sql.to_sql( frame=df_if_exists_2, con=self.conn, name=table_name, if_exists="append", index=False, ) assert tquery(sql_select, con=self.conn) == [ (1, "A"), (2, "B"), (3, "C"), (4, "D"), (5, "E"), ] clean_up(table_name)
# repo_name: gfyoung/pandas
# test_path: pandas/tests/io/test_sql.py
# code_path: pandas/core/reshape/api.py
from pandas.core.arrays.base import (
    ExtensionArray,
    ExtensionOpsMixin,
    ExtensionScalarOpsMixin,
)
from pandas.core.arrays.boolean import BooleanArray
from pandas.core.arrays.categorical import Categorical
from pandas.core.arrays.datetimes import DatetimeArray
from pandas.core.arrays.floating import FloatingArray
from pandas.core.arrays.integer import IntegerArray
from pandas.core.arrays.interval import IntervalArray
from pandas.core.arrays.masked import BaseMaskedArray
from pandas.core.arrays.numpy_ import PandasArray, PandasDtype
from pandas.core.arrays.period import PeriodArray, period_array
from pandas.core.arrays.sparse import SparseArray
from pandas.core.arrays.string_ import StringArray
from pandas.core.arrays.timedeltas import TimedeltaArray

__all__ = [
    "ExtensionArray",
    "ExtensionOpsMixin",
    "ExtensionScalarOpsMixin",
    "BaseMaskedArray",
    "BooleanArray",
    "Categorical",
    "DatetimeArray",
    "FloatingArray",
    "IntegerArray",
    "IntervalArray",
    "PandasArray",
    "PandasDtype",
    "PeriodArray",
    "period_array",
    "SparseArray",
    "StringArray",
    "TimedeltaArray",
]
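# -----------------------------------------------------------------------------
# A short sketch (not part of the original module) showing how a few of the
# extension arrays re-exported above are usually constructed through the
# public ``pd.array`` factory rather than the concrete classes; the dtype
# strings are the documented aliases.
import pandas as pd

int_arr = pd.array([1, 2, None], dtype="Int64")                # IntegerArray
str_arr = pd.array(["a", None], dtype="string")                # StringArray
per_arr = pd.array(["2020-01", "2020-02"], dtype="period[M]")  # PeriodArray

assert type(int_arr).__name__ == "IntegerArray"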
"""SQL io tests The SQL tests are broken down in different classes: - `PandasSQLTest`: base class with common methods for all test classes - Tests for the public API (only tests with sqlite3) - `_TestSQLApi` base class - `TestSQLApi`: test the public API with sqlalchemy engine - `TestSQLiteFallbackApi`: test the public API with a sqlite DBAPI connection - Tests for the different SQL flavors (flavor specific type conversions) - Tests for the sqlalchemy mode: `_TestSQLAlchemy` is the base class with common methods, `_TestSQLAlchemyConn` tests the API with a SQLAlchemy Connection object. The different tested flavors (sqlite3, MySQL, PostgreSQL) derive from the base class - Tests for the fallback mode (`TestSQLiteFallback`) """ import csv from datetime import date, datetime, time from io import StringIO import sqlite3 import warnings import numpy as np import pytest from pandas.core.dtypes.common import is_datetime64_dtype, is_datetime64tz_dtype import pandas as pd from pandas import ( DataFrame, Index, MultiIndex, Series, Timestamp, concat, date_range, isna, to_datetime, to_timedelta, ) import pandas._testing as tm import pandas.io.sql as sql from pandas.io.sql import read_sql_query, read_sql_table try: import sqlalchemy from sqlalchemy.ext import declarative from sqlalchemy.orm import session as sa_session import sqlalchemy.schema import sqlalchemy.sql.sqltypes as sqltypes SQLALCHEMY_INSTALLED = True except ImportError: SQLALCHEMY_INSTALLED = False SQL_STRINGS = { "create_iris": { "sqlite": """CREATE TABLE iris ( "SepalLength" REAL, "SepalWidth" REAL, "PetalLength" REAL, "PetalWidth" REAL, "Name" TEXT )""", "mysql": """CREATE TABLE iris ( `SepalLength` DOUBLE, `SepalWidth` DOUBLE, `PetalLength` DOUBLE, `PetalWidth` DOUBLE, `Name` VARCHAR(200) )""", "postgresql": """CREATE TABLE iris ( "SepalLength" DOUBLE PRECISION, "SepalWidth" DOUBLE PRECISION, "PetalLength" DOUBLE PRECISION, "PetalWidth" DOUBLE PRECISION, "Name" VARCHAR(200) )""", }, "insert_iris": { "sqlite": """INSERT INTO iris VALUES(?, ?, ?, ?, ?)""", "mysql": """INSERT INTO iris VALUES(%s, %s, %s, %s, "%s");""", "postgresql": """INSERT INTO iris VALUES(%s, %s, %s, %s, %s);""", }, "create_test_types": { "sqlite": """CREATE TABLE types_test_data ( "TextCol" TEXT, "DateCol" TEXT, "IntDateCol" INTEGER, "IntDateOnlyCol" INTEGER, "FloatCol" REAL, "IntCol" INTEGER, "BoolCol" INTEGER, "IntColWithNull" INTEGER, "BoolColWithNull" INTEGER )""", "mysql": """CREATE TABLE types_test_data ( `TextCol` TEXT, `DateCol` DATETIME, `IntDateCol` INTEGER, `IntDateOnlyCol` INTEGER, `FloatCol` DOUBLE, `IntCol` INTEGER, `BoolCol` BOOLEAN, `IntColWithNull` INTEGER, `BoolColWithNull` BOOLEAN )""", "postgresql": """CREATE TABLE types_test_data ( "TextCol" TEXT, "DateCol" TIMESTAMP, "DateColWithTz" TIMESTAMP WITH TIME ZONE, "IntDateCol" INTEGER, "IntDateOnlyCol" INTEGER, "FloatCol" DOUBLE PRECISION, "IntCol" INTEGER, "BoolCol" BOOLEAN, "IntColWithNull" INTEGER, "BoolColWithNull" BOOLEAN )""", }, "insert_test_types": { "sqlite": { "query": """ INSERT INTO types_test_data VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?) 
""", "fields": ( "TextCol", "DateCol", "IntDateCol", "IntDateOnlyCol", "FloatCol", "IntCol", "BoolCol", "IntColWithNull", "BoolColWithNull", ), }, "mysql": { "query": """ INSERT INTO types_test_data VALUES("%s", %s, %s, %s, %s, %s, %s, %s, %s) """, "fields": ( "TextCol", "DateCol", "IntDateCol", "IntDateOnlyCol", "FloatCol", "IntCol", "BoolCol", "IntColWithNull", "BoolColWithNull", ), }, "postgresql": { "query": """ INSERT INTO types_test_data VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) """, "fields": ( "TextCol", "DateCol", "DateColWithTz", "IntDateCol", "IntDateOnlyCol", "FloatCol", "IntCol", "BoolCol", "IntColWithNull", "BoolColWithNull", ), }, }, "read_parameters": { "sqlite": "SELECT * FROM iris WHERE Name=? AND SepalLength=?", "mysql": 'SELECT * FROM iris WHERE `Name`="%s" AND `SepalLength`=%s', "postgresql": 'SELECT * FROM iris WHERE "Name"=%s AND "SepalLength"=%s', }, "read_named_parameters": { "sqlite": """ SELECT * FROM iris WHERE Name=:name AND SepalLength=:length """, "mysql": """ SELECT * FROM iris WHERE `Name`="%(name)s" AND `SepalLength`=%(length)s """, "postgresql": """ SELECT * FROM iris WHERE "Name"=%(name)s AND "SepalLength"=%(length)s """, }, "read_no_parameters_with_percent": { "sqlite": "SELECT * FROM iris WHERE Name LIKE '%'", "mysql": "SELECT * FROM iris WHERE `Name` LIKE '%'", "postgresql": "SELECT * FROM iris WHERE \"Name\" LIKE '%'", }, "create_view": { "sqlite": """ CREATE VIEW iris_view AS SELECT * FROM iris """ }, } class MixInBase: def teardown_method(self, method): # if setup fails, there may not be a connection to close. if hasattr(self, "conn"): for tbl in self._get_all_tables(): self.drop_table(tbl) self._close_conn() class MySQLMixIn(MixInBase): def drop_table(self, table_name): cur = self.conn.cursor() cur.execute(f"DROP TABLE IF EXISTS {sql._get_valid_mysql_name(table_name)}") self.conn.commit() def _get_all_tables(self): cur = self.conn.cursor() cur.execute("SHOW TABLES") return [table[0] for table in cur.fetchall()] def _close_conn(self): from pymysql.err import Error try: self.conn.close() except Error: pass class SQLiteMixIn(MixInBase): def drop_table(self, table_name): self.conn.execute( f"DROP TABLE IF EXISTS {sql._get_valid_sqlite_name(table_name)}" ) self.conn.commit() def _get_all_tables(self): c = self.conn.execute("SELECT name FROM sqlite_master WHERE type='table'") return [table[0] for table in c.fetchall()] def _close_conn(self): self.conn.close() class SQLAlchemyMixIn(MixInBase): def drop_table(self, table_name): sql.SQLDatabase(self.conn).drop_table(table_name) def _get_all_tables(self): meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() table_list = meta.tables.keys() return table_list def _close_conn(self): # https://docs.sqlalchemy.org/en/13/core/connections.html#engine-disposal self.conn.dispose() class PandasSQLTest: """ Base class with common private methods for SQLAlchemy and fallback cases. 
""" def _get_exec(self): if hasattr(self.conn, "execute"): return self.conn else: return self.conn.cursor() @pytest.fixture(params=[("io", "data", "csv", "iris.csv")]) def load_iris_data(self, datapath, request): iris_csv_file = datapath(*request.param) if not hasattr(self, "conn"): self.setup_connect() self.drop_table("iris") self._get_exec().execute(SQL_STRINGS["create_iris"][self.flavor]) with open(iris_csv_file, mode="r", newline=None) as iris_csv: r = csv.reader(iris_csv) next(r) # skip header row ins = SQL_STRINGS["insert_iris"][self.flavor] for row in r: self._get_exec().execute(ins, row) def _load_iris_view(self): self.drop_table("iris_view") self._get_exec().execute(SQL_STRINGS["create_view"][self.flavor]) def _check_iris_loaded_frame(self, iris_frame): pytype = iris_frame.dtypes[0].type row = iris_frame.iloc[0] assert issubclass(pytype, np.floating) tm.equalContents(row.values, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"]) def _load_test1_data(self): columns = ["index", "A", "B", "C", "D"] data = [ ( "2000-01-03 00:00:00", 0.980268513777, 3.68573087906, -0.364216805298, -1.15973806169, ), ( "2000-01-04 00:00:00", 1.04791624281, -0.0412318367011, -0.16181208307, 0.212549316967, ), ( "2000-01-05 00:00:00", 0.498580885705, 0.731167677815, -0.537677223318, 1.34627041952, ), ( "2000-01-06 00:00:00", 1.12020151869, 1.56762092543, 0.00364077397681, 0.67525259227, ), ] self.test_frame1 = DataFrame(data, columns=columns) def _load_test2_data(self): df = DataFrame( { "A": [4, 1, 3, 6], "B": ["asd", "gsq", "ylt", "jkl"], "C": [1.1, 3.1, 6.9, 5.3], "D": [False, True, True, False], "E": ["1990-11-22", "1991-10-26", "1993-11-26", "1995-12-12"], } ) df["E"] = to_datetime(df["E"]) self.test_frame2 = df def _load_test3_data(self): columns = ["index", "A", "B"] data = [ ("2000-01-03 00:00:00", 2 ** 31 - 1, -1.987670), ("2000-01-04 00:00:00", -29, -0.0412318367011), ("2000-01-05 00:00:00", 20000, 0.731167677815), ("2000-01-06 00:00:00", -290867, 1.56762092543), ] self.test_frame3 = DataFrame(data, columns=columns) def _load_types_test_data(self, data): def _filter_to_flavor(flavor, df): flavor_dtypes = { "sqlite": { "TextCol": "str", "DateCol": "str", "IntDateCol": "int64", "IntDateOnlyCol": "int64", "FloatCol": "float", "IntCol": "int64", "BoolCol": "int64", "IntColWithNull": "float", "BoolColWithNull": "float", }, "mysql": { "TextCol": "str", "DateCol": "str", "IntDateCol": "int64", "IntDateOnlyCol": "int64", "FloatCol": "float", "IntCol": "int64", "BoolCol": "bool", "IntColWithNull": "float", "BoolColWithNull": "float", }, "postgresql": { "TextCol": "str", "DateCol": "str", "DateColWithTz": "str", "IntDateCol": "int64", "IntDateOnlyCol": "int64", "FloatCol": "float", "IntCol": "int64", "BoolCol": "bool", "IntColWithNull": "float", "BoolColWithNull": "float", }, } dtypes = flavor_dtypes[flavor] return df[dtypes.keys()].astype(dtypes) df = DataFrame(data) self.types_test = { flavor: _filter_to_flavor(flavor, df) for flavor in ("sqlite", "mysql", "postgresql") } def _load_raw_sql(self): self.drop_table("types_test_data") self._get_exec().execute(SQL_STRINGS["create_test_types"][self.flavor]) ins = SQL_STRINGS["insert_test_types"][self.flavor] data = [ { "TextCol": "first", "DateCol": "2000-01-03 00:00:00", "DateColWithTz": "2000-01-01 00:00:00-08:00", "IntDateCol": 535852800, "IntDateOnlyCol": 20101010, "FloatCol": 10.10, "IntCol": 1, "BoolCol": False, "IntColWithNull": 1, "BoolColWithNull": False, }, { "TextCol": "first", "DateCol": "2000-01-04 00:00:00", "DateColWithTz": "2000-06-01 00:00:00-07:00", 
"IntDateCol": 1356998400, "IntDateOnlyCol": 20101212, "FloatCol": 10.10, "IntCol": 1, "BoolCol": False, "IntColWithNull": None, "BoolColWithNull": None, }, ] for d in data: self._get_exec().execute( ins["query"], [d[field] for field in ins["fields"]] ) self._load_types_test_data(data) def _count_rows(self, table_name): result = ( self._get_exec() .execute(f"SELECT count(*) AS count_1 FROM {table_name}") .fetchone() ) return result[0] def _read_sql_iris(self): iris_frame = self.pandasSQL.read_query("SELECT * FROM iris") self._check_iris_loaded_frame(iris_frame) def _read_sql_iris_parameter(self): query = SQL_STRINGS["read_parameters"][self.flavor] params = ["Iris-setosa", 5.1] iris_frame = self.pandasSQL.read_query(query, params=params) self._check_iris_loaded_frame(iris_frame) def _read_sql_iris_named_parameter(self): query = SQL_STRINGS["read_named_parameters"][self.flavor] params = {"name": "Iris-setosa", "length": 5.1} iris_frame = self.pandasSQL.read_query(query, params=params) self._check_iris_loaded_frame(iris_frame) def _read_sql_iris_no_parameter_with_percent(self): query = SQL_STRINGS["read_no_parameters_with_percent"][self.flavor] iris_frame = self.pandasSQL.read_query(query, params=None) self._check_iris_loaded_frame(iris_frame) def _to_sql(self, method=None): self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", method=method) assert self.pandasSQL.has_table("test_frame1") num_entries = len(self.test_frame1) num_rows = self._count_rows("test_frame1") assert num_rows == num_entries # Nuke table self.drop_table("test_frame1") def _to_sql_empty(self): self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1.iloc[:0], "test_frame1") def _to_sql_fail(self): self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail") assert self.pandasSQL.has_table("test_frame1") msg = "Table 'test_frame1' already exists" with pytest.raises(ValueError, match=msg): self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail") self.drop_table("test_frame1") def _to_sql_replace(self): self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail") # Add to table again self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="replace") assert self.pandasSQL.has_table("test_frame1") num_entries = len(self.test_frame1) num_rows = self._count_rows("test_frame1") assert num_rows == num_entries self.drop_table("test_frame1") def _to_sql_append(self): # Nuke table just in case self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail") # Add to table again self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="append") assert self.pandasSQL.has_table("test_frame1") num_entries = 2 * len(self.test_frame1) num_rows = self._count_rows("test_frame1") assert num_rows == num_entries self.drop_table("test_frame1") def _to_sql_method_callable(self): check = [] # used to double check function below is really being used def sample(pd_table, conn, keys, data_iter): check.append(1) data = [dict(zip(keys, row)) for row in data_iter] conn.execute(pd_table.table.insert(), data) self.drop_table("test_frame1") self.pandasSQL.to_sql(self.test_frame1, "test_frame1", method=sample) assert self.pandasSQL.has_table("test_frame1") assert check == [1] num_entries = len(self.test_frame1) num_rows = self._count_rows("test_frame1") assert num_rows == num_entries # Nuke table self.drop_table("test_frame1") def _roundtrip(self): 
self.drop_table("test_frame_roundtrip") self.pandasSQL.to_sql(self.test_frame1, "test_frame_roundtrip") result = self.pandasSQL.read_query("SELECT * FROM test_frame_roundtrip") result.set_index("level_0", inplace=True) # result.index.astype(int) result.index.name = None tm.assert_frame_equal(result, self.test_frame1) def _execute_sql(self): # drop_sql = "DROP TABLE IF EXISTS test" # should already be done iris_results = self.pandasSQL.execute("SELECT * FROM iris") row = iris_results.fetchone() tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"]) def _to_sql_save_index(self): df = DataFrame.from_records( [(1, 2.1, "line1"), (2, 1.5, "line2")], columns=["A", "B", "C"], index=["A"] ) self.pandasSQL.to_sql(df, "test_to_sql_saves_index") ix_cols = self._get_index_columns("test_to_sql_saves_index") assert ix_cols == [["A"]] def _transaction_test(self): with self.pandasSQL.run_transaction() as trans: trans.execute("CREATE TABLE test_trans (A INT, B TEXT)") class DummyException(Exception): pass # Make sure when transaction is rolled back, no rows get inserted ins_sql = "INSERT INTO test_trans (A,B) VALUES (1, 'blah')" try: with self.pandasSQL.run_transaction() as trans: trans.execute(ins_sql) raise DummyException("error") except DummyException: # ignore raised exception pass res = self.pandasSQL.read_query("SELECT * FROM test_trans") assert len(res) == 0 # Make sure when transaction is committed, rows do get inserted with self.pandasSQL.run_transaction() as trans: trans.execute(ins_sql) res2 = self.pandasSQL.read_query("SELECT * FROM test_trans") assert len(res2) == 1 # ----------------------------------------------------------------------------- # -- Testing the public API class _TestSQLApi(PandasSQLTest): """ Base class to test the public API. From this two classes are derived to run these tests for both the sqlalchemy mode (`TestSQLApi`) and the fallback mode (`TestSQLiteFallbackApi`). These tests are run with sqlite3. Specific tests for the different sql flavours are included in `_TestSQLAlchemy`. Notes: flavor can always be passed even in SQLAlchemy mode, should be correctly ignored. 
we don't use drop_table because that isn't part of the public api """ flavor = "sqlite" mode: str def setup_connect(self): self.conn = self.connect() @pytest.fixture(autouse=True) def setup_method(self, load_iris_data): self.load_test_data_and_sql() def load_test_data_and_sql(self): self._load_iris_view() self._load_test1_data() self._load_test2_data() self._load_test3_data() self._load_raw_sql() def test_read_sql_iris(self): iris_frame = sql.read_sql_query("SELECT * FROM iris", self.conn) self._check_iris_loaded_frame(iris_frame) def test_read_sql_view(self): iris_frame = sql.read_sql_query("SELECT * FROM iris_view", self.conn) self._check_iris_loaded_frame(iris_frame) def test_read_sql_with_chunksize_no_result(self): query = "SELECT * FROM iris_view WHERE SepalLength < 0.0" with_batch = sql.read_sql_query(query, self.conn, chunksize=5) without_batch = sql.read_sql_query(query, self.conn) tm.assert_frame_equal(pd.concat(with_batch), without_batch) def test_to_sql(self): sql.to_sql(self.test_frame1, "test_frame1", self.conn) assert sql.has_table("test_frame1", self.conn) def test_to_sql_fail(self): sql.to_sql(self.test_frame1, "test_frame2", self.conn, if_exists="fail") assert sql.has_table("test_frame2", self.conn) msg = "Table 'test_frame2' already exists" with pytest.raises(ValueError, match=msg): sql.to_sql(self.test_frame1, "test_frame2", self.conn, if_exists="fail") def test_to_sql_replace(self): sql.to_sql(self.test_frame1, "test_frame3", self.conn, if_exists="fail") # Add to table again sql.to_sql(self.test_frame1, "test_frame3", self.conn, if_exists="replace") assert sql.has_table("test_frame3", self.conn) num_entries = len(self.test_frame1) num_rows = self._count_rows("test_frame3") assert num_rows == num_entries def test_to_sql_append(self): sql.to_sql(self.test_frame1, "test_frame4", self.conn, if_exists="fail") # Add to table again sql.to_sql(self.test_frame1, "test_frame4", self.conn, if_exists="append") assert sql.has_table("test_frame4", self.conn) num_entries = 2 * len(self.test_frame1) num_rows = self._count_rows("test_frame4") assert num_rows == num_entries def test_to_sql_type_mapping(self): sql.to_sql(self.test_frame3, "test_frame5", self.conn, index=False) result = sql.read_sql("SELECT * FROM test_frame5", self.conn) tm.assert_frame_equal(self.test_frame3, result) def test_to_sql_series(self): s = Series(np.arange(5, dtype="int64"), name="series") sql.to_sql(s, "test_series", self.conn, index=False) s2 = sql.read_sql_query("SELECT * FROM test_series", self.conn) tm.assert_frame_equal(s.to_frame(), s2) def test_roundtrip(self): sql.to_sql(self.test_frame1, "test_frame_roundtrip", con=self.conn) result = sql.read_sql_query("SELECT * FROM test_frame_roundtrip", con=self.conn) # HACK! 
result.index = self.test_frame1.index result.set_index("level_0", inplace=True) result.index.astype(int) result.index.name = None tm.assert_frame_equal(result, self.test_frame1) def test_roundtrip_chunksize(self): sql.to_sql( self.test_frame1, "test_frame_roundtrip", con=self.conn, index=False, chunksize=2, ) result = sql.read_sql_query("SELECT * FROM test_frame_roundtrip", con=self.conn) tm.assert_frame_equal(result, self.test_frame1) def test_execute_sql(self): # drop_sql = "DROP TABLE IF EXISTS test" # should already be done iris_results = sql.execute("SELECT * FROM iris", con=self.conn) row = iris_results.fetchone() tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"]) def test_date_parsing(self): # Test date parsing in read_sql # No Parsing df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn) assert not issubclass(df.DateCol.dtype.type, np.datetime64) df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates=["DateCol"] ) assert issubclass(df.DateCol.dtype.type, np.datetime64) assert df.DateCol.tolist() == [ Timestamp(2000, 1, 3, 0, 0, 0), Timestamp(2000, 1, 4, 0, 0, 0), ] df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates={"DateCol": "%Y-%m-%d %H:%M:%S"}, ) assert issubclass(df.DateCol.dtype.type, np.datetime64) assert df.DateCol.tolist() == [ Timestamp(2000, 1, 3, 0, 0, 0), Timestamp(2000, 1, 4, 0, 0, 0), ] df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates=["IntDateCol"] ) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) assert df.IntDateCol.tolist() == [ Timestamp(1986, 12, 25, 0, 0, 0), Timestamp(2013, 1, 1, 0, 0, 0), ] df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates={"IntDateCol": "s"} ) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) assert df.IntDateCol.tolist() == [ Timestamp(1986, 12, 25, 0, 0, 0), Timestamp(2013, 1, 1, 0, 0, 0), ] df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, parse_dates={"IntDateOnlyCol": "%Y%m%d"}, ) assert issubclass(df.IntDateOnlyCol.dtype.type, np.datetime64) assert df.IntDateOnlyCol.tolist() == [ Timestamp("2010-10-10"), Timestamp("2010-12-12"), ] @pytest.mark.parametrize("error", ["ignore", "raise", "coerce"]) @pytest.mark.parametrize( "read_sql, text, mode", [ (sql.read_sql, "SELECT * FROM types_test_data", ("sqlalchemy", "fallback")), (sql.read_sql, "types_test_data", ("sqlalchemy")), ( sql.read_sql_query, "SELECT * FROM types_test_data", ("sqlalchemy", "fallback"), ), (sql.read_sql_table, "types_test_data", ("sqlalchemy")), ], ) def test_custom_dateparsing_error(self, read_sql, text, mode, error): if self.mode in mode: expected = self.types_test[self.flavor].astype( {"DateCol": "datetime64[ns]"} ) result = read_sql( text, con=self.conn, parse_dates={ "DateCol": {"errors": error}, }, ) tm.assert_frame_equal(result, expected) def test_date_and_index(self): # Test case where same column appears in parse_date and index_col df = sql.read_sql_query( "SELECT * FROM types_test_data", self.conn, index_col="DateCol", parse_dates=["DateCol", "IntDateCol"], ) assert issubclass(df.index.dtype.type, np.datetime64) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) def test_timedelta(self): # see #6921 df = to_timedelta(Series(["00:00:01", "00:00:03"], name="foo")).to_frame() with tm.assert_produces_warning(UserWarning): df.to_sql("test_timedelta", self.conn) result = sql.read_sql_query("SELECT * FROM test_timedelta", self.conn) tm.assert_series_equal(result["foo"], 
df["foo"].view("int64")) def test_complex_raises(self): df = DataFrame({"a": [1 + 1j, 2j]}) msg = "Complex datatypes not supported" with pytest.raises(ValueError, match=msg): df.to_sql("test_complex", self.conn) @pytest.mark.parametrize( "index_name,index_label,expected", [ # no index name, defaults to 'index' (None, None, "index"), # specifying index_label (None, "other_label", "other_label"), # using the index name ("index_name", None, "index_name"), # has index name, but specifying index_label ("index_name", "other_label", "other_label"), # index name is integer (0, None, "0"), # index name is None but index label is integer (None, 0, "0"), ], ) def test_to_sql_index_label(self, index_name, index_label, expected): temp_frame = DataFrame({"col1": range(4)}) temp_frame.index.name = index_name query = "SELECT * FROM test_index_label" sql.to_sql(temp_frame, "test_index_label", self.conn, index_label=index_label) frame = sql.read_sql_query(query, self.conn) assert frame.columns[0] == expected def test_to_sql_index_label_multiindex(self): temp_frame = DataFrame( {"col1": range(4)}, index=MultiIndex.from_product([("A0", "A1"), ("B0", "B1")]), ) # no index name, defaults to 'level_0' and 'level_1' sql.to_sql(temp_frame, "test_index_label", self.conn) frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn) assert frame.columns[0] == "level_0" assert frame.columns[1] == "level_1" # specifying index_label sql.to_sql( temp_frame, "test_index_label", self.conn, if_exists="replace", index_label=["A", "B"], ) frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn) assert frame.columns[:2].tolist() == ["A", "B"] # using the index name temp_frame.index.names = ["A", "B"] sql.to_sql(temp_frame, "test_index_label", self.conn, if_exists="replace") frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn) assert frame.columns[:2].tolist() == ["A", "B"] # has index name, but specifying index_label sql.to_sql( temp_frame, "test_index_label", self.conn, if_exists="replace", index_label=["C", "D"], ) frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn) assert frame.columns[:2].tolist() == ["C", "D"] msg = "Length of 'index_label' should match number of levels, which is 2" with pytest.raises(ValueError, match=msg): sql.to_sql( temp_frame, "test_index_label", self.conn, if_exists="replace", index_label="C", ) def test_multiindex_roundtrip(self): df = DataFrame.from_records( [(1, 2.1, "line1"), (2, 1.5, "line2")], columns=["A", "B", "C"], index=["A", "B"], ) df.to_sql("test_multiindex_roundtrip", self.conn) result = sql.read_sql_query( "SELECT * FROM test_multiindex_roundtrip", self.conn, index_col=["A", "B"] ) tm.assert_frame_equal(df, result, check_index_type=True) @pytest.mark.parametrize( "dtype", [ None, int, float, {"A": int, "B": float}, ], ) def test_dtype_argument(self, dtype): # GH10285 Add dtype argument to read_sql_query df = DataFrame([[1.2, 3.4], [5.6, 7.8]], columns=["A", "B"]) df.to_sql("test_dtype_argument", self.conn) expected = df.astype(dtype) result = sql.read_sql_query( "SELECT A, B FROM test_dtype_argument", con=self.conn, dtype=dtype ) tm.assert_frame_equal(result, expected) def test_integer_col_names(self): df = DataFrame([[1, 2], [3, 4]], columns=[0, 1]) sql.to_sql(df, "test_frame_integer_col_names", self.conn, if_exists="replace") def test_get_schema(self): create_sql = sql.get_schema(self.test_frame1, "test", con=self.conn) assert "CREATE" in create_sql def test_get_schema_with_schema(self): # GH28486 create_sql = 
sql.get_schema( self.test_frame1, "test", con=self.conn, schema="pypi" ) assert "CREATE TABLE pypi." in create_sql def test_get_schema_dtypes(self): float_frame = DataFrame({"a": [1.1, 1.2], "b": [2.1, 2.2]}) dtype = sqlalchemy.Integer if self.mode == "sqlalchemy" else "INTEGER" create_sql = sql.get_schema( float_frame, "test", con=self.conn, dtype={"b": dtype} ) assert "CREATE" in create_sql assert "INTEGER" in create_sql def test_get_schema_keys(self): frame = DataFrame({"Col1": [1.1, 1.2], "Col2": [2.1, 2.2]}) create_sql = sql.get_schema(frame, "test", con=self.conn, keys="Col1") constraint_sentence = 'CONSTRAINT test_pk PRIMARY KEY ("Col1")' assert constraint_sentence in create_sql # multiple columns as key (GH10385) create_sql = sql.get_schema( self.test_frame1, "test", con=self.conn, keys=["A", "B"] ) constraint_sentence = 'CONSTRAINT test_pk PRIMARY KEY ("A", "B")' assert constraint_sentence in create_sql def test_chunksize_read(self): df = DataFrame(np.random.randn(22, 5), columns=list("abcde")) df.to_sql("test_chunksize", self.conn, index=False) # reading the query in one time res1 = sql.read_sql_query("select * from test_chunksize", self.conn) # reading the query in chunks with read_sql_query res2 = DataFrame() i = 0 sizes = [5, 5, 5, 5, 2] for chunk in sql.read_sql_query( "select * from test_chunksize", self.conn, chunksize=5 ): res2 = concat([res2, chunk], ignore_index=True) assert len(chunk) == sizes[i] i += 1 tm.assert_frame_equal(res1, res2) # reading the query in chunks with read_sql_query if self.mode == "sqlalchemy": res3 = DataFrame() i = 0 sizes = [5, 5, 5, 5, 2] for chunk in sql.read_sql_table("test_chunksize", self.conn, chunksize=5): res3 = concat([res3, chunk], ignore_index=True) assert len(chunk) == sizes[i] i += 1 tm.assert_frame_equal(res1, res3) def test_categorical(self): # GH8624 # test that categorical gets written correctly as dense column df = DataFrame( { "person_id": [1, 2, 3], "person_name": ["John P. Doe", "Jane Dove", "John P. Doe"], } ) df2 = df.copy() df2["person_name"] = df2["person_name"].astype("category") df2.to_sql("test_categorical", self.conn, index=False) res = sql.read_sql_query("SELECT * FROM test_categorical", self.conn) tm.assert_frame_equal(res, df) def test_unicode_column_name(self): # GH 11431 df = DataFrame([[1, 2], [3, 4]], columns=["\xe9", "b"]) df.to_sql("test_unicode", self.conn, index=False) def test_escaped_table_name(self): # GH 13206 df = DataFrame({"A": [0, 1, 2], "B": [0.2, np.nan, 5.6]}) df.to_sql("d1187b08-4943-4c8d-a7f6", self.conn, index=False) res = sql.read_sql_query("SELECT * FROM `d1187b08-4943-4c8d-a7f6`", self.conn) tm.assert_frame_equal(res, df) @pytest.mark.single @pytest.mark.skipif(not SQLALCHEMY_INSTALLED, reason="SQLAlchemy not installed") class TestSQLApi(SQLAlchemyMixIn, _TestSQLApi): """ Test the public API as it would be used directly Tests for `read_sql_table` are included here, as this is specific for the sqlalchemy mode. 
""" flavor = "sqlite" mode = "sqlalchemy" def connect(self): return sqlalchemy.create_engine("sqlite:///:memory:") def test_read_table_columns(self): # test columns argument in read_table sql.to_sql(self.test_frame1, "test_frame", self.conn) cols = ["A", "B"] result = sql.read_sql_table("test_frame", self.conn, columns=cols) assert result.columns.tolist() == cols def test_read_table_index_col(self): # test columns argument in read_table sql.to_sql(self.test_frame1, "test_frame", self.conn) result = sql.read_sql_table("test_frame", self.conn, index_col="index") assert result.index.names == ["index"] result = sql.read_sql_table("test_frame", self.conn, index_col=["A", "B"]) assert result.index.names == ["A", "B"] result = sql.read_sql_table( "test_frame", self.conn, index_col=["A", "B"], columns=["C", "D"] ) assert result.index.names == ["A", "B"] assert result.columns.tolist() == ["C", "D"] def test_read_sql_delegate(self): iris_frame1 = sql.read_sql_query("SELECT * FROM iris", self.conn) iris_frame2 = sql.read_sql("SELECT * FROM iris", self.conn) tm.assert_frame_equal(iris_frame1, iris_frame2) iris_frame1 = sql.read_sql_table("iris", self.conn) iris_frame2 = sql.read_sql("iris", self.conn) tm.assert_frame_equal(iris_frame1, iris_frame2) def test_not_reflect_all_tables(self): # create invalid table qry = """CREATE TABLE invalid (x INTEGER, y UNKNOWN);""" self.conn.execute(qry) qry = """CREATE TABLE other_table (x INTEGER, y INTEGER);""" self.conn.execute(qry) with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. warnings.simplefilter("always") # Trigger a warning. sql.read_sql_table("other_table", self.conn) sql.read_sql_query("SELECT * FROM other_table", self.conn) # Verify some things assert len(w) == 0 def test_warning_case_insensitive_table_name(self): # see gh-7815 # # We can't test that this warning is triggered, a the database # configuration would have to be altered. But here we test that # the warning is certainly NOT triggered in a normal case. with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. 
warnings.simplefilter("always") # This should not trigger a Warning self.test_frame1.to_sql("CaseSensitive", self.conn) # Verify some things assert len(w) == 0 def _get_index_columns(self, tbl_name): from sqlalchemy.engine import reflection insp = reflection.Inspector.from_engine(self.conn) ixs = insp.get_indexes("test_index_saved") ixs = [i["column_names"] for i in ixs] return ixs def test_sqlalchemy_type_mapping(self): # Test Timestamp objects (no datetime64 because of timezone) (GH9085) df = DataFrame( {"time": to_datetime(["201412120154", "201412110254"], utc=True)} ) db = sql.SQLDatabase(self.conn) table = sql.SQLTable("test_type", db, frame=df) # GH 9086: TIMESTAMP is the suggested type for datetimes with timezones assert isinstance(table.table.c["time"].type, sqltypes.TIMESTAMP) @pytest.mark.parametrize( "integer, expected", [ ("int8", "SMALLINT"), ("Int8", "SMALLINT"), ("uint8", "SMALLINT"), ("UInt8", "SMALLINT"), ("int16", "SMALLINT"), ("Int16", "SMALLINT"), ("uint16", "INTEGER"), ("UInt16", "INTEGER"), ("int32", "INTEGER"), ("Int32", "INTEGER"), ("uint32", "BIGINT"), ("UInt32", "BIGINT"), ("int64", "BIGINT"), ("Int64", "BIGINT"), (int, "BIGINT" if np.dtype(int).name == "int64" else "INTEGER"), ], ) def test_sqlalchemy_integer_mapping(self, integer, expected): # GH35076 Map pandas integer to optimal SQLAlchemy integer type df = DataFrame([0, 1], columns=["a"], dtype=integer) db = sql.SQLDatabase(self.conn) table = sql.SQLTable("test_type", db, frame=df) result = str(table.table.c.a.type) assert result == expected @pytest.mark.parametrize("integer", ["uint64", "UInt64"]) def test_sqlalchemy_integer_overload_mapping(self, integer): # GH35076 Map pandas integer to optimal SQLAlchemy integer type df = DataFrame([0, 1], columns=["a"], dtype=integer) db = sql.SQLDatabase(self.conn) with pytest.raises( ValueError, match="Unsigned 64 bit integer datatype is not supported" ): sql.SQLTable("test_type", db, frame=df) def test_database_uri_string(self): # Test read_sql and .to_sql method with a database URI (GH10654) test_frame1 = self.test_frame1 # db_uri = 'sqlite:///:memory:' # raises # sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) near # "iris": syntax error [SQL: 'iris'] with tm.ensure_clean() as name: db_uri = "sqlite:///" + name table = "iris" test_frame1.to_sql(table, db_uri, if_exists="replace", index=False) test_frame2 = sql.read_sql(table, db_uri) test_frame3 = sql.read_sql_table(table, db_uri) query = "SELECT * FROM iris" test_frame4 = sql.read_sql_query(query, db_uri) tm.assert_frame_equal(test_frame1, test_frame2) tm.assert_frame_equal(test_frame1, test_frame3) tm.assert_frame_equal(test_frame1, test_frame4) # using driver that will not be installed on Travis to trigger error # in sqlalchemy.create_engine -> test passing of this error to user try: # the rest of this test depends on pg8000's being absent import pg8000 # noqa pytest.skip("pg8000 is installed") except ImportError: pass db_uri = "postgresql+pg8000://user:pass@host/dbname" with pytest.raises(ImportError, match="pg8000"): sql.read_sql("select * from table", db_uri) def _make_iris_table_metadata(self): sa = sqlalchemy metadata = sa.MetaData() iris = sa.Table( "iris", metadata, sa.Column("SepalLength", sa.REAL), sa.Column("SepalWidth", sa.REAL), sa.Column("PetalLength", sa.REAL), sa.Column("PetalWidth", sa.REAL), sa.Column("Name", sa.TEXT), ) return iris def test_query_by_text_obj(self): # WIP : GH10846 name_text = sqlalchemy.text("select * from iris where name=:name") iris_df = sql.read_sql(name_text, 
self.conn, params={"name": "Iris-versicolor"}) all_names = set(iris_df["Name"]) assert all_names == {"Iris-versicolor"} def test_query_by_select_obj(self): # WIP : GH10846 iris = self._make_iris_table_metadata() name_select = sqlalchemy.select([iris]).where( iris.c.Name == sqlalchemy.bindparam("name") ) iris_df = sql.read_sql(name_select, self.conn, params={"name": "Iris-setosa"}) all_names = set(iris_df["Name"]) assert all_names == {"Iris-setosa"} def test_column_with_percentage(self): # GH 37157 df = DataFrame({"A": [0, 1, 2], "%_variation": [3, 4, 5]}) df.to_sql("test_column_percentage", self.conn, index=False) res = sql.read_sql_table("test_column_percentage", self.conn) tm.assert_frame_equal(res, df) class _EngineToConnMixin: """ A mixin that causes setup_connect to create a conn rather than an engine. """ @pytest.fixture(autouse=True) def setup_method(self, load_iris_data): super().load_test_data_and_sql() engine = self.conn conn = engine.connect() self.__tx = conn.begin() self.pandasSQL = sql.SQLDatabase(conn) self.__engine = engine self.conn = conn yield self.__tx.rollback() self.conn.close() self.conn = self.__engine self.pandasSQL = sql.SQLDatabase(self.__engine) @pytest.mark.single class TestSQLApiConn(_EngineToConnMixin, TestSQLApi): pass @pytest.mark.single class TestSQLiteFallbackApi(SQLiteMixIn, _TestSQLApi): """ Test the public sqlite connection fallback API """ flavor = "sqlite" mode = "fallback" def connect(self, database=":memory:"): return sqlite3.connect(database) def test_sql_open_close(self): # Test if the IO in the database still work if the connection closed # between the writing and reading (as in many real situations). with tm.ensure_clean() as name: conn = self.connect(name) sql.to_sql(self.test_frame3, "test_frame3_legacy", conn, index=False) conn.close() conn = self.connect(name) result = sql.read_sql_query("SELECT * FROM test_frame3_legacy;", conn) conn.close() tm.assert_frame_equal(self.test_frame3, result) @pytest.mark.skipif(SQLALCHEMY_INSTALLED, reason="SQLAlchemy is installed") def test_con_string_import_error(self): conn = "mysql://root@localhost/pandas" msg = "Using URI string without sqlalchemy installed" with pytest.raises(ImportError, match=msg): sql.read_sql("SELECT * FROM iris", conn) def test_read_sql_delegate(self): iris_frame1 = sql.read_sql_query("SELECT * FROM iris", self.conn) iris_frame2 = sql.read_sql("SELECT * FROM iris", self.conn) tm.assert_frame_equal(iris_frame1, iris_frame2) msg = "Execution failed on sql 'iris': near \"iris\": syntax error" with pytest.raises(sql.DatabaseError, match=msg): sql.read_sql("iris", self.conn) def test_safe_names_warning(self): # GH 6798 df = DataFrame([[1, 2], [3, 4]], columns=["a", "b "]) # has a space # warns on create table with spaces in names with tm.assert_produces_warning(): sql.to_sql(df, "test_frame3_legacy", self.conn, index=False) def test_get_schema2(self): # without providing a connection object (available for backwards comp) create_sql = sql.get_schema(self.test_frame1, "test") assert "CREATE" in create_sql def _get_sqlite_column_type(self, schema, column): for col in schema.split("\n"): if col.split()[0].strip('""') == column: return col.split()[1] raise ValueError(f"Column {column} not found") def test_sqlite_type_mapping(self): # Test Timestamp objects (no datetime64 because of timezone) (GH9085) df = DataFrame( {"time": to_datetime(["201412120154", "201412110254"], utc=True)} ) db = sql.SQLiteDatabase(self.conn) table = sql.SQLiteTable("test_type", db, frame=df) schema = 
table.sql_schema() assert self._get_sqlite_column_type(schema, "time") == "TIMESTAMP" # ----------------------------------------------------------------------------- # -- Database flavor specific tests class _TestSQLAlchemy(SQLAlchemyMixIn, PandasSQLTest): """ Base class for testing the sqlalchemy backend. Subclasses for specific database types are created below. Tests that deviate for each flavor are overwritten there. """ flavor: str @pytest.fixture(autouse=True, scope="class") def setup_class(cls): cls.setup_import() cls.setup_driver() conn = cls.conn = cls.connect() conn.connect() def load_test_data_and_sql(self): self._load_raw_sql() self._load_test1_data() @pytest.fixture(autouse=True) def setup_method(self, load_iris_data): self.load_test_data_and_sql() @classmethod def setup_import(cls): # Skip this test if SQLAlchemy not available if not SQLALCHEMY_INSTALLED: pytest.skip("SQLAlchemy not installed") @classmethod def setup_driver(cls): raise NotImplementedError() @classmethod def connect(cls): raise NotImplementedError() def setup_connect(self): try: self.conn = self.connect() self.pandasSQL = sql.SQLDatabase(self.conn) # to test if connection can be made: self.conn.connect() except sqlalchemy.exc.OperationalError: pytest.skip(f"Can't connect to {self.flavor} server") def test_read_sql(self): self._read_sql_iris() def test_read_sql_parameter(self): self._read_sql_iris_parameter() def test_read_sql_named_parameter(self): self._read_sql_iris_named_parameter() def test_to_sql(self): self._to_sql() def test_to_sql_empty(self): self._to_sql_empty() def test_to_sql_fail(self): self._to_sql_fail() def test_to_sql_replace(self): self._to_sql_replace() def test_to_sql_append(self): self._to_sql_append() def test_to_sql_method_multi(self): self._to_sql(method="multi") def test_to_sql_method_callable(self): self._to_sql_method_callable() def test_create_table(self): temp_conn = self.connect() temp_frame = DataFrame( {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]} ) pandasSQL = sql.SQLDatabase(temp_conn) pandasSQL.to_sql(temp_frame, "temp_frame") assert temp_conn.has_table("temp_frame") def test_drop_table(self): temp_conn = self.connect() temp_frame = DataFrame( {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]} ) pandasSQL = sql.SQLDatabase(temp_conn) pandasSQL.to_sql(temp_frame, "temp_frame") assert temp_conn.has_table("temp_frame") pandasSQL.drop_table("temp_frame") assert not temp_conn.has_table("temp_frame") def test_roundtrip(self): self._roundtrip() def test_execute_sql(self): self._execute_sql() def test_read_table(self): iris_frame = sql.read_sql_table("iris", con=self.conn) self._check_iris_loaded_frame(iris_frame) def test_read_table_columns(self): iris_frame = sql.read_sql_table( "iris", con=self.conn, columns=["SepalLength", "SepalLength"] ) tm.equalContents(iris_frame.columns.values, ["SepalLength", "SepalLength"]) def test_read_table_absent_raises(self): msg = "Table this_doesnt_exist not found" with pytest.raises(ValueError, match=msg): sql.read_sql_table("this_doesnt_exist", con=self.conn) def test_default_type_conversion(self): df = sql.read_sql_table("types_test_data", self.conn) assert issubclass(df.FloatCol.dtype.type, np.floating) assert issubclass(df.IntCol.dtype.type, np.integer) assert issubclass(df.BoolCol.dtype.type, np.bool_) # Int column with NA values stays as float assert issubclass(df.IntColWithNull.dtype.type, np.floating) # Bool column with NA values becomes object assert issubclass(df.BoolColWithNull.dtype.type, object) def 
test_bigint(self): # int64 should be converted to BigInteger, GH7433 df = DataFrame(data={"i64": [2 ** 62]}) df.to_sql("test_bigint", self.conn, index=False) result = sql.read_sql_table("test_bigint", self.conn) tm.assert_frame_equal(df, result) def test_default_date_load(self): df = sql.read_sql_table("types_test_data", self.conn) # IMPORTANT - sqlite has no native date type, so shouldn't parse, but # MySQL SHOULD be converted. assert issubclass(df.DateCol.dtype.type, np.datetime64) def test_datetime_with_timezone(self): # edge case that converts postgresql datetime with time zone types # to datetime64[ns,psycopg2.tz.FixedOffsetTimezone..], which is ok # but should be more natural, so coerce to datetime64[ns] for now def check(col): # check that a column is either datetime64[ns] # or datetime64[ns, UTC] if is_datetime64_dtype(col.dtype): # "2000-01-01 00:00:00-08:00" should convert to # "2000-01-01 08:00:00" assert col[0] == Timestamp("2000-01-01 08:00:00") # "2000-06-01 00:00:00-07:00" should convert to # "2000-06-01 07:00:00" assert col[1] == Timestamp("2000-06-01 07:00:00") elif is_datetime64tz_dtype(col.dtype): assert str(col.dt.tz) == "UTC" # "2000-01-01 00:00:00-08:00" should convert to # "2000-01-01 08:00:00" # "2000-06-01 00:00:00-07:00" should convert to # "2000-06-01 07:00:00" # GH 6415 expected_data = [ Timestamp("2000-01-01 08:00:00", tz="UTC"), Timestamp("2000-06-01 07:00:00", tz="UTC"), ] expected = Series(expected_data, name=col.name) tm.assert_series_equal(col, expected) else: raise AssertionError( f"DateCol loaded with incorrect type -> {col.dtype}" ) # GH11216 df = pd.read_sql_query("select * from types_test_data", self.conn) if not hasattr(df, "DateColWithTz"): pytest.skip("no column with datetime with time zone") # this is parsed on Travis (linux), but not on macosx for some reason # even with the same versions of psycopg2 & sqlalchemy, possibly a # Postgresql server version difference col = df.DateColWithTz assert is_datetime64tz_dtype(col.dtype) df = pd.read_sql_query( "select * from types_test_data", self.conn, parse_dates=["DateColWithTz"] ) if not hasattr(df, "DateColWithTz"): pytest.skip("no column with datetime with time zone") col = df.DateColWithTz assert is_datetime64tz_dtype(col.dtype) assert str(col.dt.tz) == "UTC" check(df.DateColWithTz) df = pd.concat( list( pd.read_sql_query( "select * from types_test_data", self.conn, chunksize=1 ) ), ignore_index=True, ) col = df.DateColWithTz assert is_datetime64tz_dtype(col.dtype) assert str(col.dt.tz) == "UTC" expected = sql.read_sql_table("types_test_data", self.conn) col = expected.DateColWithTz assert is_datetime64tz_dtype(col.dtype) tm.assert_series_equal(df.DateColWithTz, expected.DateColWithTz) # xref #7139 # this might or might not be converted depending on the postgres driver df = sql.read_sql_table("types_test_data", self.conn) check(df.DateColWithTz) def test_datetime_with_timezone_roundtrip(self): # GH 9086 # Write datetimetz data to a db and read it back # For dbs that support timestamps with timezones, should get back UTC # otherwise naive data should be returned expected = DataFrame( {"A": date_range("2013-01-01 09:00:00", periods=3, tz="US/Pacific")} ) expected.to_sql("test_datetime_tz", self.conn, index=False) if self.flavor == "postgresql": # SQLAlchemy "timezones" (i.e. 
offsets) are coerced to UTC expected["A"] = expected["A"].dt.tz_convert("UTC") else: # Otherwise, timestamps are returned as local, naive expected["A"] = expected["A"].dt.tz_localize(None) result = sql.read_sql_table("test_datetime_tz", self.conn) tm.assert_frame_equal(result, expected) result = sql.read_sql_query("SELECT * FROM test_datetime_tz", self.conn) if self.flavor == "sqlite": # read_sql_query does not return datetime type like read_sql_table assert isinstance(result.loc[0, "A"], str) result["A"] = to_datetime(result["A"]) tm.assert_frame_equal(result, expected) def test_out_of_bounds_datetime(self): # GH 26761 data = DataFrame({"date": datetime(9999, 1, 1)}, index=[0]) data.to_sql("test_datetime_obb", self.conn, index=False) result = sql.read_sql_table("test_datetime_obb", self.conn) expected = DataFrame([pd.NaT], columns=["date"]) tm.assert_frame_equal(result, expected) def test_naive_datetimeindex_roundtrip(self): # GH 23510 # Ensure that a naive DatetimeIndex isn't converted to UTC dates = date_range("2018-01-01", periods=5, freq="6H")._with_freq(None) expected = DataFrame({"nums": range(5)}, index=dates) expected.to_sql("foo_table", self.conn, index_label="info_date") result = sql.read_sql_table("foo_table", self.conn, index_col="info_date") # result index with gain a name from a set_index operation; expected tm.assert_frame_equal(result, expected, check_names=False) def test_date_parsing(self): # No Parsing df = sql.read_sql_table("types_test_data", self.conn) expected_type = object if self.flavor == "sqlite" else np.datetime64 assert issubclass(df.DateCol.dtype.type, expected_type) df = sql.read_sql_table("types_test_data", self.conn, parse_dates=["DateCol"]) assert issubclass(df.DateCol.dtype.type, np.datetime64) df = sql.read_sql_table( "types_test_data", self.conn, parse_dates={"DateCol": "%Y-%m-%d %H:%M:%S"} ) assert issubclass(df.DateCol.dtype.type, np.datetime64) df = sql.read_sql_table( "types_test_data", self.conn, parse_dates={"DateCol": {"format": "%Y-%m-%d %H:%M:%S"}}, ) assert issubclass(df.DateCol.dtype.type, np.datetime64) df = sql.read_sql_table( "types_test_data", self.conn, parse_dates=["IntDateCol"] ) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) df = sql.read_sql_table( "types_test_data", self.conn, parse_dates={"IntDateCol": "s"} ) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) df = sql.read_sql_table( "types_test_data", self.conn, parse_dates={"IntDateCol": {"unit": "s"}} ) assert issubclass(df.IntDateCol.dtype.type, np.datetime64) def test_datetime(self): df = DataFrame( {"A": date_range("2013-01-01 09:00:00", periods=3), "B": np.arange(3.0)} ) df.to_sql("test_datetime", self.conn) # with read_table -> type information from schema used result = sql.read_sql_table("test_datetime", self.conn) result = result.drop("index", axis=1) tm.assert_frame_equal(result, df) # with read_sql -> no type information -> sqlite has no native result = sql.read_sql_query("SELECT * FROM test_datetime", self.conn) result = result.drop("index", axis=1) if self.flavor == "sqlite": assert isinstance(result.loc[0, "A"], str) result["A"] = to_datetime(result["A"]) tm.assert_frame_equal(result, df) else: tm.assert_frame_equal(result, df) def test_datetime_NaT(self): df = DataFrame( {"A": date_range("2013-01-01 09:00:00", periods=3), "B": np.arange(3.0)} ) df.loc[1, "A"] = np.nan df.to_sql("test_datetime", self.conn, index=False) # with read_table -> type information from schema used result = sql.read_sql_table("test_datetime", self.conn) 
tm.assert_frame_equal(result, df) # with read_sql -> no type information -> sqlite has no native result = sql.read_sql_query("SELECT * FROM test_datetime", self.conn) if self.flavor == "sqlite": assert isinstance(result.loc[0, "A"], str) result["A"] = to_datetime(result["A"], errors="coerce") tm.assert_frame_equal(result, df) else: tm.assert_frame_equal(result, df) def test_datetime_date(self): # test support for datetime.date df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"]) df.to_sql("test_date", self.conn, index=False) res = read_sql_table("test_date", self.conn) result = res["a"] expected = to_datetime(df["a"]) # comes back as datetime64 tm.assert_series_equal(result, expected) def test_datetime_time(self): # test support for datetime.time df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"]) df.to_sql("test_time", self.conn, index=False) res = read_sql_table("test_time", self.conn) tm.assert_frame_equal(res, df) # GH8341 # first, use the fallback to have the sqlite adapter put in place sqlite_conn = TestSQLiteFallback.connect() sql.to_sql(df, "test_time2", sqlite_conn, index=False) res = sql.read_sql_query("SELECT * FROM test_time2", sqlite_conn) ref = df.applymap(lambda _: _.strftime("%H:%M:%S.%f")) tm.assert_frame_equal(ref, res) # check if adapter is in place # then test if sqlalchemy is unaffected by the sqlite adapter sql.to_sql(df, "test_time3", self.conn, index=False) if self.flavor == "sqlite": res = sql.read_sql_query("SELECT * FROM test_time3", self.conn) ref = df.applymap(lambda _: _.strftime("%H:%M:%S.%f")) tm.assert_frame_equal(ref, res) res = sql.read_sql_table("test_time3", self.conn) tm.assert_frame_equal(df, res) def test_mixed_dtype_insert(self): # see GH6509 s1 = Series(2 ** 25 + 1, dtype=np.int32) s2 = Series(0.0, dtype=np.float32) df = DataFrame({"s1": s1, "s2": s2}) # write and read again df.to_sql("test_read_write", self.conn, index=False) df2 = sql.read_sql_table("test_read_write", self.conn) tm.assert_frame_equal(df, df2, check_dtype=False, check_exact=True) def test_nan_numeric(self): # NaNs in numeric float column df = DataFrame({"A": [0, 1, 2], "B": [0.2, np.nan, 5.6]}) df.to_sql("test_nan", self.conn, index=False) # with read_table result = sql.read_sql_table("test_nan", self.conn) tm.assert_frame_equal(result, df) # with read_sql result = sql.read_sql_query("SELECT * FROM test_nan", self.conn) tm.assert_frame_equal(result, df) def test_nan_fullcolumn(self): # full NaN column (numeric float column) df = DataFrame({"A": [0, 1, 2], "B": [np.nan, np.nan, np.nan]}) df.to_sql("test_nan", self.conn, index=False) # with read_table result = sql.read_sql_table("test_nan", self.conn) tm.assert_frame_equal(result, df) # with read_sql -> not type info from table -> stays None df["B"] = df["B"].astype("object") df["B"] = None result = sql.read_sql_query("SELECT * FROM test_nan", self.conn) tm.assert_frame_equal(result, df) def test_nan_string(self): # NaNs in string column df = DataFrame({"A": [0, 1, 2], "B": ["a", "b", np.nan]}) df.to_sql("test_nan", self.conn, index=False) # NaNs are coming back as None df.loc[2, "B"] = None # with read_table result = sql.read_sql_table("test_nan", self.conn) tm.assert_frame_equal(result, df) # with read_sql result = sql.read_sql_query("SELECT * FROM test_nan", self.conn) tm.assert_frame_equal(result, df) def _get_index_columns(self, tbl_name): from sqlalchemy.engine import reflection insp = reflection.Inspector.from_engine(self.conn) ixs = insp.get_indexes(tbl_name) ixs = [i["column_names"] for i in ixs] 
return ixs def test_to_sql_save_index(self): self._to_sql_save_index() def test_transactions(self): self._transaction_test() def test_get_schema_create_table(self): # Use a dataframe without a bool column, since MySQL converts bool to # TINYINT (which read_sql_table returns as an int and causes a dtype # mismatch) self._load_test3_data() tbl = "test_get_schema_create_table" create_sql = sql.get_schema(self.test_frame3, tbl, con=self.conn) blank_test_df = self.test_frame3.iloc[:0] self.drop_table(tbl) self.conn.execute(create_sql) returned_df = sql.read_sql_table(tbl, self.conn) tm.assert_frame_equal(returned_df, blank_test_df, check_index_type=False) self.drop_table(tbl) def test_dtype(self): cols = ["A", "B"] data = [(0.8, True), (0.9, None)] df = DataFrame(data, columns=cols) df.to_sql("dtype_test", self.conn) df.to_sql("dtype_test2", self.conn, dtype={"B": sqlalchemy.TEXT}) meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() sqltype = meta.tables["dtype_test2"].columns["B"].type assert isinstance(sqltype, sqlalchemy.TEXT) msg = "The type of B is not a SQLAlchemy type" with pytest.raises(ValueError, match=msg): df.to_sql("error", self.conn, dtype={"B": str}) # GH9083 df.to_sql("dtype_test3", self.conn, dtype={"B": sqlalchemy.String(10)}) meta.reflect() sqltype = meta.tables["dtype_test3"].columns["B"].type assert isinstance(sqltype, sqlalchemy.String) assert sqltype.length == 10 # single dtype df.to_sql("single_dtype_test", self.conn, dtype=sqlalchemy.TEXT) meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() sqltypea = meta.tables["single_dtype_test"].columns["A"].type sqltypeb = meta.tables["single_dtype_test"].columns["B"].type assert isinstance(sqltypea, sqlalchemy.TEXT) assert isinstance(sqltypeb, sqlalchemy.TEXT) def test_notna_dtype(self): cols = { "Bool": Series([True, None]), "Date": Series([datetime(2012, 5, 1), None]), "Int": Series([1, None], dtype="object"), "Float": Series([1.1, None]), } df = DataFrame(cols) tbl = "notna_dtype_test" df.to_sql(tbl, self.conn) returned_df = sql.read_sql_table(tbl, self.conn) # noqa meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() if self.flavor == "mysql": my_type = sqltypes.Integer else: my_type = sqltypes.Boolean col_dict = meta.tables[tbl].columns assert isinstance(col_dict["Bool"].type, my_type) assert isinstance(col_dict["Date"].type, sqltypes.DateTime) assert isinstance(col_dict["Int"].type, sqltypes.Integer) assert isinstance(col_dict["Float"].type, sqltypes.Float) def test_double_precision(self): V = 1.23456789101112131415 df = DataFrame( { "f32": Series([V], dtype="float32"), "f64": Series([V], dtype="float64"), "f64_as_f32": Series([V], dtype="float64"), "i32": Series([5], dtype="int32"), "i64": Series([5], dtype="int64"), } ) df.to_sql( "test_dtypes", self.conn, index=False, if_exists="replace", dtype={"f64_as_f32": sqlalchemy.Float(precision=23)}, ) res = sql.read_sql_table("test_dtypes", self.conn) # check precision of float64 assert np.round(df["f64"].iloc[0], 14) == np.round(res["f64"].iloc[0], 14) # check sql types meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() col_dict = meta.tables["test_dtypes"].columns assert str(col_dict["f32"].type) == str(col_dict["f64_as_f32"].type) assert isinstance(col_dict["f32"].type, sqltypes.Float) assert isinstance(col_dict["f64"].type, sqltypes.Float) assert isinstance(col_dict["i32"].type, sqltypes.Integer) assert isinstance(col_dict["i64"].type, sqltypes.BigInteger) def test_connectable_issue_example(self): # This tests the example 
raised in issue # https://github.com/pandas-dev/pandas/issues/10104 def foo(connection): query = "SELECT test_foo_data FROM test_foo_data" return sql.read_sql_query(query, con=connection) def bar(connection, data): data.to_sql(name="test_foo_data", con=connection, if_exists="append") def main(connectable): with connectable.connect() as conn: with conn.begin(): foo_data = conn.run_callable(foo) conn.run_callable(bar, foo_data) DataFrame({"test_foo_data": [0, 1, 2]}).to_sql("test_foo_data", self.conn) main(self.conn) @pytest.mark.parametrize( "input", [{"foo": [np.inf]}, {"foo": [-np.inf]}, {"foo": [-np.inf], "infe0": ["bar"]}], ) def test_to_sql_with_negative_npinf(self, input): # GH 34431 df = DataFrame(input) if self.flavor == "mysql": msg = "inf cannot be used with MySQL" with pytest.raises(ValueError, match=msg): df.to_sql("foobar", self.conn, index=False) else: df.to_sql("foobar", self.conn, index=False) res = sql.read_sql_table("foobar", self.conn) tm.assert_equal(df, res) def test_temporary_table(self): test_data = "Hello, World!" expected = DataFrame({"spam": [test_data]}) Base = declarative.declarative_base() class Temporary(Base): __tablename__ = "temp_test" __table_args__ = {"prefixes": ["TEMPORARY"]} id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True) spam = sqlalchemy.Column(sqlalchemy.Unicode(30), nullable=False) Session = sa_session.sessionmaker(bind=self.conn) session = Session() with session.transaction: conn = session.connection() Temporary.__table__.create(conn) session.add(Temporary(spam=test_data)) session.flush() df = sql.read_sql_query(sql=sqlalchemy.select([Temporary.spam]), con=conn) tm.assert_frame_equal(df, expected) class _TestSQLAlchemyConn(_EngineToConnMixin, _TestSQLAlchemy): def test_transactions(self): pytest.skip("Nested transactions rollbacks don't work with Pandas") class _TestSQLiteAlchemy: """ Test the sqlalchemy backend against an in-memory sqlite database. """ flavor = "sqlite" @classmethod def connect(cls): return sqlalchemy.create_engine("sqlite:///:memory:") @classmethod def setup_driver(cls): # sqlite3 is built-in cls.driver = None def test_default_type_conversion(self): df = sql.read_sql_table("types_test_data", self.conn) assert issubclass(df.FloatCol.dtype.type, np.floating) assert issubclass(df.IntCol.dtype.type, np.integer) # sqlite has no boolean type, so integer type is returned assert issubclass(df.BoolCol.dtype.type, np.integer) # Int column with NA values stays as float assert issubclass(df.IntColWithNull.dtype.type, np.floating) # Non-native Bool column with NA values stays as float assert issubclass(df.BoolColWithNull.dtype.type, np.floating) def test_default_date_load(self): df = sql.read_sql_table("types_test_data", self.conn) # IMPORTANT - sqlite has no native date type, so shouldn't parse, but assert not issubclass(df.DateCol.dtype.type, np.datetime64) def test_bigint_warning(self): # test no warning for BIGINT (to support int64) is raised (GH7433) df = DataFrame({"a": [1, 2]}, dtype="int64") df.to_sql("test_bigintwarning", self.conn, index=False) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") sql.read_sql_table("test_bigintwarning", self.conn) assert len(w) == 0 class _TestMySQLAlchemy: """ Test the sqlalchemy backend against an MySQL database. 
""" flavor = "mysql" port = 3306 @classmethod def connect(cls): return sqlalchemy.create_engine( f"mysql+{cls.driver}://root@localhost:{cls.port}/pandas", connect_args=cls.connect_args, ) @classmethod def setup_driver(cls): pymysql = pytest.importorskip("pymysql") cls.driver = "pymysql" cls.connect_args = {"client_flag": pymysql.constants.CLIENT.MULTI_STATEMENTS} def test_default_type_conversion(self): df = sql.read_sql_table("types_test_data", self.conn) assert issubclass(df.FloatCol.dtype.type, np.floating) assert issubclass(df.IntCol.dtype.type, np.integer) # MySQL has no real BOOL type (it's an alias for TINYINT) assert issubclass(df.BoolCol.dtype.type, np.integer) # Int column with NA values stays as float assert issubclass(df.IntColWithNull.dtype.type, np.floating) # Bool column with NA = int column with NA values => becomes float assert issubclass(df.BoolColWithNull.dtype.type, np.floating) def test_read_procedure(self): import pymysql # see GH7324. Although it is more an api test, it is added to the # mysql tests as sqlite does not have stored procedures df = DataFrame({"a": [1, 2, 3], "b": [0.1, 0.2, 0.3]}) df.to_sql("test_procedure", self.conn, index=False) proc = """DROP PROCEDURE IF EXISTS get_testdb; CREATE PROCEDURE get_testdb () BEGIN SELECT * FROM test_procedure; END""" connection = self.conn.connect() trans = connection.begin() try: r1 = connection.execute(proc) # noqa trans.commit() except pymysql.Error: trans.rollback() raise res1 = sql.read_sql_query("CALL get_testdb();", self.conn) tm.assert_frame_equal(df, res1) # test delegation to read_sql_query res2 = sql.read_sql("CALL get_testdb();", self.conn) tm.assert_frame_equal(df, res2) class _TestPostgreSQLAlchemy: """ Test the sqlalchemy backend against an PostgreSQL database. """ flavor = "postgresql" port = 5432 @classmethod def connect(cls): return sqlalchemy.create_engine( f"postgresql+{cls.driver}://postgres:postgres@localhost:{cls.port}/pandas" ) @classmethod def setup_driver(cls): pytest.importorskip("psycopg2") cls.driver = "psycopg2" def test_schema_support(self): # only test this for postgresql (schema's not supported in # mysql/sqlite) df = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]}) # create a schema self.conn.execute("DROP SCHEMA IF EXISTS other CASCADE;") self.conn.execute("CREATE SCHEMA other;") # write dataframe to different schema's df.to_sql("test_schema_public", self.conn, index=False) df.to_sql( "test_schema_public_explicit", self.conn, index=False, schema="public" ) df.to_sql("test_schema_other", self.conn, index=False, schema="other") # read dataframes back in res1 = sql.read_sql_table("test_schema_public", self.conn) tm.assert_frame_equal(df, res1) res2 = sql.read_sql_table("test_schema_public_explicit", self.conn) tm.assert_frame_equal(df, res2) res3 = sql.read_sql_table( "test_schema_public_explicit", self.conn, schema="public" ) tm.assert_frame_equal(df, res3) res4 = sql.read_sql_table("test_schema_other", self.conn, schema="other") tm.assert_frame_equal(df, res4) msg = "Table test_schema_other not found" with pytest.raises(ValueError, match=msg): sql.read_sql_table("test_schema_other", self.conn, schema="public") # different if_exists options # create a schema self.conn.execute("DROP SCHEMA IF EXISTS other CASCADE;") self.conn.execute("CREATE SCHEMA other;") # write dataframe with different if_exists options df.to_sql("test_schema_other", self.conn, schema="other", index=False) df.to_sql( "test_schema_other", self.conn, schema="other", index=False, if_exists="replace", ) 
df.to_sql( "test_schema_other", self.conn, schema="other", index=False, if_exists="append", ) res = sql.read_sql_table("test_schema_other", self.conn, schema="other") tm.assert_frame_equal(concat([df, df], ignore_index=True), res) # specifying schema in user-provided meta # The schema won't be applied on another Connection # because of transactional schemas if isinstance(self.conn, sqlalchemy.engine.Engine): engine2 = self.connect() meta = sqlalchemy.MetaData(engine2, schema="other") pdsql = sql.SQLDatabase(engine2, meta=meta) pdsql.to_sql(df, "test_schema_other2", index=False) pdsql.to_sql(df, "test_schema_other2", index=False, if_exists="replace") pdsql.to_sql(df, "test_schema_other2", index=False, if_exists="append") res1 = sql.read_sql_table("test_schema_other2", self.conn, schema="other") res2 = pdsql.read_table("test_schema_other2") tm.assert_frame_equal(res1, res2) def test_copy_from_callable_insertion_method(self): # GH 8953 # Example in io.rst found under _io.sql.method # not available in sqlite, mysql def psql_insert_copy(table, conn, keys, data_iter): # gets a DBAPI connection that can provide a cursor dbapi_conn = conn.connection with dbapi_conn.cursor() as cur: s_buf = StringIO() writer = csv.writer(s_buf) writer.writerows(data_iter) s_buf.seek(0) columns = ", ".join(f'"{k}"' for k in keys) if table.schema: table_name = f"{table.schema}.{table.name}" else: table_name = table.name sql_query = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV" cur.copy_expert(sql=sql_query, file=s_buf) expected = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]}) expected.to_sql( "test_copy_insert", self.conn, index=False, method=psql_insert_copy ) result = sql.read_sql_table("test_copy_insert", self.conn) tm.assert_frame_equal(result, expected) @pytest.mark.single @pytest.mark.db class TestMySQLAlchemy(_TestMySQLAlchemy, _TestSQLAlchemy): pass @pytest.mark.single @pytest.mark.db class TestMySQLAlchemyConn(_TestMySQLAlchemy, _TestSQLAlchemyConn): pass @pytest.mark.single @pytest.mark.db class TestPostgreSQLAlchemy(_TestPostgreSQLAlchemy, _TestSQLAlchemy): pass @pytest.mark.single @pytest.mark.db class TestPostgreSQLAlchemyConn(_TestPostgreSQLAlchemy, _TestSQLAlchemyConn): pass @pytest.mark.single class TestSQLiteAlchemy(_TestSQLiteAlchemy, _TestSQLAlchemy): pass @pytest.mark.single class TestSQLiteAlchemyConn(_TestSQLiteAlchemy, _TestSQLAlchemyConn): pass # ----------------------------------------------------------------------------- # -- Test Sqlite / MySQL fallback @pytest.mark.single class TestSQLiteFallback(SQLiteMixIn, PandasSQLTest): """ Test the fallback mode against an in-memory sqlite database. 
""" flavor = "sqlite" @classmethod def connect(cls): return sqlite3.connect(":memory:") def setup_connect(self): self.conn = self.connect() def load_test_data_and_sql(self): self.pandasSQL = sql.SQLiteDatabase(self.conn) self._load_test1_data() @pytest.fixture(autouse=True) def setup_method(self, load_iris_data): self.load_test_data_and_sql() def test_read_sql(self): self._read_sql_iris() def test_read_sql_parameter(self): self._read_sql_iris_parameter() def test_read_sql_named_parameter(self): self._read_sql_iris_named_parameter() def test_to_sql(self): self._to_sql() def test_to_sql_empty(self): self._to_sql_empty() def test_to_sql_fail(self): self._to_sql_fail() def test_to_sql_replace(self): self._to_sql_replace() def test_to_sql_append(self): self._to_sql_append() def test_to_sql_method_multi(self): # GH 29921 self._to_sql(method="multi") def test_create_and_drop_table(self): temp_frame = DataFrame( {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]} ) self.pandasSQL.to_sql(temp_frame, "drop_test_frame") assert self.pandasSQL.has_table("drop_test_frame") self.pandasSQL.drop_table("drop_test_frame") assert not self.pandasSQL.has_table("drop_test_frame") def test_roundtrip(self): self._roundtrip() def test_execute_sql(self): self._execute_sql() def test_datetime_date(self): # test support for datetime.date df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"]) df.to_sql("test_date", self.conn, index=False) res = read_sql_query("SELECT * FROM test_date", self.conn) if self.flavor == "sqlite": # comes back as strings tm.assert_frame_equal(res, df.astype(str)) elif self.flavor == "mysql": tm.assert_frame_equal(res, df) def test_datetime_time(self): # test support for datetime.time, GH #8341 df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"]) df.to_sql("test_time", self.conn, index=False) res = read_sql_query("SELECT * FROM test_time", self.conn) if self.flavor == "sqlite": # comes back as strings expected = df.applymap(lambda _: _.strftime("%H:%M:%S.%f")) tm.assert_frame_equal(res, expected) def _get_index_columns(self, tbl_name): ixs = sql.read_sql_query( "SELECT * FROM sqlite_master WHERE type = 'index' " + f"AND tbl_name = '{tbl_name}'", self.conn, ) ix_cols = [] for ix_name in ixs.name: ix_info = sql.read_sql_query(f"PRAGMA index_info({ix_name})", self.conn) ix_cols.append(ix_info.name.tolist()) return ix_cols def test_to_sql_save_index(self): self._to_sql_save_index() def test_transactions(self): self._transaction_test() def _get_sqlite_column_type(self, table, column): recs = self.conn.execute(f"PRAGMA table_info({table})") for cid, name, ctype, not_null, default, pk in recs: if name == column: return ctype raise ValueError(f"Table {table}, column {column} not found") def test_dtype(self): if self.flavor == "mysql": pytest.skip("Not applicable to MySQL legacy") cols = ["A", "B"] data = [(0.8, True), (0.9, None)] df = DataFrame(data, columns=cols) df.to_sql("dtype_test", self.conn) df.to_sql("dtype_test2", self.conn, dtype={"B": "STRING"}) # sqlite stores Boolean values as INTEGER assert self._get_sqlite_column_type("dtype_test", "B") == "INTEGER" assert self._get_sqlite_column_type("dtype_test2", "B") == "STRING" msg = r"B \(<class 'bool'>\) not a string" with pytest.raises(ValueError, match=msg): df.to_sql("error", self.conn, dtype={"B": bool}) # single dtype df.to_sql("single_dtype_test", self.conn, dtype="STRING") assert self._get_sqlite_column_type("single_dtype_test", "A") == "STRING" assert self._get_sqlite_column_type("single_dtype_test", "B") == 
"STRING" def test_notna_dtype(self): if self.flavor == "mysql": pytest.skip("Not applicable to MySQL legacy") cols = { "Bool": Series([True, None]), "Date": Series([datetime(2012, 5, 1), None]), "Int": Series([1, None], dtype="object"), "Float": Series([1.1, None]), } df = DataFrame(cols) tbl = "notna_dtype_test" df.to_sql(tbl, self.conn) assert self._get_sqlite_column_type(tbl, "Bool") == "INTEGER" assert self._get_sqlite_column_type(tbl, "Date") == "TIMESTAMP" assert self._get_sqlite_column_type(tbl, "Int") == "INTEGER" assert self._get_sqlite_column_type(tbl, "Float") == "REAL" def test_illegal_names(self): # For sqlite, these should work fine df = DataFrame([[1, 2], [3, 4]], columns=["a", "b"]) msg = "Empty table or column name specified" with pytest.raises(ValueError, match=msg): df.to_sql("", self.conn) for ndx, weird_name in enumerate( [ "test_weird_name]", "test_weird_name[", "test_weird_name`", 'test_weird_name"', "test_weird_name'", "_b.test_weird_name_01-30", '"_b.test_weird_name_01-30"', "99beginswithnumber", "12345", "\xe9", ] ): df.to_sql(weird_name, self.conn) sql.table_exists(weird_name, self.conn) df2 = DataFrame([[1, 2], [3, 4]], columns=["a", weird_name]) c_tbl = f"test_weird_col_name{ndx:d}" df2.to_sql(c_tbl, self.conn) sql.table_exists(c_tbl, self.conn) # ----------------------------------------------------------------------------- # -- Old tests from 0.13.1 (before refactor using sqlalchemy) def date_format(dt): """Returns date in YYYYMMDD format.""" return dt.strftime("%Y%m%d") _formatters = { datetime: "'{}'".format, str: "'{}'".format, np.str_: "'{}'".format, bytes: "'{}'".format, float: "{:.8f}".format, int: "{:d}".format, type(None): lambda x: "NULL", np.float64: "{:.10f}".format, bool: "'{!s}'".format, } def format_query(sql, *args): processed_args = [] for arg in args: if isinstance(arg, float) and isna(arg): arg = None formatter = _formatters[type(arg)] processed_args.append(formatter(arg)) return sql % tuple(processed_args) def tquery(query, con=None, cur=None): """Replace removed sql.tquery function""" res = sql.execute(query, con=con, cur=cur).fetchall() if res is None: return None else: return list(res) @pytest.mark.single class TestXSQLite(SQLiteMixIn): @pytest.fixture(autouse=True) def setup_method(self, request, datapath): self.method = request.function self.conn = sqlite3.connect(":memory:") # In some test cases we may close db connection # Re-open conn here so we can perform cleanup in teardown yield self.method = request.function self.conn = sqlite3.connect(":memory:") def test_basic(self): frame = tm.makeTimeDataFrame() self._check_roundtrip(frame) def test_write_row_by_row(self): frame = tm.makeTimeDataFrame() frame.iloc[0, 0] = np.nan create_sql = sql.get_schema(frame, "test") cur = self.conn.cursor() cur.execute(create_sql) cur = self.conn.cursor() ins = "INSERT INTO test VALUES (%s, %s, %s, %s)" for idx, row in frame.iterrows(): fmt_sql = format_query(ins, *row) tquery(fmt_sql, cur=cur) self.conn.commit() result = sql.read_sql("select * from test", con=self.conn) result.index = frame.index tm.assert_frame_equal(result, frame, rtol=1e-3) def test_execute(self): frame = tm.makeTimeDataFrame() create_sql = sql.get_schema(frame, "test") cur = self.conn.cursor() cur.execute(create_sql) ins = "INSERT INTO test VALUES (?, ?, ?, ?)" row = frame.iloc[0] sql.execute(ins, self.conn, params=tuple(row)) self.conn.commit() result = sql.read_sql("select * from test", self.conn) result.index = frame.index[:1] tm.assert_frame_equal(result, frame[:1]) def 
test_schema(self): frame = tm.makeTimeDataFrame() create_sql = sql.get_schema(frame, "test") lines = create_sql.splitlines() for line in lines: tokens = line.split(" ") if len(tokens) == 2 and tokens[0] == "A": assert tokens[1] == "DATETIME" frame = tm.makeTimeDataFrame() create_sql = sql.get_schema(frame, "test", keys=["A", "B"]) lines = create_sql.splitlines() assert 'PRIMARY KEY ("A", "B")' in create_sql cur = self.conn.cursor() cur.execute(create_sql) def test_execute_fail(self): create_sql = """ CREATE TABLE test ( a TEXT, b TEXT, c REAL, PRIMARY KEY (a, b) ); """ cur = self.conn.cursor() cur.execute(create_sql) sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn) sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn) with pytest.raises(sql.DatabaseError, match="Execution failed on sql"): sql.execute('INSERT INTO test VALUES("foo", "bar", 7)', self.conn) def test_execute_closed_connection(self): create_sql = """ CREATE TABLE test ( a TEXT, b TEXT, c REAL, PRIMARY KEY (a, b) ); """ cur = self.conn.cursor() cur.execute(create_sql) sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn) self.conn.close() with tm.external_error_raised(sqlite3.ProgrammingError): tquery("select * from test", con=self.conn) def test_na_roundtrip(self): pass def _check_roundtrip(self, frame): sql.to_sql(frame, name="test_table", con=self.conn, index=False) result = sql.read_sql("select * from test_table", self.conn) # HACK! Change this once indexes are handled properly. result.index = frame.index expected = frame tm.assert_frame_equal(result, expected) frame["txt"] = ["a"] * len(frame) frame2 = frame.copy() new_idx = Index(np.arange(len(frame2))) + 10 frame2["Idx"] = new_idx.copy() sql.to_sql(frame2, name="test_table2", con=self.conn, index=False) result = sql.read_sql("select * from test_table2", self.conn, index_col="Idx") expected = frame.copy() expected.index = new_idx expected.index.name = "Idx" tm.assert_frame_equal(expected, result) def test_keyword_as_column_names(self): df = DataFrame({"From": np.ones(5)}) sql.to_sql(df, con=self.conn, name="testkeywords", index=False) def test_onecolumn_of_integer(self): # GH 3628 # a column_of_integers dataframe should transfer well to sql mono_df = DataFrame([1, 2], columns=["c0"]) sql.to_sql(mono_df, con=self.conn, name="mono_df", index=False) # computing the sum via sql con_x = self.conn the_sum = sum(my_c0[0] for my_c0 in con_x.execute("select * from mono_df")) # it should not fail, and gives 3 ( Issue #3628 ) assert the_sum == 3 result = sql.read_sql("select * from mono_df", con_x) tm.assert_frame_equal(result, mono_df) def test_if_exists(self): df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]}) df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]}) table_name = "table_if_exists" sql_select = f"SELECT * FROM {table_name}" def clean_up(test_table_to_drop): """ Drops tables created from individual tests so no dependencies arise from sequential tests """ self.drop_table(test_table_to_drop) msg = "'notvalidvalue' is not valid for if_exists" with pytest.raises(ValueError, match=msg): sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="notvalidvalue", ) clean_up(table_name) # test if_exists='fail' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail" ) msg = "Table 'table_if_exists' already exists" with pytest.raises(ValueError, match=msg): sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail" ) # test 
if_exists='replace' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="replace", index=False, ) assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")] sql.to_sql( frame=df_if_exists_2, con=self.conn, name=table_name, if_exists="replace", index=False, ) assert tquery(sql_select, con=self.conn) == [(3, "C"), (4, "D"), (5, "E")] clean_up(table_name) # test if_exists='append' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail", index=False, ) assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")] sql.to_sql( frame=df_if_exists_2, con=self.conn, name=table_name, if_exists="append", index=False, ) assert tquery(sql_select, con=self.conn) == [ (1, "A"), (2, "B"), (3, "C"), (4, "D"), (5, "E"), ] clean_up(table_name) @pytest.mark.single @pytest.mark.db @pytest.mark.skip( reason="gh-13611: there is no support for MySQL if SQLAlchemy is not installed" ) class TestXMySQL(MySQLMixIn): @pytest.fixture(autouse=True, scope="class") def setup_class(cls): pymysql = pytest.importorskip("pymysql") pymysql.connect(host="localhost", user="root", passwd="", db="pandas") try: pymysql.connect(read_default_group="pandas") except pymysql.ProgrammingError as err: raise RuntimeError( "Create a group of connection parameters under the heading " "[pandas] in your system's mysql default file, " "typically located at ~/.my.cnf or /etc/.my.cnf." ) from err except pymysql.Error as err: raise RuntimeError( "Cannot connect to database. " "Create a group of connection parameters under the heading " "[pandas] in your system's mysql default file, " "typically located at ~/.my.cnf or /etc/.my.cnf." ) from err @pytest.fixture(autouse=True) def setup_method(self, request, datapath): pymysql = pytest.importorskip("pymysql") pymysql.connect(host="localhost", user="root", passwd="", db="pandas") try: pymysql.connect(read_default_group="pandas") except pymysql.ProgrammingError as err: raise RuntimeError( "Create a group of connection parameters under the heading " "[pandas] in your system's mysql default file, " "typically located at ~/.my.cnf or /etc/.my.cnf." ) from err except pymysql.Error as err: raise RuntimeError( "Cannot connect to database. " "Create a group of connection parameters under the heading " "[pandas] in your system's mysql default file, " "typically located at ~/.my.cnf or /etc/.my.cnf." 
) from err self.method = request.function def test_basic(self): frame = tm.makeTimeDataFrame() self._check_roundtrip(frame) def test_write_row_by_row(self): frame = tm.makeTimeDataFrame() frame.iloc[0, 0] = np.nan drop_sql = "DROP TABLE IF EXISTS test" create_sql = sql.get_schema(frame, "test") cur = self.conn.cursor() cur.execute(drop_sql) cur.execute(create_sql) ins = "INSERT INTO test VALUES (%s, %s, %s, %s)" for idx, row in frame.iterrows(): fmt_sql = format_query(ins, *row) tquery(fmt_sql, cur=cur) self.conn.commit() result = sql.read_sql("select * from test", con=self.conn) result.index = frame.index tm.assert_frame_equal(result, frame, rtol=1e-3) # GH#32571 result comes back rounded to 6 digits in some builds; # no obvious pattern def test_chunksize_read_type(self): frame = tm.makeTimeDataFrame() frame.index.name = "index" drop_sql = "DROP TABLE IF EXISTS test" cur = self.conn.cursor() cur.execute(drop_sql) sql.to_sql(frame, name="test", con=self.conn) query = "select * from test" chunksize = 5 chunk_gen = pd.read_sql_query( sql=query, con=self.conn, chunksize=chunksize, index_col="index" ) chunk_df = next(chunk_gen) tm.assert_frame_equal(frame[:chunksize], chunk_df) def test_execute(self): frame = tm.makeTimeDataFrame() drop_sql = "DROP TABLE IF EXISTS test" create_sql = sql.get_schema(frame, "test") cur = self.conn.cursor() with warnings.catch_warnings(): warnings.filterwarnings("ignore", "Unknown table.*") cur.execute(drop_sql) cur.execute(create_sql) ins = "INSERT INTO test VALUES (%s, %s, %s, %s)" row = frame.iloc[0].values.tolist() sql.execute(ins, self.conn, params=tuple(row)) self.conn.commit() result = sql.read_sql("select * from test", self.conn) result.index = frame.index[:1] tm.assert_frame_equal(result, frame[:1]) def test_schema(self): frame = tm.makeTimeDataFrame() create_sql = sql.get_schema(frame, "test") lines = create_sql.splitlines() for line in lines: tokens = line.split(" ") if len(tokens) == 2 and tokens[0] == "A": assert tokens[1] == "DATETIME" frame = tm.makeTimeDataFrame() drop_sql = "DROP TABLE IF EXISTS test" create_sql = sql.get_schema(frame, "test", keys=["A", "B"]) lines = create_sql.splitlines() assert "PRIMARY KEY (`A`, `B`)" in create_sql cur = self.conn.cursor() cur.execute(drop_sql) cur.execute(create_sql) def test_execute_fail(self): drop_sql = "DROP TABLE IF EXISTS test" create_sql = """ CREATE TABLE test ( a TEXT, b TEXT, c REAL, PRIMARY KEY (a(5), b(5)) ); """ cur = self.conn.cursor() cur.execute(drop_sql) cur.execute(create_sql) sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn) sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn) with pytest.raises(Exception, match="<insert message here>"): sql.execute('INSERT INTO test VALUES("foo", "bar", 7)', self.conn) def test_execute_closed_connection(self, request, datapath): drop_sql = "DROP TABLE IF EXISTS test" create_sql = """ CREATE TABLE test ( a TEXT, b TEXT, c REAL, PRIMARY KEY (a(5), b(5)) ); """ cur = self.conn.cursor() cur.execute(drop_sql) cur.execute(create_sql) sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn) self.conn.close() with pytest.raises(Exception, match="<insert message here>"): tquery("select * from test", con=self.conn) # Initialize connection again (needed for tearDown) self.setup_method(request, datapath) def test_na_roundtrip(self): pass def _check_roundtrip(self, frame): drop_sql = "DROP TABLE IF EXISTS test_table" cur = self.conn.cursor() with warnings.catch_warnings(): warnings.filterwarnings("ignore", "Unknown 
table.*") cur.execute(drop_sql) sql.to_sql(frame, name="test_table", con=self.conn, index=False) result = sql.read_sql("select * from test_table", self.conn) # HACK! Change this once indexes are handled properly. result.index = frame.index result.index.name = frame.index.name expected = frame tm.assert_frame_equal(result, expected) frame["txt"] = ["a"] * len(frame) frame2 = frame.copy() index = Index(np.arange(len(frame2))) + 10 frame2["Idx"] = index drop_sql = "DROP TABLE IF EXISTS test_table2" cur = self.conn.cursor() with warnings.catch_warnings(): warnings.filterwarnings("ignore", "Unknown table.*") cur.execute(drop_sql) sql.to_sql(frame2, name="test_table2", con=self.conn, index=False) result = sql.read_sql("select * from test_table2", self.conn, index_col="Idx") expected = frame.copy() # HACK! Change this once indexes are handled properly. expected.index = index expected.index.names = result.index.names tm.assert_frame_equal(expected, result) def test_keyword_as_column_names(self): df = DataFrame({"From": np.ones(5)}) sql.to_sql( df, con=self.conn, name="testkeywords", if_exists="replace", index=False ) def test_if_exists(self): df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]}) df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]}) table_name = "table_if_exists" sql_select = f"SELECT * FROM {table_name}" def clean_up(test_table_to_drop): """ Drops tables created from individual tests so no dependencies arise from sequential tests """ self.drop_table(test_table_to_drop) # test if invalid value for if_exists raises appropriate error with pytest.raises(ValueError, match="<insert message here>"): sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="notvalidvalue", ) clean_up(table_name) # test if_exists='fail' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail", index=False, ) with pytest.raises(ValueError, match="<insert message here>"): sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail" ) # test if_exists='replace' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="replace", index=False, ) assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")] sql.to_sql( frame=df_if_exists_2, con=self.conn, name=table_name, if_exists="replace", index=False, ) assert tquery(sql_select, con=self.conn) == [(3, "C"), (4, "D"), (5, "E")] clean_up(table_name) # test if_exists='append' sql.to_sql( frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail", index=False, ) assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")] sql.to_sql( frame=df_if_exists_2, con=self.conn, name=table_name, if_exists="append", index=False, ) assert tquery(sql_select, con=self.conn) == [ (1, "A"), (2, "B"), (3, "C"), (4, "D"), (5, "E"), ] clean_up(table_name)
gfyoung/pandas
pandas/tests/io/test_sql.py
pandas/core/arrays/__init__.py
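A minimal sketch, not part of the test suite, of the round-trip behavior that test_datetime_with_timezone_roundtrip above pins down, run against an in-memory SQLite engine (sqlalchemy assumed installed):

import pandas as pd
import sqlalchemy

engine = sqlalchemy.create_engine("sqlite:///:memory:")
df = pd.DataFrame({"A": pd.date_range("2013-01-01 09:00", periods=3, tz="US/Pacific")})
df.to_sql("demo_tz", engine, index=False)

# SQLite has no timezone-aware column type, so the values come back naive
# (local wall-clock times); PostgreSQL would return them localized to UTC.
result = pd.read_sql_table("demo_tz", engine)
assert result["A"].dt.tz is None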
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Feb 25 05:46:36 2020 @author: deborahkhider Contains all relevant mapping functions """ __all__=['map_all'] import cartopy.crs as ccrs import cartopy.feature as cfeature import matplotlib.pyplot as plt import numpy as np import pandas as pd from .plotting import savefig, showfig def set_proj(projection='Robinson', proj_default = True): """ Set the projection for Cartopy. Parameters ---------- projection : string the map projection. Available projections: 'Robinson' (default), 'PlateCarree', 'AlbersEqualArea', 'AzimuthalEquidistant','EquidistantConic','LambertConformal', 'LambertCylindrical','Mercator','Miller','Mollweide','Orthographic', 'Sinusoidal','Stereographic','TransverseMercator','UTM', 'InterruptedGoodeHomolosine','RotatedPole','OSGB','EuroPP', 'Geostationary','NearsidePerspective','EckertI','EckertII', 'EckertIII','EckertIV','EckertV','EckertVI','EqualEarth','Gnomonic', 'LambertAzimuthalEqualArea','NorthPolarStereo','OSNI','SouthPolarStereo' proj_default : bool If True, uses the standard projection attributes from Cartopy. Enter new attributes in a dictionary to change them. Lists of attributes can be found in the Cartopy documentation: https://scitools.org.uk/cartopy/docs/latest/crs/projections.html#eckertiv Returns ------- proj : the Cartopy projection object See Also -------- pyleoclim.utils.mapping.map_all : mapping function making use of the projection """ if proj_default is not True and type(proj_default) is not dict: raise TypeError('The default for the projections should either be provided'+ ' as a dictionary or set to True') # Set the projection if projection == 'Robinson': if proj_default is True: proj = ccrs.Robinson() else: proj = ccrs.Robinson(**proj_default) elif projection == 'PlateCarree': if proj_default is True: proj = ccrs.PlateCarree() else: proj = ccrs.PlateCarree(**proj_default) elif projection == 'AlbersEqualArea': if proj_default is True: proj = ccrs.AlbersEqualArea() else: proj = ccrs.AlbersEqualArea(**proj_default) elif projection == 'AzimuthalEquidistant': if proj_default is True: proj = ccrs.AzimuthalEquidistant() else: proj = ccrs.AzimuthalEquidistant(**proj_default) elif projection == 'EquidistantConic': if proj_default is True: proj = ccrs.EquidistantConic() else: proj = ccrs.EquidistantConic(**proj_default) elif projection == 'LambertConformal': if proj_default is True: proj = ccrs.LambertConformal() else: proj = ccrs.LambertConformal(**proj_default) elif projection == 'LambertCylindrical': if proj_default is True: proj = ccrs.LambertCylindrical() else: proj = ccrs.LambertCylindrical(**proj_default) elif projection == 'Mercator': if proj_default is True: proj = ccrs.Mercator() else: proj = ccrs.Mercator(**proj_default) elif projection == 'Miller': if proj_default is True: proj = ccrs.Miller() else: proj = ccrs.Miller(**proj_default) elif projection == 'Mollweide': if proj_default is True: proj = ccrs.Mollweide() else: proj = ccrs.Mollweide(**proj_default) elif projection == 'Orthographic': if proj_default is True: proj = ccrs.Orthographic() else: proj = ccrs.Orthographic(**proj_default) elif projection == 'Sinusoidal': if proj_default is True: proj = ccrs.Sinusoidal() else: proj = ccrs.Sinusoidal(**proj_default) elif projection == 'Stereographic': if proj_default is True: proj = ccrs.Stereographic() else: proj = ccrs.Stereographic(**proj_default) elif projection == 'TransverseMercator': if proj_default is True: proj = ccrs.TransverseMercator() else: proj =
ccrs.TransverseMercator(**proj_default) elif projection == 'UTM': if proj_default is True: proj = ccrs.UTM() else: proj = ccrs.UTM(**proj_default) elif projection == 'InterruptedGoodeHomolosine': if proj_default is True: proj = ccrs.InterruptedGoodeHomolosine() else: proj = ccrs.InterruptedGoodeHomolosine(**proj_default) elif projection == 'RotatedPole': if proj_default is True: proj = ccrs.RotatedPole() else: proj = ccrs.RotatedPole(**proj_default) elif projection == 'OSGB': if proj_default is True: proj = ccrs.OSGB() else: proj = ccrs.OSGB(**proj_default) elif projection == 'EuroPP': if proj_default is True: proj = ccrs.EuroPP() else: proj = ccrs.EuroPP(**proj_default) elif projection == 'Geostationary': if proj_default is True: proj = ccrs.Geostationary() else: proj = ccrs.Geostationary(**proj_default) elif projection == 'NearsidePerspective': if proj_default is True: proj = ccrs.NearsidePerspective() else: proj = ccrs.NearsidePerspective(**proj_default) elif projection == 'EckertI': if proj_default is True: proj = ccrs.EckertI() else: proj = ccrs.EckertI(**proj_default) elif projection == 'EckertII': if proj_default is True: proj = ccrs.EckertII() else: proj = ccrs.EckertII(**proj_default) elif projection == 'EckertIII': if proj_default is True: proj = ccrs.EckertIII() else: proj = ccrs.EckertIII(**proj_default) elif projection == 'EckertIV': if proj_default is True: proj = ccrs.EckertIV() else: proj = ccrs.EckertIV(**proj_default) elif projection == 'EckertV': if proj_default is True: proj = ccrs.EckertV() else: proj = ccrs.EckertV(**proj_default) elif projection == 'EckertVI': if proj_default is True: proj = ccrs.EckertVI() else: proj = ccrs.EckertVI(**proj_default) elif projection == 'EqualEarth': if proj_default is True: proj = ccrs.EqualEarth() else: proj = ccrs.EqualEarth(**proj_default) elif projection == 'Gnomonic': if proj_default is True: proj = ccrs.Gnomonic() else: proj = ccrs.Gnomonic(**proj_default) elif projection == 'LambertAzimuthalEqualArea': if proj_default is True: proj = ccrs.LambertAzimuthalEqualArea() else: proj = ccrs.LambertAzimuthalEqualArea(**proj_default) elif projection == 'NorthPolarStereo': if proj_default is True: proj = ccrs.NorthPolarStereo() else: proj = ccrs.NorthPolarStereo(**proj_default) elif projection == 'OSNI': if proj_default is True: proj = ccrs.OSNI() else: proj = ccrs.OSNI(**proj_default) elif projection == 'SouthPolarStereo': if proj_default is True: proj = ccrs.SouthPolarStereo() else: proj = ccrs.SouthPolarStereo(**proj_default) else: raise ValueError('Invalid projection type') return proj def map_all(lat, lon, criteria, marker=None, color =None, projection = 'Robinson', proj_default = True, background = True,borders = False, rivers = False, lakes = False, figsize = None, ax = None, scatter_kwargs=None, legend=True, lgd_kwargs=None,savefig_settings=None, mute=False): """ Map the location of all lat/lon according to some criteria Map the location of all lat/lon according to some criteria. Based on functions defined in the Cartopy package. Parameters ---------- lat : list a list of latitudes. lon : list a list of longitudes. criteria : list a list of unique criteria for plotting purposes. For instance, a map by the types of archive present in the dataset or proxy observations.
Should have the same length as lon/lat. marker : list a list of possible markers for each criterion. If None, will use pyleoclim default color : list a list of possible colors for each criterion. If None, will use pyleoclim default projection : string the map projection. Available projections: 'Robinson' (default), 'PlateCarree', 'AlbersEqualArea', 'AzimuthalEquidistant','EquidistantConic','LambertConformal', 'LambertCylindrical','Mercator','Miller','Mollweide','Orthographic', 'Sinusoidal','Stereographic','TransverseMercator','UTM', 'InterruptedGoodeHomolosine','RotatedPole','OSGB','EuroPP', 'Geostationary','NearsidePerspective','EckertI','EckertII', 'EckertIII','EckertIV','EckertV','EckertVI','EqualEarth','Gnomonic', 'LambertAzimuthalEqualArea','NorthPolarStereo','OSNI','SouthPolarStereo' proj_default : bool If True, uses the standard projection attributes. Enter new attributes in a dictionary to change them. Lists of attributes can be found in the Cartopy documentation: https://scitools.org.uk/cartopy/docs/latest/crs/projections.html#eckertiv background : bool If True, uses a shaded relief background (only one available in Cartopy) borders : bool Draws the country borders. Default is off (False). rivers : bool Draws major rivers. Default is off (False). lakes : bool Draws major lakes. Default is off (False). figsize : list the size for the figure ax : axis, optional Return as axis instead of figure (useful to integrate plot into a subplot) scatter_kwargs : dict Dictionary of arguments available in matplotlib.pyplot.scatter (https://matplotlib.org/3.2.1/api/_as_gen/matplotlib.pyplot.scatter.html). legend : bool Whether to draw a legend on the figure lgd_kwargs : dict Dictionary of arguments for matplotlib.pyplot.legend (https://matplotlib.org/3.2.1/api/_as_gen/matplotlib.pyplot.legend.html) savefig_settings : dict Dictionary of arguments for matplotlib.pyplot.savefig.
- "path" must be specified; it can be any existed or non-existed path, with or without a suffix; if the suffix is not given in "path", it will follow "format" - "format" can be one of {"pdf", "eps", "png", "ps"} mute : bool if True, the plot will not show; recommend to set to true when more modifications are going to be made on ax Returns ------- ax: The figure, or axis if ax specified See Also -------- pyleoclim.utils.mapping.set_proj : Set the projection for Cartopy-based maps """ #Check that the lists have the same length and convert to numpy arrays if len(lat)!=len(lon) or len(lat)!=len(criteria) or len(lon)!=len(criteria): raise ValueError("Latitude, Longitude, and criteria list must be the same" +\ "length") # Check that the default is set to True or in dictionary format if proj_default is not True and type(proj_default) is not dict: raise TypeError('The default for the projections should either be provided'+ ' as a dictionary or set to True') # handle dict defaults savefig_settings={} if savefig_settings is None else savefig_settings.copy() scatter_kwargs = {} if scatter_kwargs is None else scatter_kwargs.copy() lgd_kwargs = {} if lgd_kwargs is None else lgd_kwargs.copy() if marker!=None: if 'marker' in scatter_kwargs.keys(): print('marker has been set as a parameter to the map_all function, overriding scatter_kwargs') del scatter_kwargs['marker'] if len(marker)!=len(criteria): raise ValueError('The marker vector should have the same length as the criteria vector') if color!=None: if 'facecolor' in scatter_kwargs.keys(): print('facecolor has been set as a parameter to the map_all function, overriding scatter_kwargs') del scatter_kwargs['facecolor'] if len(color)!=len(criteria): raise ValueError('The color vector should have the same length as the criteria vector') #get unique criteria/color/marker color_data=pd.DataFrame({'criteria':criteria,'color':color,'marker':marker}) palette = color_data.drop_duplicates(subset='criteria') # get the projection: proj = set_proj(projection=projection, proj_default=proj_default) data_crs = ccrs.PlateCarree() # Make the figure if ax is None: fig, ax = plt.subplots(figsize=figsize,subplot_kw=dict(projection=proj)) # draw the coastlines ax.coastlines() # Background if background is True: ax.stock_img() #Other extra information if borders is True: ax.add_feature(cfeature.BORDERS) if lakes is True: ax.add_feature(cfeature.LAKES) if rivers is True: ax.add_feature(cfeature.RIVERS) # Get the indexes by criteria if color==None and marker==None: for crit in set(criteria): # Grab the indices with same criteria index = [i for i,x in enumerate(criteria) if x == crit] ax.scatter(np.array(lon)[index],np.array(lat)[index], zorder = 10, label = crit, transform=data_crs, **scatter_kwargs) elif color==None and marker!=None: for crit in set(criteria): # Grab the indices with same criteria index = [i for i,x in enumerate(criteria) if x == crit] ax.scatter(np.array(lon)[index],np.array(lat)[index], zorder = 10, label = crit, transform=data_crs, marker = palette[palette['criteria']==crit]['marker'].iloc[0], **scatter_kwargs) elif color!=None and marker==None: for crit in set(criteria): # Grab the indices with same criteria index = [i for i,x in enumerate(criteria) if x == crit] ax.scatter(np.array(lon)[index],np.array(lat)[index], zorder = 10, label = crit, transform=data_crs, facecolor = palette[palette['criteria']==crit]['color'].iloc[0], **scatter_kwargs) elif color!=None and marker!=None: for crit in set(criteria): # Grab the indices with same criteria index = [i 
for i,x in enumerate(criteria) if x == crit] ax.scatter(np.array(lon)[index],np.array(lat)[index], zorder = 10, label = crit, transform=data_crs, facecolor = palette[palette['criteria']==crit]['color'].iloc[0], marker=palette[palette['criteria']==crit]['marker'].iloc[0], **scatter_kwargs) if legend == True: ax.legend(**lgd_kwargs) else: ax.legend().remove() if 'fig' in locals(): if 'path' in savefig_settings: savefig(fig, settings=savefig_settings) else: if not mute: showfig(fig) return fig, ax else: return ax
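Since every branch of set_proj does the same two-line lookup, the whole chain could be collapsed by resolving the projection class by name on cartopy.crs. A sketch of that alternative (my rewrite, not the library's own code), with the caveat that it accepts any ccrs attribute rather than only the documented list:

import cartopy.crs as ccrs

def set_proj_compact(projection='Robinson', proj_default=True):
    # Look the projection class up by name instead of enumerating branches.
    try:
        proj_class = getattr(ccrs, projection)
    except AttributeError:
        raise ValueError('Invalid projection type')
    return proj_class() if proj_default is True else proj_class(**proj_default)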
''' Tests for pyleoclim.core.ui.Scalogram Naming rules: 1. class: Test{filename}{Class}{method} with appropriate camel case 2. function: test_{method}_t{test_id} Notes on how to test: 0. Make sure [pytest](https://docs.pytest.org) has been installed: `pip install pytest` 1. execute `pytest {directory_path}` in terminal to perform all tests in all testing files inside the specified directory 2. execute `pytest {file_path}` in terminal to perform all tests in the specified file 3. execute `pytest {file_path}::{TestClass}::{test_method}` in terminal to perform a specific test class/method inside the specified file 4. after `pip install pytest-xdist`, one may execute "pytest -n 4" to test in parallel with number of workers specified by `-n` 5. for more details, see https://docs.pytest.org/en/stable/usage.html ''' import numpy as np import pandas as pd from numpy.testing import assert_array_equal from pandas.testing import assert_frame_equal import pytest import pyleoclim as pyleo from pyleoclim.utils.tsmodel import ( ar1_sim, colored_noise, ) # a collection of useful functions def gen_normal(loc=0, scale=1, nt=100): ''' Generate random data with a Gaussian distribution ''' t = np.arange(nt) v = np.random.normal(loc=loc, scale=scale, size=nt) return t, v def gen_colored_noise(alpha=1, nt=100, f0=None, m=None, seed=None): ''' Generate colored noise ''' t = np.arange(nt) v = colored_noise(alpha=alpha, t=t, f0=f0, m=m, seed=seed) return t, v # Tests below class TestUiScalogramSignifTest: ''' Tests for Scalogram.signif_test() ''' def test_signif_test_t0(self): ''' Test scalogram.signif_test() with default parameters ''' alpha = 1 t, v = gen_colored_noise(nt=100, alpha=alpha) ts = pyleo.Series(time=t, value=v) scal = ts.wavelet() scal_signif = scal.signif_test(number=1)
LinkedEarth/Pyleoclim_util
pyleoclim/tests/test_ui_Scalogram.py
pyleoclim/utils/mapping.py
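A hypothetical minimal call of map_all from the module above (coordinates and labels invented for illustration; assumes pyleoclim and cartopy are installed). The criteria list drives both the grouping of points and the legend labels:

from pyleoclim.utils.mapping import map_all

lat = [32.0, -5.0, 48.5]
lon = [110.0, 150.0, -120.0]
criteria = ['tree ring', 'coral', 'tree ring']  # one label per point
fig, ax = map_all(lat, lon, criteria, projection='Robinson',
                  background=False, figsize=[10, 4])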
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. from __future__ import with_statement from decimal import Decimal from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.functional import cached_property from django.utils.translation import ugettext_lazy as _ from parler.models import TranslatableModel, TranslatedFields from shuup.core.fields import InternalIdentifierField from shuup.utils.numbers import bankers_round, parse_decimal_string __all__ = ("SalesUnit",) @python_2_unicode_compatible class SalesUnit(TranslatableModel): identifier = InternalIdentifierField(unique=True) decimals = models.PositiveSmallIntegerField(default=0, verbose_name=_(u"allowed decimals")) translations = TranslatedFields( name=models.CharField(max_length=128, verbose_name=_('name')), short_name=models.CharField(max_length=128, verbose_name=_('short name')), ) class Meta: verbose_name = _('sales unit') verbose_name_plural = _('sales units') def __str__(self): return self.safe_translation_getter("name", default=None) @property def allow_fractions(self): return self.decimals > 0 @cached_property def quantity_step(self): """ Get the quantity increment for the amount of decimals this unit allows. For 0 decimals, this will be 1; for 1 decimal, 0.1; etc. :return: Decimal in (0..1] :rtype: Decimal """ # This particular syntax (`10 ^ -n`) is the same that `bankers_round` uses # to figure out the quantizer. return Decimal(10) ** (-int(self.decimals)) def round(self, value): return bankers_round(parse_decimal_string(value), self.decimals)
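The quantity_step/round pair above reduces to a 10 ** -n quantizer. A standalone sketch of the arithmetic, assuming bankers_round rounds half to even as the name suggests:

from decimal import Decimal, ROUND_HALF_EVEN

def quantity_step(decimals):
    # Same quantizer SalesUnit.quantity_step computes.
    return Decimal(10) ** (-int(decimals))

assert quantity_step(0) == Decimal('1')
assert quantity_step(2) == Decimal('0.01')
# Ties go to the even neighbour under half-even rounding:
assert Decimal('1.005').quantize(quantity_step(2), rounding=ROUND_HALF_EVEN) == Decimal('1.00')
assert Decimal('1.015').quantize(quantity_step(2), rounding=ROUND_HALF_EVEN) == Decimal('1.02')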
# -*- coding: utf-8 -*- import pytest from django.forms import formset_factory from shuup.admin.modules.products.forms import ( PackageChildForm, PackageChildFormSet ) from shuup.admin.modules.products.utils import clear_existing_package from shuup.core.models import ProductMode from shuup.testing.factories import create_product from shuup.utils.excs import Problem from shuup_tests.utils import printable_gibberish from shuup_tests.utils.forms import get_form_data @pytest.mark.django_db def test_package_child_formset(): FormSet = formset_factory(PackageChildForm, PackageChildFormSet, extra=5, can_delete=True) parent = create_product(printable_gibberish()) child = create_product(printable_gibberish()) # No products in the package formset = FormSet(parent_product=parent) assert formset.initial_form_count() == 0 # No children yet assert not parent.get_all_package_children() data = dict(get_form_data(formset, True), **{"form-0-child": child.pk, "form-0-quantity": 2}) formset = FormSet(parent_product=parent, data=data) formset.save() assert parent.get_all_package_children() clear_existing_package(parent) assert not parent.get_all_package_children() @pytest.mark.django_db def test_product_not_in_normal_mode(): FormSet = formset_factory(PackageChildForm, PackageChildFormSet, extra=5, can_delete=True) parent = create_product(printable_gibberish()) child_1 = create_product(printable_gibberish()) child_1.link_to_parent(parent) child_2 = create_product(printable_gibberish()) parent.verify_mode() assert parent.mode == ProductMode.SIMPLE_VARIATION_PARENT # Trying to create a package from a non-normal mode product with pytest.raises(Problem): formset = FormSet(parent_product=parent) data = dict(get_form_data(formset, True), **{"form-0-child": child_2.pk, "form-0-quantity": 2}) formset = FormSet(parent_product=parent, data=data) formset.save() @pytest.mark.django_db def test_cannot_add_product_to_own_package(rf): FormSet = formset_factory(PackageChildForm, PackageChildFormSet, extra=5, can_delete=True) parent = create_product(printable_gibberish()) # No products in the package formset = FormSet(parent_product=parent) assert formset.initial_form_count() == 0 # No children yet assert not parent.get_all_package_children() # Try to add a product to its own package data = dict(get_form_data(formset, True), **{"form-0-child": parent.pk, "form-0-quantity": 2}) formset = FormSet(parent_product=parent, data=data) formset.save() assert not parent.get_all_package_children()
hrayr-artunyan/shuup
shuup_tests/admin/test_product_package.py
shuup/core/models/_units.py
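The get_form_data(formset, True) idiom in the tests above merges the formset's rendered field values with per-test overrides; at minimum, the resulting POST dict must carry Django's management-form keys. A hypothetical standalone illustration (plain Django conventions, no shuup helpers, pk value invented):

data = {
    "form-TOTAL_FORMS": "5",       # matches extra=5
    "form-INITIAL_FORMS": "0",
    "form-MIN_NUM_FORMS": "0",
    "form-MAX_NUM_FORMS": "1000",
    "form-0-child": "42",          # hypothetical child product pk
    "form-0-quantity": "2",
}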
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

import decimal

from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from enumfields import Enum
from parler.models import TranslatableModel, TranslatedField, TranslatedFields

from shuup.core.fields import MeasurementField, MoneyValueField

from ._service_base import (
    ServiceBehaviorComponent, ServiceCost,
    TranslatableServiceBehaviorComponent
)


class FixedCostBehaviorComponent(TranslatableServiceBehaviorComponent):
    name = _("Fixed cost")
    help_text = _("Add a fixed cost to the price of the service.")

    price_value = MoneyValueField()
    description = TranslatedField(any_language=True)

    translations = TranslatedFields(
        description=models.CharField(max_length=100, blank=True, verbose_name=_("description")),
    )

    def get_costs(self, service, source):
        price = source.create_price(self.price_value)
        description = self.safe_translation_getter('description')
        yield ServiceCost(price, description)


class WaivingCostBehaviorComponent(TranslatableServiceBehaviorComponent):
    name = _("Waiving cost")
    help_text = _(
        "Add a cost to the price of the service if the total price "
        "of the products is less than a waive limit.")

    price_value = MoneyValueField()
    waive_limit_value = MoneyValueField()
    description = TranslatedField(any_language=True)

    translations = TranslatedFields(
        description=models.CharField(max_length=100, blank=True, verbose_name=_("description")),
    )

    def get_costs(self, service, source):
        waive_limit = source.create_price(self.waive_limit_value)
        product_total = source.total_price_of_products
        price = source.create_price(self.price_value)
        description = self.safe_translation_getter('description')
        zero_price = source.create_price(0)
        if product_total and product_total >= waive_limit:
            yield ServiceCost(zero_price, description, base_price=price)
        else:
            yield ServiceCost(price, description)


class WeightLimitsBehaviorComponent(ServiceBehaviorComponent):
    name = _("Weight limits")
    help_text = _(
        "Limit the availability of the service based on the "
        "total weight of the products.")

    min_weight = models.DecimalField(
        max_digits=36, decimal_places=6, blank=True, null=True,
        verbose_name=_("minimum weight"))
    max_weight = models.DecimalField(
        max_digits=36, decimal_places=6, blank=True, null=True,
        verbose_name=_("maximum weight"))

    def get_unavailability_reasons(self, service, source):
        weight = sum(((x.get("weight") or 0) for x in source.get_lines()), 0)
        if self.min_weight:
            if weight < self.min_weight:
                yield ValidationError(_("Minimum weight not met."), code="min_weight")
        if self.max_weight:
            if weight > self.max_weight:
                yield ValidationError(_("Maximum weight exceeded."), code="max_weight")


class WeightBasedPriceRange(TranslatableModel):
    component = models.ForeignKey(
        "WeightBasedPricingBehaviorComponent",
        related_name="ranges",
        on_delete=models.CASCADE
    )
    min_value = MeasurementField(unit="g", verbose_name=_("min weight"), blank=True, null=True)
    max_value = MeasurementField(unit="g", verbose_name=_("max weight"), blank=True, null=True)
    price_value = MoneyValueField()
    description = TranslatedField(any_language=True)

    translations = TranslatedFields(
        description=models.CharField(max_length=100, blank=True, verbose_name=_("description")),
    )

    def matches_to_value(self, value):
        return _is_in_range(value, self.min_value, self.max_value)


def _is_in_range(value, min_value, max_value):
    """
    Helper function to check whether a value matches a
    ``WeightBasedPriceRange``.

    If min_value is None, the max_value alone determines whether the
    range matches. None as a max_value represents infinity. The min
    value is part of the range only when it is zero; the max value is
    always part of the range.

    :type value: decimal.Decimal
    :type min_value: MeasurementField
    :type max_value: MeasurementField
    :rtype: bool
    """
    if value is None:
        return False
    if (not (min_value or max_value)) or (min_value == max_value == value):
        return True
    if (not min_value or value > min_value) and (max_value is None or value <= max_value):
        return True
    return False


class WeightBasedPricingBehaviorComponent(ServiceBehaviorComponent):
    name = _("Weight-based pricing")
    help_text = _(
        "Define the price based on the basket weight. "
        "A range minimum is part of the range only when it is zero.")

    def _get_matching_range_with_lowest_price(self, source):
        total_gross_weight = source.total_gross_weight
        matching_ranges = [
            range for range in self.ranges.all()
            if range.matches_to_value(total_gross_weight)]
        if not matching_ranges:
            return
        return min(matching_ranges, key=lambda x: x.price_value)

    def get_costs(self, service, source):
        range = self._get_matching_range_with_lowest_price(source)
        if range:
            price = source.create_price(range.price_value)
            description = range.safe_translation_getter('description')
            yield ServiceCost(price, description)

    def get_unavailability_reasons(self, service, source):
        range = self._get_matching_range_with_lowest_price(source)
        if not range:
            yield ValidationError(_("Weight does not match any range."), code="out_of_range")


class GroupAvailabilityBehaviorComponent(ServiceBehaviorComponent):
    name = _("Contact group availability")
    help_text = _("Limit service availability to specific contact groups.")

    groups = models.ManyToManyField("ContactGroup", verbose_name=_("groups"))

    def get_unavailability_reasons(self, service, source):
        if source.customer and not source.customer.pk:
            yield ValidationError(_("Customer does not belong to any group."))
            return
        customer_groups = set(source.customer.groups.all().values_list("pk", flat=True))
        groups_to_match = set(self.groups.all().values_list("pk", flat=True))
        if not bool(customer_groups & groups_to_match):
            yield ValidationError(_("Service is not available for any of the customer's groups."))


class StaffOnlyBehaviorComponent(ServiceBehaviorComponent):
    name = _("Staff only availability")
    help_text = _("Limit service availability to staff only.")

    def get_unavailability_reasons(self, service, source):
        if not source.creator or not source.creator.is_staff:
            yield ValidationError(_("Service is only available for staff."))


class RoundingMode(Enum):
    ROUND_HALF_UP = decimal.ROUND_HALF_UP
    ROUND_HALF_DOWN = decimal.ROUND_HALF_DOWN
    ROUND_UP = decimal.ROUND_UP
    ROUND_DOWN = decimal.ROUND_DOWN

    class Labels:
        ROUND_HALF_UP = _("round to nearest with ties going away from zero")
        ROUND_HALF_DOWN = _("round to nearest with ties going towards zero")
        ROUND_UP = _("round away from zero")
        ROUND_DOWN = _("round towards zero")
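The boundary rules implemented by `_is_in_range` above are easiest to confirm with concrete values. Below is a minimal standalone sketch: the function body is duplicated from the module only so the snippet runs without a Django/Shuup installation (an assumption made for illustration; real callers go through `WeightBasedPriceRange.matches_to_value`).

from decimal import Decimal

def _is_in_range(value, min_value, max_value):
    # Body duplicated verbatim from shuup/core/models/_service_behavior.py
    # so this sketch runs without Django/Shuup installed.
    if value is None:
        return False
    if (not (min_value or max_value)) or (min_value == max_value == value):
        return True
    if (not min_value or value > min_value) and (max_value is None or value <= max_value):
        return True
    return False

# The max value is always part of the range; a non-zero min value is not.
assert _is_in_range(Decimal(500), Decimal(100), Decimal(500))      # upper bound included
assert not _is_in_range(Decimal(100), Decimal(100), Decimal(500))  # non-zero lower bound excluded
assert _is_in_range(Decimal(0), Decimal(0), Decimal(500))          # zero minimum is included
assert _is_in_range(Decimal(10 ** 6), Decimal(100), None)          # None max_value means infinity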
# -*- coding: utf-8 -*-
import pytest
from django.forms import formset_factory

from shuup.admin.modules.products.forms import (
    PackageChildForm, PackageChildFormSet
)
from shuup.admin.modules.products.utils import clear_existing_package
from shuup.core.models import ProductMode
from shuup.testing.factories import create_product
from shuup.utils.excs import Problem
from shuup_tests.utils import printable_gibberish
from shuup_tests.utils.forms import get_form_data


@pytest.mark.django_db
def test_package_child_formset():
    FormSet = formset_factory(PackageChildForm, PackageChildFormSet, extra=5, can_delete=True)
    parent = create_product(printable_gibberish())
    child = create_product(printable_gibberish())

    # No products in the package
    formset = FormSet(parent_product=parent)
    assert formset.initial_form_count() == 0  # No children yet
    assert not parent.get_all_package_children()

    data = dict(get_form_data(formset, True), **{"form-0-child": child.pk, "form-0-quantity": 2})
    formset = FormSet(parent_product=parent, data=data)
    formset.save()
    assert parent.get_all_package_children()

    clear_existing_package(parent)
    assert not parent.get_all_package_children()


@pytest.mark.django_db
def test_product_not_in_normal_mode():
    FormSet = formset_factory(PackageChildForm, PackageChildFormSet, extra=5, can_delete=True)
    parent = create_product(printable_gibberish())
    child_1 = create_product(printable_gibberish())
    child_1.link_to_parent(parent)
    child_2 = create_product(printable_gibberish())
    parent.verify_mode()

    assert parent.mode == ProductMode.SIMPLE_VARIATION_PARENT

    # Trying to create a package from a non-normal mode product
    with pytest.raises(Problem):
        formset = FormSet(parent_product=parent)
        data = dict(get_form_data(formset, True), **{"form-0-child": child_2.pk, "form-0-quantity": 2})
        formset = FormSet(parent_product=parent, data=data)
        formset.save()


@pytest.mark.django_db
def test_cannot_add_product_to_own_package(rf):
    FormSet = formset_factory(PackageChildForm, PackageChildFormSet, extra=5, can_delete=True)
    parent = create_product(printable_gibberish())

    # No products in the package
    formset = FormSet(parent_product=parent)
    assert formset.initial_form_count() == 0  # No children yet
    assert not parent.get_all_package_children()

    # Try to add a product to its own package
    data = dict(get_form_data(formset, True), **{"form-0-child": parent.pk, "form-0-quantity": 2})
    formset = FormSet(parent_product=parent, data=data)
    formset.save()
    assert not parent.get_all_package_children()
hrayr-artunyan/shuup
shuup_tests/admin/test_product_package.py
shuup/core/models/_service_behavior.py
# -*- coding: utf-8 -*-
# Author: Michael Malocha <mjm159@humboldt.edu>
# Last Edit: September 22nd, 2013
#
# This module was developed with funding from the GSOC 2013 summer of code
#

"""
This module is meant to parse the HELIO registry and return WSDL endpoints
to facilitate the interfacing between further modules and HELIO.
"""
from __future__ import absolute_import, print_function

import xml.etree.ElementTree as EL

from bs4 import BeautifulSoup
from contextlib import closing

from sunpy.net.helio import registry_links as RL
from sunpy.extern.six.moves import urllib

__author__ = 'Michael Malocha'
__version__ = 'September 22nd, 2013'

# Lifespan in seconds before a link times-out
LINK_TIMEOUT = 3


def webservice_parser(service='HEC'):
    """
    Quickly parses important contents from the HELIO registry.

    Uses the link contained in registry_links with 'service' appended
    and scrapes the web-service links contained on that webpage.

    Parameters
    ----------
    service: str
        Indicates which particular HELIO service is used. Defaults to HEC.

    Returns
    -------
    links: list or NoneType
        List of urls to registries containing WSDL endpoints.

    Examples
    --------
    >>> from sunpy.net.helio import parser
    >>> parser.webservice_parser()
    ['http://msslkz.mssl.ucl.ac.uk/helio-hec/HelioService',
    'http://festung3.oats.inaf.it:8080/helio-hec/HelioService',
    'http://festung1.oats.inaf.it:8080/helio-hec/HelioService',
    'http://hec.helio-vo.eu/helio_hec/HelioService',
    'http://msslkz.mssl.ucl.ac.uk/helio-hec/HelioLongQueryService',
    'http://festung3.oats.inaf.it:8080/helio-hec/HelioLongQueryService',
    'http://festung1.oats.inaf.it:8080/helio-hec/HelioLongQueryService',
    'http://hec.helio-vo.eu/helio_hec/HelioLongQueryService']
    """
    link = RL.LINK + '/' + service.lower()
    xml = link_test(link)
    if xml is None:
        return xml
    root = EL.fromstring(xml)
    links = []

    # `Element.getiterator` is deprecated (and removed in Python 3.9);
    # `Element.iter` is the supported spelling on both Python 2.7 and 3.x.
    for interface in root.iter('interface'):
        service_type = interface.attrib
        key = list(service_type.keys())
        if len(key) > 0:
            value = service_type[key[0]]
            if value == 'vr:WebService':
                for url in interface.iter('accessURL'):
                    if url.text not in links:
                        links.append(url.text)
    return links


def endpoint_parser(link):
    """
    Takes a link to a list of endpoints and parses the WSDL links.

    Feeding one result from webservice_parser() into endpoint_parser() at a
    time will return a list of WSDL endpoints that are contained on the page
    from the link that was passed in.

    Parameters
    ----------
    link: str
        A url to a page containing links to WSDL files.

    Returns
    -------
    endpoints: list or NoneType
        A list containing all of the available WSDL endpoints from the passed
        in url.

    Examples
    --------
    >>> from sunpy.net.helio import parser
    >>> parser.endpoint_parser('http://msslkz.mssl.ucl.ac.uk/helio-hec/HelioService')
    ['http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioService?wsdl',
    'http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioService1_0?wsdl',
    'http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioService1_0b?wsdl',
    'http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioLongQueryService?wsdl',
    'http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioLongQueryService1_0?wsdl',
    'http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioLongQueryService1_1?wsdl',
    'http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioLongQueryService1_0b?wsdl',
    'http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioTavernaService?wsdl']
    """
    endpoint_page = link_test(link)
    if endpoint_page is None:
        return None
    # An explicit parser avoids bs4's "no parser specified" warning while
    # keeping the default behavior.
    soup = BeautifulSoup(endpoint_page, 'html.parser')
    endpoints = []
    for web_link in soup.find_all('a'):
        endpoints.append(web_link.get('href'))
    return endpoints


def taverna_parser(link):
    """
    Takes a link to a list of endpoints and parses the taverna WSDL links.

    Takes a url to a page containing a list of endpoints, then passes that url
    to endpoint_parser(). Upon receiving the resulting list from the parser,
    taverna_parser() goes through the list and finds all the WSDL links for
    the taverna web-service. It then returns a list containing the filtered
    links.

    Parameters
    ----------
    link: str
        A url to a page containing links to WSDL files.

    Returns
    -------
    taverna_links: list or NoneType
        A list containing WSDL links for a taverna web-service

    Examples
    --------
    >>> from sunpy.net.helio import parser
    >>> parser.taverna_parser('http://msslkz.mssl.ucl.ac.uk/helio-hec/HelioService')
    ['http://msslkz.mssl.ucl.ac.uk:80/helio-hec/HelioTavernaService?wsdl']
    """
    endpoints = endpoint_parser(link)
    taverna_links = []
    if endpoints is None:
        return None
    for web_link in endpoints:
        if 'Taverna' in web_link:
            taverna_links.append(web_link)
    if len(taverna_links) == 0:
        return None
    return taverna_links


def link_test(link):
    """
    Just a quick function to test a link.

    Quickly checks to see if the URL is a valid link; if it is, it returns the
    downloaded contents of that page.

    Parameters
    ----------
    link: str
        A string containing a URL

    Returns
    -------
    webpage: str or NoneType
        String containing the webresults

    Examples
    --------
    >>> from sunpy.net.helio import parser
    >>> parser.link_test('http://msslkz.mssl.ucl.ac.uk/helio-hec/HelioService')
    u'<html>\n<head>...</body>\n</html>\n'

    >>> print(parser.link_test('http://rrnx.invalid_url5523.com'))
    None
    """
    try:
        with closing(urllib.request.urlopen(link, timeout=LINK_TIMEOUT)) as fd:
            return fd.read()
    except (ValueError, urllib.error.URLError):
        return None


def wsdl_retriever(service='HEC'):
    """
    Retrieves a link to a taverna WSDL file.

    This is essentially the master method; from it all the other functions get
    called and it essentially knits everything together. It gets a list of
    service links via webservice_parser(), then filters the results via
    taverna_parser(). Finally it tests all the returned taverna WSDL links
    and returns the first live taverna endpoint.

    Parameters
    ----------
    service: str
        Indicates which particular HELIO service is used. Defaults to HEC.

    Returns
    -------
    wsdl: str
        URL to a single live taverna endpoint

    Examples
    --------
    >>> from sunpy.net.helio import parser
    >>> parser.wsdl_retriever()
    'http://msslkz.mssl.ucl.ac.uk:80/helio_hec/HelioTavernaService?wsdl'

    Notes
    -----
    * Currently only support for HEC exists, but it was designed so that it
      could be expanded at a later date
    * There is a 3 second timeout lifespan on links, so there is potential for
      this function to take a while to return. Timeout duration can be
      controlled through the LINK_TIMEOUT value
    """
    service_links = webservice_parser(service=service)
    wsdl = None
    if service_links is None:
        return None
    for link in service_links:
        wsdl_links = taverna_parser(link)
        if wsdl_links is None:
            # No Taverna endpoints on this registry page; the docstring
            # promises we keep testing the remaining service links rather
            # than bailing out on the first miss.
            continue
        for end_point in wsdl_links:
            if end_point is not None and link_test(end_point) is not None:
                wsdl = end_point
                break
        if wsdl is not None:
            break
    return wsdl
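The scraping step inside endpoint_parser() and taverna_parser() can be exercised offline. The sketch below runs the same BeautifulSoup logic against a made-up in-memory page (the URLs are placeholders, not real HELIO endpoints; bs4 is assumed available since the module itself imports it).

# Offline sketch of the anchor-scraping and Taverna filtering performed by
# endpoint_parser() and taverna_parser(). The HTML is a stand-in for a live
# HELIO registry page; the hostnames are deliberately fake.
from bs4 import BeautifulSoup

fake_page = """
<html><body>
  <a href="http://example.invalid/helio-hec/HelioService?wsdl">plain</a>
  <a href="http://example.invalid/helio-hec/HelioTavernaService?wsdl">taverna</a>
</body></html>
"""

soup = BeautifulSoup(fake_page, 'html.parser')
endpoints = [a.get('href') for a in soup.find_all('a')]
taverna_links = [url for url in endpoints if 'Taverna' in url]
print(taverna_links)  # ['http://example.invalid/helio-hec/HelioTavernaService?wsdl']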
from __future__ import absolute_import

import pytest

from sunpy.net.helio import hec
import sunpy.net.helio.parser as p


def test_suds_unwrapper():
    suds_output = """<?xml version="1.0" encoding="UTF-8"?>
<S:Envelope ..... >
   <S:Body>
      <helio:queryResponse ... >
         <VOTABLE xmlns="http://www.ivoa.net/xml/VOTable/v1.1" version="1.1">
            <RESOURCE>
            ...
            </RESOURCE>
         </VOTABLE>
      </helio:queryResponse>
   </S:Body>
</S:Envelope>
"""
    expected_output = """<?xml version="1.0" encoding="UTF-8"?>
<VOTABLE xmlns="http://www.ivoa.net/xml/VOTable/v1.1" version="1.1">
   <RESOURCE>
   ...
   </RESOURCE>
</VOTABLE>
"""
    assert hec.suds_unwrapper(suds_output) == expected_output


@pytest.mark.online
def test_webservice_parser():
    result = p.webservice_parser()
    assert isinstance(result, list)
Alex-Ian-Hamilton/sunpy
sunpy/net/tests/test_helio.py
sunpy/net/helio/parser.py
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2019 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#

"""
| Database (Truhlar) of hydrogen-transfer barrier height reactions.
| Geometries from Truhlar and coworkers at site http://t1.chem.umn.edu/misc/database_group/database_therm_bh/raw_geom.cgi .
| Reference energies from Zhao et al. JPCA, 109 2012-2018 (2005) doi: 10.1021/jp045141s [in supporting information].

- **cp**  ``'off'``

- **rlxd** ``'off'``

- **subset**

  - ``'small'``
  - ``'large'``

"""
import re

import qcdb

# <<< HTBH Database Module >>>
dbse = 'HTBH'
isOS = 'true'

# <<< Database Members >>>
HRXN = range(1, 39)
HRXN_SM = ['5', '6', '9', '10', '23', '24']
HRXN_LG = ['13', '14', '33', '34', '37', '38']

# <<< Chemical Systems Involved >>>
RXNM = {}  # reaction matrix of reagent contributions per reaction
ACTV = {}  # order of active reagents per reaction

ACTV['%s-%s' % (dbse, 1)] = ['%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'HCl'), '%s-%s-reagent' % (dbse, 'HHClts')]
RXNM['%s-%s' % (dbse, 1)] = dict(zip(ACTV['%s-%s' % (dbse, 1)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 2)] = ['%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'Cl'), '%s-%s-reagent' % (dbse, 'HHClts')]
RXNM['%s-%s' % (dbse, 2)] = dict(zip(ACTV['%s-%s' % (dbse, 2)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 3)] = ['%s-%s-reagent' % (dbse, 'OH'), '%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'OHH2ts')]
RXNM['%s-%s' % (dbse, 3)] = dict(zip(ACTV['%s-%s' % (dbse, 3)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 4)] = ['%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'H2O'), '%s-%s-reagent' % (dbse, 'OHH2ts')]
RXNM['%s-%s' % (dbse, 4)] = dict(zip(ACTV['%s-%s' % (dbse, 4)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 5)] = ['%s-%s-reagent' % (dbse, 'CH3'), '%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'CH3H2ts')]
RXNM['%s-%s' % (dbse, 5)] = dict(zip(ACTV['%s-%s' % (dbse, 5)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 6)] = ['%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'CH4'), '%s-%s-reagent' % (dbse, 'CH3H2ts')]
RXNM['%s-%s' % (dbse, 6)] = dict(zip(ACTV['%s-%s' % (dbse, 6)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 7)] = ['%s-%s-reagent' % (dbse, 'OH'), '%s-%s-reagent' % (dbse, 'CH4'), '%s-%s-reagent' % (dbse, 'OHCH4ts')]
RXNM['%s-%s' % (dbse, 7)] = dict(zip(ACTV['%s-%s' % (dbse, 7)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 8)] = ['%s-%s-reagent' % (dbse, 'CH3'), '%s-%s-reagent' % (dbse, 'H2O'), '%s-%s-reagent' % (dbse, 'OHCH4ts')]
RXNM['%s-%s' % (dbse, 8)] = dict(zip(ACTV['%s-%s' % (dbse, 8)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 9)] = ['%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'HH2ts')]
RXNM['%s-%s' % (dbse, 9)] = dict(zip(ACTV['%s-%s' % (dbse, 9)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 10)] = ['%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'HH2ts')]
RXNM['%s-%s' % (dbse, 10)] = dict(zip(ACTV['%s-%s' % (dbse, 10)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 11)] = ['%s-%s-reagent' % (dbse, 'OH'), '%s-%s-reagent' % (dbse, 'NH3'), '%s-%s-reagent' % (dbse, 'OHNH3ts')]
RXNM['%s-%s' % (dbse, 11)] = dict(zip(ACTV['%s-%s' % (dbse, 11)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 12)] = ['%s-%s-reagent' % (dbse, 'H2O'), '%s-%s-reagent' % (dbse, 'NH2'), '%s-%s-reagent' % (dbse, 'OHNH3ts')]
RXNM['%s-%s' % (dbse, 12)] = dict(zip(ACTV['%s-%s' % (dbse, 12)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 13)] = ['%s-%s-reagent' % (dbse, 'HCl'), '%s-%s-reagent' % (dbse, 'CH3'), '%s-%s-reagent' % (dbse, 'HClCH3ts')]
RXNM['%s-%s' % (dbse, 13)] = dict(zip(ACTV['%s-%s' % (dbse, 13)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 14)] = ['%s-%s-reagent' % (dbse, 'Cl'), '%s-%s-reagent' % (dbse, 'CH4'), '%s-%s-reagent' % (dbse, 'HClCH3ts')]
RXNM['%s-%s' % (dbse, 14)] = dict(zip(ACTV['%s-%s' % (dbse, 14)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 15)] = ['%s-%s-reagent' % (dbse, 'OH'), '%s-%s-reagent' % (dbse, 'C2H6'), '%s-%s-reagent' % (dbse, 'OHC2H6ts')]
RXNM['%s-%s' % (dbse, 15)] = dict(zip(ACTV['%s-%s' % (dbse, 15)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 16)] = ['%s-%s-reagent' % (dbse, 'H2O'), '%s-%s-reagent' % (dbse, 'C2H5'), '%s-%s-reagent' % (dbse, 'OHC2H6ts')]
RXNM['%s-%s' % (dbse, 16)] = dict(zip(ACTV['%s-%s' % (dbse, 16)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 17)] = ['%s-%s-reagent' % (dbse, 'F'), '%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'FH2ts')]
RXNM['%s-%s' % (dbse, 17)] = dict(zip(ACTV['%s-%s' % (dbse, 17)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 18)] = ['%s-%s-reagent' % (dbse, 'HF'), '%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'FH2ts')]
RXNM['%s-%s' % (dbse, 18)] = dict(zip(ACTV['%s-%s' % (dbse, 18)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 19)] = ['%s-%s-reagent' % (dbse, 'O'), '%s-%s-reagent' % (dbse, 'CH4'), '%s-%s-reagent' % (dbse, 'OHCH3ts')]
RXNM['%s-%s' % (dbse, 19)] = dict(zip(ACTV['%s-%s' % (dbse, 19)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 20)] = ['%s-%s-reagent' % (dbse, 'OH'), '%s-%s-reagent' % (dbse, 'CH3'), '%s-%s-reagent' % (dbse, 'OHCH3ts')]
RXNM['%s-%s' % (dbse, 20)] = dict(zip(ACTV['%s-%s' % (dbse, 20)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 21)] = ['%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'PH3'), '%s-%s-reagent' % (dbse, 'HPH3ts')]
RXNM['%s-%s' % (dbse, 21)] = dict(zip(ACTV['%s-%s' % (dbse, 21)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 22)] = ['%s-%s-reagent' % (dbse, 'PH2'), '%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'HPH3ts')]
RXNM['%s-%s' % (dbse, 22)] = dict(zip(ACTV['%s-%s' % (dbse, 22)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 23)] = ['%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'OH'), '%s-%s-reagent' % (dbse, 'OHHts')]
RXNM['%s-%s' % (dbse, 23)] = dict(zip(ACTV['%s-%s' % (dbse, 23)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 24)] = ['%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'O'), '%s-%s-reagent' % (dbse, 'OHHts')]
RXNM['%s-%s' % (dbse, 24)] = dict(zip(ACTV['%s-%s' % (dbse, 24)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 25)] = ['%s-%s-reagent' % (dbse, 'H'), '%s-%s-reagent' % (dbse, 'H2S'), '%s-%s-reagent' % (dbse, 'HH2Sts')]
RXNM['%s-%s' % (dbse, 25)] = dict(zip(ACTV['%s-%s' % (dbse, 25)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 26)] = ['%s-%s-reagent' % (dbse, 'H2'), '%s-%s-reagent' % (dbse, 'HS'), '%s-%s-reagent' % (dbse, 'HH2Sts')]
RXNM['%s-%s' % (dbse, 26)] = dict(zip(ACTV['%s-%s' % (dbse, 26)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 27)] = ['%s-%s-reagent' % (dbse, 'O'), '%s-%s-reagent' % (dbse, 'HCl'), '%s-%s-reagent' % (dbse, 'OHClts')]
RXNM['%s-%s' % (dbse, 27)] = dict(zip(ACTV['%s-%s' % (dbse, 27)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 28)] = ['%s-%s-reagent' % (dbse, 'OH'), '%s-%s-reagent' % (dbse, 'Cl'), '%s-%s-reagent' % (dbse, 'OHClts')]
RXNM['%s-%s' % (dbse, 28)] = dict(zip(ACTV['%s-%s' % (dbse, 28)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 29)] = ['%s-%s-reagent' % (dbse, 'NH2'), '%s-%s-reagent' % (dbse, 'CH3'), '%s-%s-reagent' % (dbse, 'CH3NH2ts')]
RXNM['%s-%s' % (dbse, 29)] = dict(zip(ACTV['%s-%s' % (dbse, 29)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 30)] = ['%s-%s-reagent' % (dbse, 'CH4'), '%s-%s-reagent' % (dbse, 'NH'), '%s-%s-reagent' % (dbse, 'CH3NH2ts')]
RXNM['%s-%s' % (dbse, 30)] = dict(zip(ACTV['%s-%s' % (dbse, 30)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 31)] = ['%s-%s-reagent' % (dbse, 'NH2'), '%s-%s-reagent' % (dbse, 'C2H5'), '%s-%s-reagent' % (dbse, 'NH2C2H5ts')]
RXNM['%s-%s' % (dbse, 31)] = dict(zip(ACTV['%s-%s' % (dbse, 31)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 32)] = ['%s-%s-reagent' % (dbse, 'C2H6'), '%s-%s-reagent' % (dbse, 'NH'), '%s-%s-reagent' % (dbse, 'NH2C2H5ts')]
RXNM['%s-%s' % (dbse, 32)] = dict(zip(ACTV['%s-%s' % (dbse, 32)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 33)] = ['%s-%s-reagent' % (dbse, 'C2H6'), '%s-%s-reagent' % (dbse, 'NH2'), '%s-%s-reagent' % (dbse, 'C2H6NH2ts')]
RXNM['%s-%s' % (dbse, 33)] = dict(zip(ACTV['%s-%s' % (dbse, 33)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 34)] = ['%s-%s-reagent' % (dbse, 'NH3'), '%s-%s-reagent' % (dbse, 'C2H5'), '%s-%s-reagent' % (dbse, 'C2H6NH2ts')]
RXNM['%s-%s' % (dbse, 34)] = dict(zip(ACTV['%s-%s' % (dbse, 34)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 35)] = ['%s-%s-reagent' % (dbse, 'NH2'), '%s-%s-reagent' % (dbse, 'CH4'), '%s-%s-reagent' % (dbse, 'NH2CH4ts')]
RXNM['%s-%s' % (dbse, 35)] = dict(zip(ACTV['%s-%s' % (dbse, 35)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 36)] = ['%s-%s-reagent' % (dbse, 'CH3'), '%s-%s-reagent' % (dbse, 'NH3'), '%s-%s-reagent' % (dbse, 'NH2CH4ts')]
RXNM['%s-%s' % (dbse, 36)] = dict(zip(ACTV['%s-%s' % (dbse, 36)], [-1, -1, +1]))

ACTV['%s-%s' % (dbse, 37)] = ['%s-%s-reagent' % (dbse, 'C5H8'), '%s-%s-reagent' % (dbse, 'C5H8ts')]
RXNM['%s-%s' % (dbse, 37)] = dict(zip(ACTV['%s-%s' % (dbse, 37)], [-1, +1]))

ACTV['%s-%s' % (dbse, 38)] = ['%s-%s-reagent' % (dbse, 'C5H8'), '%s-%s-reagent' % (dbse, 'C5H8ts')]
RXNM['%s-%s' % (dbse, 38)] = dict(zip(ACTV['%s-%s' % (dbse, 38)], [-1, +1]))

# <<< Reference Values [kcal/mol] >>>
BIND = {}
BIND['%s-%s' % (dbse, 1)] = 5.7
BIND['%s-%s' % (dbse, 2)] = 8.7
BIND['%s-%s' % (dbse, 3)] = 5.1
BIND['%s-%s' % (dbse, 4)] = 21.2
BIND['%s-%s' % (dbse, 5)] = 12.1
BIND['%s-%s' % (dbse, 6)] = 15.3
BIND['%s-%s' % (dbse, 7)] = 6.7
BIND['%s-%s' % (dbse, 8)] = 19.6
BIND['%s-%s' % (dbse, 9)] = 9.6
BIND['%s-%s' % (dbse, 10)] = 9.6
BIND['%s-%s' % (dbse, 11)] = 3.2
BIND['%s-%s' % (dbse, 12)] = 12.7
BIND['%s-%s' % (dbse, 13)] = 1.7
BIND['%s-%s' % (dbse, 14)] = 7.9
BIND['%s-%s' % (dbse, 15)] = 3.4
BIND['%s-%s' % (dbse, 16)] = 19.9
BIND['%s-%s' % (dbse, 17)] = 1.8
BIND['%s-%s' % (dbse, 18)] = 33.4
BIND['%s-%s' % (dbse, 19)] = 13.7
BIND['%s-%s' % (dbse, 20)] = 8.1
BIND['%s-%s' % (dbse, 21)] = 3.1
BIND['%s-%s' % (dbse, 22)] = 23.2
BIND['%s-%s' % (dbse, 23)] = 10.7
BIND['%s-%s' % (dbse, 24)] = 13.1
BIND['%s-%s' % (dbse, 25)] = 3.5
BIND['%s-%s' % (dbse, 26)] = 17.3
BIND['%s-%s' % (dbse, 27)] = 9.8
BIND['%s-%s' % (dbse, 28)] = 10.4
BIND['%s-%s' % (dbse, 29)] = 8.0
BIND['%s-%s' % (dbse, 30)] = 22.4
BIND['%s-%s' % (dbse, 31)] = 7.5
BIND['%s-%s' % (dbse, 32)] = 18.3
BIND['%s-%s' % (dbse, 33)] = 10.4
BIND['%s-%s' % (dbse, 34)] = 17.4
BIND['%s-%s' % (dbse, 35)] = 14.5
BIND['%s-%s' % (dbse, 36)] = 17.8
BIND['%s-%s' % (dbse, 37)] = 38.4
BIND['%s-%s' % (dbse, 38)] = 38.4

# <<< Comment Lines >>>
TAGL = {}
TAGL['%s-%s' % (dbse, 1)] = '{ H + HCl <-- [HHCl] } --> H2 + Cl'
TAGL['%s-%s' % (dbse, 2)] = 'H + HCl <-- { [HHCl] --> H2 + Cl }'
TAGL['%s-%s' % (dbse, 3)] = '{ OH + H2 <-- [OHH2] } --> H + H2O'
TAGL['%s-%s' % (dbse, 4)] = 'OH + H2 <-- { [OHH2] --> H + H2O }'
TAGL['%s-%s' % (dbse, 5)] = '{ CH3 + H2 <-- [CH3H2] } --> H + CH4'
TAGL['%s-%s' % (dbse, 6)] = 'CH3 + H2 <-- { [CH3H2] --> H + CH4 }'
TAGL['%s-%s' % (dbse, 7)] = '{ OH + CH4 <-- [OHCH4] } --> CH3 + H2O'
TAGL['%s-%s' % (dbse, 8)] = 'OH + CH4 <-- { [OHCH4] --> CH3 + H2O }'
TAGL['%s-%s' % (dbse, 9)] = '{ H + H2 <-- [HH2] } --> H2 + H'
TAGL['%s-%s' % (dbse, 10)] = 'H + H2 <-- { [HH2] --> H2 + H }'
TAGL['%s-%s' % (dbse, 11)] = '{ OH + NH3 <-- [OHNH3] } --> H2O + NH2'
TAGL['%s-%s' % (dbse, 12)] = 'OH + NH3 <-- { [OHNH3] --> H2O + NH2 }'
TAGL['%s-%s' % (dbse, 13)] = '{ HCl + CH3 <-- [HClCH3] } --> Cl + CH4'
TAGL['%s-%s' % (dbse, 14)] = 'HCl + CH3 <-- { [HClCH3] --> Cl + CH4 }'
TAGL['%s-%s' % (dbse, 15)] = '{ OH + C2H6 <-- [OHC2H6] } --> H2O + C2H5'
TAGL['%s-%s' % (dbse, 16)] = 'OH + C2H6 <-- { [OHC2H6] --> H2O + C2H5 }'
TAGL['%s-%s' % (dbse, 17)] = '{ F + H2 <-- [FH2] } --> HF + H'
TAGL['%s-%s' % (dbse, 18)] = 'F + H2 <-- { [FH2] --> HF + H }'
TAGL['%s-%s' % (dbse, 19)] = '{ O + CH4 <-- [OHCH3] } --> OH + CH3'
TAGL['%s-%s' % (dbse, 20)] = 'O + CH4 <-- { [OHCH3] --> OH + CH3 }'
TAGL['%s-%s' % (dbse, 21)] = '{ H + PH3 <-- [HPH3] } --> PH2 + H2'
TAGL['%s-%s' % (dbse, 22)] = 'H + PH3 <-- { [HPH3] --> PH2 + H2 }'
TAGL['%s-%s' % (dbse, 23)] = '{ H + OH <-- [OHH] } --> H2 + O'
TAGL['%s-%s' % (dbse, 24)] = 'H + OH <-- { [OHH] --> H2 + O }'
TAGL['%s-%s' % (dbse, 25)] = '{ H + H2S <-- [HH2S] } --> H2 + HS'
TAGL['%s-%s' % (dbse, 26)] = 'H + H2S <-- { [HH2S] --> H2 + HS }'
TAGL['%s-%s' % (dbse, 27)] = '{ O + HCl <-- [OHCl] } --> OH + Cl'
TAGL['%s-%s' % (dbse, 28)] = 'O + HCl <-- { [OHCl] --> OH + Cl }'
TAGL['%s-%s' % (dbse, 29)] = '{ NH2 + CH3 <-- [CH3NH2] } --> CH4 + NH'
TAGL['%s-%s' % (dbse, 30)] = 'NH2 + CH3 <-- { [CH3NH2] --> CH4 + NH }'
TAGL['%s-%s' % (dbse, 31)] = '{ NH2 + C2H5 <-- [NH2C2H5] } --> C2H6 + NH'
TAGL['%s-%s' % (dbse, 32)] = 'NH2 + C2H5 <-- { [NH2C2H5] --> C2H6 + NH }'
TAGL['%s-%s' % (dbse, 33)] = '{ C2H6 + NH2 <-- [C2H6NH2] } --> NH3 + C2H5'
TAGL['%s-%s' % (dbse, 34)] = 'C2H6 + NH2 <-- { [C2H6NH2] --> NH3 + C2H5 }'
TAGL['%s-%s' % (dbse, 35)] = '{ NH2 + CH4 <-- [NH2CH4] } --> CH3 + NH3'
TAGL['%s-%s' % (dbse, 36)] = 'NH2 + CH4 <-- { [NH2CH4] --> CH3 + NH3 }'
TAGL['%s-%s' % (dbse, 37)] = '{ C5H8 <-- [C5H8] } --> C5H8'
TAGL['%s-%s' % (dbse, 38)] = 'C5H8 <-- { [C5H8] --> C5H8 }'
TAGL['%s-%s-reagent' % (dbse, 'C2H5')] = 'C2H5'
TAGL['%s-%s-reagent' % (dbse, 'C2H6')] = 'Ethane'
TAGL['%s-%s-reagent' % (dbse, 'C2H6NH2ts')] = 'Transition state of C2H6 + NH2 <--> NH3 + C2H5'
TAGL['%s-%s-reagent' % (dbse, 'C5H8')] = 's-trans cis-C5H8'
TAGL['%s-%s-reagent' % (dbse, 'C5H8ts')] = 'Transition state of s-trans cis-C5H8 <--> s-trans cis C5H8'
TAGL['%s-%s-reagent' % (dbse, 'CH3')] = 'CH3'
TAGL['%s-%s-reagent' % (dbse, 'CH3H2ts')] = 'Transition state of CH3 + H2 <--> H + CH4'
TAGL['%s-%s-reagent' % (dbse, 'CH3NH2ts')] = 'Transition state of CH3 + NH2 <--> CH4 + NH'
TAGL['%s-%s-reagent' % (dbse, 'CH4')] = 'Methane'
TAGL['%s-%s-reagent' % (dbse, 'Cl')] = 'Chlorine atom'
TAGL['%s-%s-reagent' % (dbse, 'F')] = 'Fluorine atom'
TAGL['%s-%s-reagent' % (dbse, 'FH2ts')] = 'Transition state of F + H2 <--> HF + H'
TAGL['%s-%s-reagent' % (dbse, 'H')] = 'Hydrogen atom'
TAGL['%s-%s-reagent' % (dbse, 'H2')] = 'Hydrogen molecule'
TAGL['%s-%s-reagent' % (dbse, 'H2O')] = 'Water'
TAGL['%s-%s-reagent' % (dbse, 'H2S')] = 'Hydrogen Sulfide'
TAGL['%s-%s-reagent' % (dbse, 'HCl')] = 'Hydrogen Chloride'
TAGL['%s-%s-reagent' % (dbse, 'HClCH3ts')] = 'Transition state of HCl + CH3 <--> Cl + CH4'
TAGL['%s-%s-reagent' % (dbse, 'HHClts')] = 'Transition state of H + HCl <--> H2 + Cl'
TAGL['%s-%s-reagent' % (dbse, 'HF')] = 'Hydrogen Fluoride'
TAGL['%s-%s-reagent' % (dbse, 'HH2Sts')] = 'Transition state of H + H2S <--> H2 + HS'
TAGL['%s-%s-reagent' % (dbse, 'HH2ts')] = 'Transition state of H + H2 <--> H2 + H'
TAGL['%s-%s-reagent' % (dbse, 'NH')] = 'NH'
TAGL['%s-%s-reagent' % (dbse, 'HPH3ts')] = 'Transition state of H + PH3 <--> PH2 + H2'
TAGL['%s-%s-reagent' % (dbse, 'NH2')] = 'NH2'
TAGL['%s-%s-reagent' % (dbse, 'NH2C2H5ts')] = 'Transition state of C2H5 + NH2 <--> NH + C2H6'
TAGL['%s-%s-reagent' % (dbse, 'NH2CH4ts')] = 'Transition state of CH4 + NH2 <--> NH3 + CH3'
TAGL['%s-%s-reagent' % (dbse, 'NH3')] = 'Ammonia'
TAGL['%s-%s-reagent' % (dbse, 'O')] = 'Oxygen atom'
TAGL['%s-%s-reagent' % (dbse, 'OH')] = 'OH'
TAGL['%s-%s-reagent' % (dbse, 'OHC2H6ts')] = 'Transition state of C2H6 + OH <--> H2O + C2H5'
TAGL['%s-%s-reagent' % (dbse, 'OHCH3ts')] = 'Transition state of O + CH4 <--> OH + CH3'
TAGL['%s-%s-reagent' % (dbse, 'OHCH4ts')] = 'Transition state of OH + CH4 <--> CH3 + H2O'
TAGL['%s-%s-reagent' % (dbse, 'OHClts')] = 'Transition state of O + HCl <--> OH + Cl'
TAGL['%s-%s-reagent' % (dbse, 'OHH2ts')] = 'Transition state of OH + H2 <--> H + H2O'
TAGL['%s-%s-reagent' % (dbse, 'OHHts')] = 'Transition state of OH + H <--> H2 + O'
TAGL['%s-%s-reagent' % (dbse, 'OHNH3ts')] = 'Transition state of OH + NH3 <--> NH2 + H2O'
TAGL['%s-%s-reagent' % (dbse, 'PH2')] = 'PH2'
TAGL['%s-%s-reagent' % (dbse, 'PH3')] = 'Phosphine'
TAGL['%s-%s-reagent' % (dbse, 'HS')] = 'HS'

# <<< Geometry Specification Strings >>>
GEOS = {}

GEOS['%s-%s-reagent' % (dbse, 'C2H5')] = qcdb.Molecule("""
0 2
C 0.00550995 -0.00307714 -0.77443959
C 0.00550995 -0.00307714 0.71569982
H 0.00550995 -1.01684444 1.11670108
H 0.37964525 0.84547158 -1.32730429
H -0.88217468 0.49798042 1.12141209
H 0.87299475 0.52193057 1.11660682
H -0.50718726 -0.77526005 -1.32801142
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'C2H6')] = qcdb.Molecule("""
0 1
C 0.00000020 -0.00000013 -0.76309187
C 0.00000020 -0.00000013 0.76309163
H 0.00000020 -1.01606691 1.15831231
H -0.87903844 -0.50959541 -1.15830943
H -0.87994508 0.50802887 1.15831013
H 0.87993813 0.50804049 1.15830883
H -0.00180313 1.01606605 -1.15830975
H 0.88084363 -0.50646996 -1.15830912
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'C2H6NH2ts')] = qcdb.Molecule("""
0 2
C -1.48570000 -0.44815600 -0.00001900
C -0.50504200 0.70174000 0.00002900
N 1.86516100 -0.34016700 -0.00005700
H -1.35419300 -1.07650500 -0.88050300
H -1.35415900 -1.07661100 0.88038500
H -2.51702500 -0.08617300 0.00002500
H -0.52222400 1.31611800 -0.89721800
H -0.52220500 1.31602900 0.89733800
H 0.66504700 0.14796100 -0.00003400
H 2.24664400 0.15971700 -0.80480600
H 2.24643900 0.15913300 0.80515100
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'C5H8')] = qcdb.Molecule("""
0 1
C -2.05563800 -0.61227200 0.00000700
C -1.23109600 0.64044800 0.00004900
C 0.10563400 0.73427300 0.00002600
C 1.05755500 -0.37440700 -0.00004400
C 2.38358300 -0.19893600 -0.00003600
H -2.70508500 -0.64159700 0.87713200
H -2.70512900 -0.64150800 -0.87708900
H -1.45133200 -1.51607900 -0.00005500
H -1.79366500 1.56758600 0.00010300
H 0.54575600 1.72564300 0.00006400
H 0.66526200 -1.38324200 -0.00010500
H 3.06468900 -1.03771900 -0.00008800
H 2.81927500 0.79228500 0.00002300
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'C5H8ts')] = qcdb.Molecule("""
0 1
C -1.29962300 -0.90485300 -0.02015500
C -1.20594700 0.50581700 -0.01341400
C 0.00000000 1.18336100 0.15330100
C 1.20594800 0.50581400 -0.01342200
C 1.29962600 -0.90485100 -0.02014700
H 2.16879700 -1.32754900 -0.51569700
H 1.03204100 -1.45438500 0.87316600
H 2.03713000 1.08558300 -0.39850400
H 0.00000100 2.26291300 0.08590500
H -2.03713300 1.08558700 -0.39848100
H -2.16879600 -1.32754000 -0.51571600
H -0.00001100 -1.18194200 -0.52080800
H -1.03205900 -1.45439400 0.87315800
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'CH3')] = qcdb.Molecule("""
0 2
C 0.00000000 0.00000000 -0.00000000
H 0.00000000 0.00000000 1.07731727
H -0.00000000 0.93298412 -0.53865863
H 0.00000000 -0.93298412 -0.53865863
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'CH3H2ts')] = qcdb.Molecule("""
0 2
C 0.00000000 0.26481300 0.00000000
H 1.05342900 0.51666800 0.00000000
H -0.52662700 0.51702500 0.91225000
H -0.52662700 0.51702500 -0.91225000
H -0.00026000 -1.11777100 0.00000000
H 0.00008400 -2.02182500 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'CH3NH2ts')] = qcdb.Molecule("""
0 3
C -1.19957700 -0.01112600 -0.00003000
N 1.40071500 0.12986200 0.00001500
H -1.42666000 -0.51293200 0.93305700
H -1.41990700 -0.59138200 -0.88814300
H -1.52023700 1.02280600 -0.04578300
H 0.18892600 0.12689600 0.00100100
H 1.57033800 -0.88766700 -0.00005300
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'CH4')] = qcdb.Molecule("""
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 1.08744517 0.00000000
H -0.51262657 -0.36248173 0.88789526
H -0.51262657 -0.36248173 -0.88789526
H 1.02525314 -0.36248173 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'Cl')] = qcdb.Molecule("""
0 2
Cl 0.00000000 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'F')] = qcdb.Molecule("""
0 2
F 0.00000000 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'FH2ts')] = qcdb.Molecule("""
0 2
H 0.14656800 -1.12839000 0.00000000
F 0.00000000 0.33042200 0.00000000
H -0.14656800 -1.84541000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'H')] = qcdb.Molecule("""
0 2
H 0.00000000 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'H2')] = qcdb.Molecule("""
0 1
H 0.00000000 0.00000000 0.00000000
H 0.74187646 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'H2O')] = qcdb.Molecule("""
0 1
O 0.00000000 0.00000000 -0.06555155
H 0.00000000 -0.75670946 0.52017534
H 0.00000000 0.75670946 0.52017534
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'H2S')] = qcdb.Molecule("""
0 1
S 0.00000000 0.00000000 0.10251900
H 0.00000000 0.96624900 -0.82015400
H 0.00000000 -0.96624900 -0.82015400
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'HCl')] = qcdb.Molecule("""
0 1
Cl 0.00000000 0.00000000 0.00000000
H 1.27444789 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'HClCH3ts')] = qcdb.Molecule("""
0 2
C 0.24411700 0.59991600 1.70242300
H -0.67559700 0.27848200 2.17293900
H 0.35191000 1.66378600 1.53767200
H 1.14068600 0.06578700 1.98782200
H 0.05716300 0.13997300 0.39711200
Cl -0.13758000 -0.33809000 -0.95941600
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'HHClts')] = qcdb.Molecule("""
0 2
H 0.00048000 -1.34062700 0.00000000
Cl 0.00000000 0.20325200 0.00000000
H -0.00048000 -2.11465900 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'HF')] = qcdb.Molecule("""
0 1
F 0.00000000 0.00000000 0.00000000
H 0.91538107 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'HH2Sts')] = qcdb.Molecule("""
0 2
H 1.26209700 -0.22009700 0.00000000
S 0.00000000 0.22315300 0.00000000
H -0.50057600 -1.11544500 0.00000000
H -0.76152100 -2.23491300 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'HH2ts')] = qcdb.Molecule("""
0 2
H 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 0.92947400
H 0.00000000 0.00000000 -0.92947400
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'NH')] = qcdb.Molecule("""
0 3
N 0.00000000 0.00000000 0.00000000
H 1.03673136 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'HPH3ts')] = qcdb.Molecule("""
0 2
P 0.21742900 0.00008800 -0.11124900
H 0.24660900 1.03466800 0.85216400
H 0.26266100 -1.02505800 0.86162300
H -1.26641800 -0.01095200 -0.15062600
H -2.50429000 0.00002800 0.10557500
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'NH2')] = qcdb.Molecule("""
0 2
N 0.00000000 0.00000000 -0.08007491
H 0.00000000 -0.80231373 0.55629442
H 0.00000000 0.80231373 0.55629442
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'NH2C2H5ts')] = qcdb.Molecule("""
0 3
C -1.39498400 -0.44966100 0.00070300
C -0.43574600 0.71406300 0.00202700
N 1.92757000 -0.37835200 0.00303600
H -1.20008700 -1.12095100 -0.83568700
H -1.32209500 -1.02788400 0.92177300
H -2.42871300 -0.10535200 -0.08933400
H -0.41768800 1.30848200 -0.90720100
H -0.44112700 1.32909500 0.89746700
H 0.82850100 0.18059300 -0.02856100
H 2.47259200 0.49807300 0.00391000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'NH2CH4ts')] = qcdb.Molecule("""
0 2
C -1.26075000 -0.00000600 0.01229100
N 1.31325500 -0.00000500 -0.13678200
H -1.58398700 0.90853800 -0.48474400
H -1.46367200 -0.00457300 1.07730200
H -1.58474800 -0.90388000 -0.49270000
H 0.04310800 -0.00006400 -0.15169200
H 1.48045900 0.80557700 0.46775100
H 1.48055700 -0.80552400 0.46780800
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'NH3')] = qcdb.Molecule("""
0 1
N 0.00000000 0.00000000 0.11289000
H 0.00000000 0.93802400 -0.26340900
H 0.81235300 -0.46901200 -0.26340900
H -0.81235300 -0.46901200 -0.26340900
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'O')] = qcdb.Molecule("""
0 3
O 0.00000000 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'OH')] = qcdb.Molecule("""
0 2
O 0.00000000 0.00000000 0.00000000
H 0.96889819 0.00000000 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'OHC2H6ts')] = qcdb.Molecule("""
0 2
C 1.45833400 -0.44636500 0.02547800
C 0.46942300 0.69742200 -0.02749300
O -1.85303700 -0.31465900 -0.05305500
H 1.30176400 -1.06107900 0.91073700
H 1.36658500 -1.08618900 -0.85111800
H 2.48224500 -0.06687900 0.05715000
H 0.47106900 1.32544300 0.86103700
H 0.53352400 1.30349500 -0.92856000
H -0.63023200 0.20781600 -0.07846500
H -2.26720700 0.38832100 0.46575100
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'OHCH3ts')] = qcdb.Molecule("""
0 3
C 0.00029000 -1.14228900 0.00000000
H -1.05595700 -1.38473500 0.00000000
H 0.52016700 -1.40738900 0.91244700
H 0.52016700 -1.40738900 -0.91244700
H 0.01156000 0.16009900 0.00000000
O 0.00029000 1.36164300 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'OHCH4ts')] = qcdb.Molecule("""
0 2
C -1.21148700 0.00796800 0.00040700
O 1.29396500 -0.10869400 0.00013300
H 0.00947600 -0.11802000 0.00279900
H -1.52552900 -0.23325000 1.01007000
H -1.43066500 1.03323300 -0.27808200
H -1.55271000 -0.71011400 -0.73770200
H 1.41663600 0.84989400 -0.00059100
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'OHClts')] = qcdb.Molecule("""
0 3
Cl 0.01882000 -0.81730100 0.00000000
H -0.47048800 0.56948000 0.00000000
O 0.01882000 1.66557900 0.00000000
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'OHH2ts')] = qcdb.Molecule("""
0 2
O -0.30106400 -0.10804900 -0.00000800
H -0.42794500 0.85156900 0.00001600
H 1.01548600 -0.10036700 0.00011900
H 1.82096800 0.11318700 -0.00007300
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'OHHts')] = qcdb.Molecule("""
0 3
H 0.00000000 0.00000000 -0.86028700
O 0.00000000 0.00000000 0.32902400
H 0.00000000 0.00000000 -1.77190500
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'OHNH3ts')] = qcdb.Molecule("""
0 2
N -1.15081600 -0.04393200 -0.10255900
O 1.17918600 -0.09269600 -0.01029000
H -1.30318500 -0.54763800 0.76657100
H -1.33891300 0.93580800 0.09185400
H -0.03068700 -0.15383400 -0.35318400
H 1.29500900 0.81475300 0.29499100
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'PH2')] = qcdb.Molecule("""
0 2
P 0.00000000 0.00000000 -0.11565700
H 1.02013000 0.00000000 0.86742700
H -1.02013000 0.00000000 0.86742700
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'PH3')] = qcdb.Molecule("""
0 1
P 0.00000000 0.00000000 0.12641100
H 1.19133900 0.00000000 -0.63205600
H -0.59566900 -1.03173000 -0.63205600
H -0.59566900 1.03173000 -0.63205600
units angstrom
""")

GEOS['%s-%s-reagent' % (dbse, 'HS')] = qcdb.Molecule("""
0 2
S 0.00000000 0.00000000 0.00000000
H 1.34020229 0.00000000 0.00000000
units angstrom
""")

#########################################################################

# <<< Supplementary Quantum Chemical Results >>>
DATA = {}

DATA['NUCLEAR REPULSION ENERGY'] = {}
DATA['NUCLEAR REPULSION ENERGY']['HTBH-H-reagent'] = 0.00000000
DATA['NUCLEAR REPULSION ENERGY']['HTBH-HCl-reagent'] = 7.05875275
DATA['NUCLEAR REPULSION ENERGY']['HTBH-HHClts-reagent'] = 10.39163823
DATA['NUCLEAR REPULSION ENERGY']['HTBH-H2-reagent'] = 0.71329559
DATA['NUCLEAR REPULSION ENERGY']['HTBH-Cl-reagent'] = 0.00000000
DATA['NUCLEAR REPULSION ENERGY']['HTBH-OH-reagent'] = 4.36931115
DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHH2ts-reagent'] = 10.73785396
DATA['NUCLEAR REPULSION ENERGY']['HTBH-H2O-reagent'] = 9.19771594
DATA['NUCLEAR REPULSION ENERGY']['HTBH-CH3-reagent'] = 9.69236444
DATA['NUCLEAR REPULSION ENERGY']['HTBH-CH3H2ts-reagent'] = 15.32861238
DATA['NUCLEAR REPULSION ENERGY']['HTBH-CH4-reagent'] = 13.46695412
DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHCH4ts-reagent'] = 37.11882096
DATA['NUCLEAR REPULSION ENERGY']['HTBH-HH2ts-reagent'] = 1.42332440
DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH3-reagent'] = 11.97232339
DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHNH3ts-reagent'] = 37.13900482
DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH2-reagent'] = 7.56429116
DATA['NUCLEAR REPULSION ENERGY']['HTBH-HClCH3ts-reagent'] = 46.25151943
DATA['NUCLEAR REPULSION ENERGY']['HTBH-C2H6-reagent'] = 42.29535986
DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHC2H6ts-reagent'] = 76.62129511
DATA['NUCLEAR REPULSION ENERGY']['HTBH-C2H5-reagent'] = 36.98165035
DATA['NUCLEAR REPULSION ENERGY']['HTBH-F-reagent'] = 0.00000000
DATA['NUCLEAR REPULSION ENERGY']['HTBH-FH2ts-reagent'] = 6.11540453
DATA['NUCLEAR REPULSION ENERGY']['HTBH-HF-reagent'] = 5.20285489
DATA['NUCLEAR REPULSION ENERGY']['HTBH-O-reagent'] = 0.00000000
DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHCH3ts-reagent'] = 30.91033235
DATA['NUCLEAR REPULSION ENERGY']['HTBH-PH3-reagent'] = 17.63061432
DATA['NUCLEAR REPULSION ENERGY']['HTBH-HPH3ts-reagent'] = 21.01063452
DATA['NUCLEAR REPULSION ENERGY']['HTBH-PH2-reagent'] = 11.46498480
DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHHts-reagent'] = 6.15505787
DATA['NUCLEAR REPULSION ENERGY']['HTBH-H2S-reagent'] = 12.94849742
DATA['NUCLEAR REPULSION ENERGY']['HTBH-HH2Sts-reagent'] = 16.45756641
DATA['NUCLEAR REPULSION ENERGY']['HTBH-HS-reagent'] = 6.31758012
DATA['NUCLEAR REPULSION ENERGY']['HTBH-OHClts-reagent'] = 38.62988868
DATA['NUCLEAR REPULSION ENERGY']['HTBH-CH3NH2ts-reagent'] = 33.45955425
DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH-reagent'] = 3.57299934
DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH2C2H5ts-reagent'] = 71.85720179
DATA['NUCLEAR REPULSION ENERGY']['HTBH-C2H6NH2ts-reagent'] = 78.78495055
DATA['NUCLEAR REPULSION ENERGY']['HTBH-NH2CH4ts-reagent'] = 39.42842411
DATA['NUCLEAR REPULSION ENERGY']['HTBH-C5H8-reagent'] = 155.81524012
DATA['NUCLEAR REPULSION ENERGY']['HTBH-C5H8ts-reagent'] = 164.93671263
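Each database entry above encodes a barrier height as a signed sum over reagents: ACTV lists the reagents for a reaction and RXNM their stoichiometric coefficients, while BIND holds the reference value. A minimal sketch of that bookkeeping for reaction 1, using made-up placeholder energies rather than computed values:

# Sketch of how a database reaction is assembled from its reagents: the
# barrier is the coefficient-weighted sum of reagent energies. The energy
# values below are hypothetical placeholders chosen so the sum reproduces
# the 5.7 kcal/mol reference; they are not quantum chemical results.
rxnm_1 = {
    'HTBH-H-reagent': -1,
    'HTBH-HCl-reagent': -1,
    'HTBH-HHClts-reagent': +1,
}
fake_energies = {  # hypothetical reagent energies in kcal/mol
    'HTBH-H-reagent': -10.0,
    'HTBH-HCl-reagent': -250.0,
    'HTBH-HHClts-reagent': -254.3,
}
barrier = sum(coeff * fake_energies[reagent] for reagent, coeff in rxnm_1.items())
print(round(barrier, 1))  # 5.7, to be compared against BIND['HTBH-1'] above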
import pytest

from .addons import using_networkx
from .utils import *

import math

import numpy as np
import qcelemental as qcel

import psi4
from psi4.driver import qcdb

pytestmark = pytest.mark.quick


def hide_test_xtpl_fn_fn_error():
    psi4.geometry('He')
    with pytest.raises(psi4.UpgradeHelper) as e:
        psi4.energy('cbs', scf_basis='cc-pvdz', scf_scheme=psi4.driver_cbs.xtpl_highest_1)

    assert 'Replace extrapolation function with function name' in str(e.value)


def hide_test_xtpl_cbs_fn_error():
    psi4.geometry('He')
    with pytest.raises(psi4.UpgradeHelper) as e:
        psi4.energy(psi4.cbs, scf_basis='cc-pvdz')
        #psi4.energy(psi4.driver.driver_cbs.complete_basis_set, scf_basis='cc-pvdz')

    assert 'Replace cbs or complete_basis_set function with cbs string' in str(e.value)


@pytest.mark.parametrize("inp,out", [
    ((2, 'C2V'), 2),
    (('A2', 'c2v'), 2),
    (('2', 'C2V'), 2),
])
def test_parse_cotton_irreps(inp, out):
    idx = psi4.driver.driver_util.parse_cotton_irreps(*inp)
    assert idx == out


@pytest.mark.parametrize("inp", [
    ((5, 'cs')),
    (('5', 'cs')),
    ((0, 'cs')),
    (('a2', 'cs')),
])
def test_parse_cotton_irreps_error(inp):
    with pytest.raises(psi4.ValidationError) as e:
        psi4.driver.driver_util.parse_cotton_irreps(*inp)

    assert 'not valid for point group' in str(e.value)


# <<< TODO Deprecated! Delete in Psi4 v1.5 >>>

@using_networkx
def test_deprecated_qcdb_align_b787():
    soco10 = """
O  1.0 0.0 0.0
C  0.0 0.0 0.0
O -1.0 0.0 0.0
units ang
"""
    sooc12 = """
O  1.2 4.0 0.0
O -1.2 4.0 0.0
C  0.0 4.0 0.0
units ang
"""

    ref_rmsd = math.sqrt(2. * 0.2 * 0.2 / 3.)  # RMSD always in Angstroms

    oco10 = qcel.molparse.from_string(soco10)
    oco12 = qcel.molparse.from_string(sooc12)

    oco10_geom_au = oco10['qm']['geom'].reshape((-1, 3)) / qcel.constants.bohr2angstroms
    oco12_geom_au = oco12['qm']['geom'].reshape((-1, 3)) / qcel.constants.bohr2angstroms

    with pytest.warns(FutureWarning) as err:
        rmsd, mill = qcdb.align.B787(
            oco10_geom_au, oco12_geom_au, np.array(['O', 'C', 'O']), np.array(['O', 'O', 'C']),
            verbose=4, do_plot=False)

    assert compare_values(ref_rmsd, rmsd, 6, 'known rmsd B787')


def test_deprecated_qcdb_align_scramble():
    with pytest.warns(FutureWarning) as err:
        mill = qcdb.align.compute_scramble(
            4, do_resort=False, do_shift=False, do_rotate=False, deflection=1.0, do_mirror=False)

    assert compare_arrays([0, 1, 2, 3], mill.atommap, 4, 'atommap')


# <<< TODO Deprecated! Delete when the error messages are removed. >>>

def test_deprecated_dcft_calls():
    psi4.geometry('He')
    err_substr = "All instances of 'dcft' should be replaced with 'dct'."
    driver_calls = [psi4.energy, psi4.optimize, psi4.gradient, psi4.hessian, psi4.frequencies]

    for call in driver_calls:
        with pytest.raises(psi4.UpgradeHelper) as e:
            call('dcft', basis='cc-pvdz')

        assert err_substr in str(e.value)

    # The errors trapped below are C-side, so they're nameless, Py-side.
    with pytest.raises(Exception) as e:
        psi4.set_module_options('dcft', {'e_convergence': 9})

    assert err_substr in str(e.value)

    with pytest.raises(Exception) as e:
        psi4.set_module_options('dct', {'dcft_functional': 'odc-06'})

    assert err_substr in str(e.value)
CDSherrill/psi4
tests/pytests/test_misc.py
psi4/share/psi4/databases/HTBH.py
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2019 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#

"""Module with commands building :py:class:`~basislist.BasisFamily` objects
for Pople and other non-Dunning orbital basis sets. Some plausible fitting
basis sets are supplied as defaults.

"""
from .basislist import *


def load_basfam_other():

    # Pople
    basis_sto3g = BasisFamily('STO-3G', zeta=1)
    basis_321g = BasisFamily('3-21G', zeta=1)
    basisfamily_list.append(basis_sto3g)
    basisfamily_list.append(basis_321g)

    basis_631g = BasisFamily('6-31G', zeta=2)
    basis_631g_d_ = BasisFamily('6-31G(d)', zeta=2)
    basis_631g_d_p_ = BasisFamily('6-31G(d,p)', zeta=2)
    basis_631gs = BasisFamily('6-31G*', '6-31g_d_', zeta=2)
    basis_631gss = BasisFamily('6-31G**', '6-31g_d_p_', zeta=2)
    basis_631pg = BasisFamily('6-31+G', zeta=2)
    basis_631pg_d_ = BasisFamily('6-31+G(d)', zeta=2)
    basis_631pg_d_p_ = BasisFamily('6-31+G(d,p)', zeta=2)
    basis_631pgs = BasisFamily('6-31+G*', '6-31pg_d_', zeta=2)
    basis_631pgss = BasisFamily('6-31+G**', '6-31pg_d_p_', zeta=2)
    basis_631ppg = BasisFamily('6-31++G', zeta=2)
    basis_631ppg_d_ = BasisFamily('6-31++G(d)', zeta=2)
    basis_631ppg_d_p_ = BasisFamily('6-31++G(d,p)', zeta=2)
    basis_631ppgs = BasisFamily('6-31++G*', '6-31ppg_d_', zeta=2)
    basis_631ppgss = BasisFamily('6-31++G**', '6-31ppg_d_p_', zeta=2)
    basisfamily_list.append(basis_631g)
    basisfamily_list.append(basis_631g_d_)
    basisfamily_list.append(basis_631g_d_p_)
    basisfamily_list.append(basis_631gs)
    basisfamily_list.append(basis_631gss)
    basisfamily_list.append(basis_631pg)
    basisfamily_list.append(basis_631pg_d_)
    basisfamily_list.append(basis_631pg_d_p_)
    basisfamily_list.append(basis_631pgs)
    basisfamily_list.append(basis_631pgss)
    basisfamily_list.append(basis_631ppg)
    basisfamily_list.append(basis_631ppg_d_)
    basisfamily_list.append(basis_631ppg_d_p_)
    basisfamily_list.append(basis_631ppgs)
    basisfamily_list.append(basis_631ppgss)

    basis_6311g = BasisFamily('6-311G', zeta=3)
    basis_6311g_d_ = BasisFamily('6-311G(d)', zeta=3)
    basis_6311g_d_p_ = BasisFamily('6-311G(d,p)', zeta=3)
    basis_6311gs = BasisFamily('6-311G*', '6-311g_d_', zeta=3)
    basis_6311gss = BasisFamily('6-311G**', '6-311g_d_p_', zeta=3)
    basis_6311g_2d_ = BasisFamily('6-311G(2d)', zeta=3)
    basis_6311g_2d_p_ = BasisFamily('6-311G(2d,p)', zeta=3)
    basis_6311g_2d_2p_ = BasisFamily('6-311G(2d,2p)', zeta=3)
    basis_6311g_2df_ = BasisFamily('6-311G(2df)', zeta=3)
    basis_6311g_2df_p_ = BasisFamily('6-311G(2df,p)', zeta=3)
    basis_6311g_2df_2p_ = BasisFamily('6-311G(2df,2p)', zeta=3)
    basis_6311g_2df_2pd_ = BasisFamily('6-311G(2df,2pd)', zeta=3)
    basis_6311g_3df_ = BasisFamily('6-311G(3df)', zeta=3)
    basis_6311g_3df_p_ = BasisFamily('6-311G(3df,p)', zeta=3)
    basis_6311g_3df_2p_ = BasisFamily('6-311G(3df,2p)', zeta=3)
    basis_6311g_3df_2pd_ = BasisFamily('6-311G(3df,2pd)', zeta=3)
    basis_6311g_3df_3pd_ = BasisFamily('6-311G(3df,3pd)', zeta=3)
    basisfamily_list.append(basis_6311g)
    basisfamily_list.append(basis_6311g_d_)
    basisfamily_list.append(basis_6311g_d_p_)
    basisfamily_list.append(basis_6311gs)
    basisfamily_list.append(basis_6311gss)
    basisfamily_list.append(basis_6311g_2d_)
    basisfamily_list.append(basis_6311g_2d_p_)
    basisfamily_list.append(basis_6311g_2d_2p_)
    basisfamily_list.append(basis_6311g_2df_)
    basisfamily_list.append(basis_6311g_2df_p_)
    basisfamily_list.append(basis_6311g_2df_2p_)
    basisfamily_list.append(basis_6311g_2df_2pd_)
    basisfamily_list.append(basis_6311g_3df_)
    basisfamily_list.append(basis_6311g_3df_p_)
    basisfamily_list.append(basis_6311g_3df_2p_)
    basisfamily_list.append(basis_6311g_3df_2pd_)
    basisfamily_list.append(basis_6311g_3df_3pd_)

    basis_6311pg = BasisFamily('6-311+G', zeta=3)
    basis_6311pg_d_ = BasisFamily('6-311+G(d)', zeta=3)
    basis_6311pg_d_p_ = BasisFamily('6-311+G(d,p)', zeta=3)
    basis_6311pgs = BasisFamily('6-311+G*', '6-311pg_d_', zeta=3)
    basis_6311pgss = BasisFamily('6-311+G**', '6-311pg_d_p_', zeta=3)
    basis_6311pg_2d_ = BasisFamily('6-311+G(2d)', zeta=3)
    basis_6311pg_2d_p_ = BasisFamily('6-311+G(2d,p)', zeta=3)
    basis_6311pg_2d_2p_ = BasisFamily('6-311+G(2d,2p)', zeta=3)
    basis_6311pg_2df_ = BasisFamily('6-311+G(2df)', zeta=3)
    basis_6311pg_2df_p_ = BasisFamily('6-311+G(2df,p)', zeta=3)
    basis_6311pg_2df_2p_ = BasisFamily('6-311+G(2df,2p)', zeta=3)
    basis_6311pg_2df_2pd_ = BasisFamily('6-311+G(2df,2pd)', zeta=3)
    basis_6311pg_3df_ = BasisFamily('6-311+G(3df)', zeta=3)
    basis_6311pg_3df_p_ = BasisFamily('6-311+G(3df,p)', zeta=3)
    basis_6311pg_3df_2p_ = BasisFamily('6-311+G(3df,2p)', zeta=3)
    basis_6311pg_3df_2pd_ = BasisFamily('6-311+G(3df,2pd)', zeta=3)
    basis_6311pg_3df_3pd_ = BasisFamily('6-311+G(3df,3pd)', zeta=3)
    basisfamily_list.append(basis_6311pg)
    basisfamily_list.append(basis_6311pg_d_)
    basisfamily_list.append(basis_6311pg_d_p_)
    basisfamily_list.append(basis_6311pgs)
    basisfamily_list.append(basis_6311pgss)
    basisfamily_list.append(basis_6311pg_2d_)
    basisfamily_list.append(basis_6311pg_2d_p_)
    basisfamily_list.append(basis_6311pg_2d_2p_)
    basisfamily_list.append(basis_6311pg_2df_)
    basisfamily_list.append(basis_6311pg_2df_p_)
    basisfamily_list.append(basis_6311pg_2df_2p_)
    basisfamily_list.append(basis_6311pg_2df_2pd_)
    basisfamily_list.append(basis_6311pg_3df_)
    basisfamily_list.append(basis_6311pg_3df_p_)
    basisfamily_list.append(basis_6311pg_3df_2p_)
    basisfamily_list.append(basis_6311pg_3df_2pd_)
    basisfamily_list.append(basis_6311pg_3df_3pd_)

    basis_6311ppg = BasisFamily('6-311++G', zeta=3)
    basis_6311ppg_d_ = BasisFamily('6-311++G(d)', zeta=3)
    basis_6311ppg_d_p_ = BasisFamily('6-311++G(d,p)', zeta=3)
    basis_6311ppgs = BasisFamily('6-311++G*', '6-311ppg_d_', zeta=3)
    basis_6311ppgss = BasisFamily('6-311++G**', '6-311ppg_d_p_', zeta=3)
    basis_6311ppg_2d_ = BasisFamily('6-311++G(2d)', zeta=3)
    basis_6311ppg_2d_p_ = BasisFamily('6-311++G(2d,p)', zeta=3)
    basis_6311ppg_2d_2p_ = BasisFamily('6-311++G(2d,2p)', zeta=3)
    basis_6311ppg_2df_ = BasisFamily('6-311++G(2df)', zeta=3)
    basis_6311ppg_2df_p_ = BasisFamily('6-311++G(2df,p)', zeta=3)
    basis_6311ppg_2df_2p_ = BasisFamily('6-311++G(2df,2p)', zeta=3)
    basis_6311ppg_2df_2pd_ = BasisFamily('6-311++G(2df,2pd)', zeta=3)
    basis_6311ppg_3df_ = BasisFamily('6-311++G(3df)', zeta=3)
    basis_6311ppg_3df_p_ = BasisFamily('6-311++G(3df,p)', zeta=3)
    basis_6311ppg_3df_2p_ = BasisFamily('6-311++G(3df,2p)', zeta=3)
    basis_6311ppg_3df_2pd_ = BasisFamily('6-311++G(3df,2pd)', zeta=3)
    basis_6311ppg_3df_3pd_ = BasisFamily('6-311++G(3df,3pd)', zeta=3)
    basisfamily_list.append(basis_6311ppg)
    basisfamily_list.append(basis_6311ppg_d_)
    basisfamily_list.append(basis_6311ppg_d_p_)
    basisfamily_list.append(basis_6311ppgs)
    basisfamily_list.append(basis_6311ppgss)
    basisfamily_list.append(basis_6311ppg_2d_)
    basisfamily_list.append(basis_6311ppg_2d_p_)
    basisfamily_list.append(basis_6311ppg_2d_2p_)
    basisfamily_list.append(basis_6311ppg_2df_)
    basisfamily_list.append(basis_6311ppg_2df_p_)
    basisfamily_list.append(basis_6311ppg_2df_2p_)
    basisfamily_list.append(basis_6311ppg_2df_2pd_)
    basisfamily_list.append(basis_6311ppg_3df_)
    basisfamily_list.append(basis_6311ppg_3df_p_)
    basisfamily_list.append(basis_6311ppg_3df_2p_)
    basisfamily_list.append(basis_6311ppg_3df_2pd_)
    basisfamily_list.append(basis_6311ppg_3df_3pd_)

    # Ahlrichs
    basis_def2sv_p_ = BasisFamily('def2-SV(P)', zeta=2)
    basis_def2msvp = BasisFamily('def2-mSVP', zeta=2)
    basis_def2svp = BasisFamily('def2-SVP', zeta=2)
    basis_def2svpd = BasisFamily('def2-SVPD', zeta=2)
    basis_def2tzvp = BasisFamily('def2-TZVP', zeta=3)
    basis_def2tzvpd = BasisFamily('def2-TZVPD', zeta=3)
    basis_def2tzvpp = BasisFamily('def2-TZVPP', zeta=3)
    basis_def2tzvppd = BasisFamily('def2-TZVPPD', zeta=3)
    basis_def2qzvp = BasisFamily('def2-QZVP', zeta=4)
    basis_def2qzvpd = BasisFamily('def2-QZVPD', zeta=4)
    basis_def2qzvpp = BasisFamily('def2-QZVPP', zeta=4)
    basis_def2qzvppd = BasisFamily('def2-QZVPPD', zeta=4)
    basis_def2sv_p_.add_jfit('def2-universal-JFIT')
    basis_def2msvp.add_jfit('def2-universal-JFIT')
    basis_def2svp.add_jfit('def2-universal-JFIT')
    basis_def2svpd.add_jfit('def2-universal-JFIT')
    basis_def2tzvp.add_jfit('def2-universal-JFIT')
    basis_def2tzvpd.add_jfit('def2-universal-JFIT')
    basis_def2tzvpp.add_jfit('def2-universal-JFIT')
    basis_def2tzvppd.add_jfit('def2-universal-JFIT')
    basis_def2qzvp.add_jfit('def2-universal-JFIT')
    basis_def2qzvpd.add_jfit('def2-universal-JFIT')
    basis_def2qzvpp.add_jfit('def2-universal-JFIT')
    basis_def2qzvppd.add_jfit('def2-universal-JFIT')
    basis_def2sv_p_.add_jkfit('def2-universal-JKFIT')
    basis_def2msvp.add_jkfit('def2-universal-JKFIT')
    basis_def2svp.add_jkfit('def2-universal-JKFIT')
    basis_def2svpd.add_jkfit('def2-universal-JKFIT')
    basis_def2tzvp.add_jkfit('def2-universal-JKFIT')
    basis_def2tzvpd.add_jkfit('def2-universal-JKFIT')
    basis_def2tzvpp.add_jkfit('def2-universal-JKFIT')
    basis_def2tzvppd.add_jkfit('def2-universal-JKFIT')
    basis_def2qzvp.add_jkfit('def2-universal-JKFIT')
    basis_def2qzvpd.add_jkfit('def2-universal-JKFIT')
    basis_def2qzvpp.add_jkfit('def2-universal-JKFIT')
    basis_def2qzvppd.add_jkfit('def2-universal-JKFIT')
    basis_def2sv_p_.add_rifit('def2-SV(P)-RI')
    basis_def2msvp.add_rifit('def2-SVP-RI')
    basis_def2svp.add_rifit('def2-SVP-RI')
    basis_def2svpd.add_rifit('def2-SVPD-RI')
    basis_def2tzvp.add_rifit('def2-TZVP-RI')
    basis_def2tzvpd.add_rifit('def2-TZVPD-RI')
    basis_def2tzvpp.add_rifit('def2-TZVPP-RI')
    basis_def2tzvppd.add_rifit('def2-TZVPPD-RI')
    basis_def2qzvp.add_rifit('def2-QZVP-RI')
    basis_def2qzvpp.add_rifit('def2-QZVPP-RI')
    basis_def2qzvppd.add_rifit('def2-QZVPPD-RI')
    basisfamily_list.append(basis_def2sv_p_)
    basisfamily_list.append(basis_def2msvp)
    basisfamily_list.append(basis_def2svp)
    basisfamily_list.append(basis_def2svpd)
    basisfamily_list.append(basis_def2tzvp)
    basisfamily_list.append(basis_def2tzvpd)
    basisfamily_list.append(basis_def2tzvpp)
    basisfamily_list.append(basis_def2tzvppd)
    basisfamily_list.append(basis_def2qzvp)
    basisfamily_list.append(basis_def2qzvpd)
    basisfamily_list.append(basis_def2qzvpp)
    basisfamily_list.append(basis_def2qzvppd)

    # Jensen
    basis_augpcseg0 = BasisFamily('aug-pcseg-0', zeta=1)
    basis_augpcseg1 = BasisFamily('aug-pcseg-1', zeta=2)
    basis_augpcseg2 = BasisFamily('aug-pcseg-2', zeta=3)
    basis_augpcseg3 = BasisFamily('aug-pcseg-3', zeta=4)
    basis_augpcseg4 = BasisFamily('aug-pcseg-4', zeta=5)
    basis_augpcsseg0 = BasisFamily('aug-pcSseg-0', zeta=1)
    basis_augpcsseg1 = BasisFamily('aug-pcSseg-1', zeta=2)
    basis_augpcsseg2 = BasisFamily('aug-pcSseg-2', zeta=3)
    basis_augpcsseg3 = BasisFamily('aug-pcSseg-3', zeta=4)
    basis_augpcsseg4 = BasisFamily('aug-pcSseg-4', zeta=5)
    basis_pcseg0 = BasisFamily('pcseg-0', zeta=1)
    basis_pcseg1 = BasisFamily('pcseg-1', zeta=2)
    basis_pcseg2 = BasisFamily('pcseg-2', zeta=3)
    basis_pcseg3 = BasisFamily('pcseg-3', zeta=4)
    basis_pcseg4 = BasisFamily('pcseg-4', zeta=5)
    basis_pcsseg0 = BasisFamily('pcSseg-0', zeta=1)
    basis_pcsseg1 = BasisFamily('pcSseg-1', zeta=2)
    basis_pcsseg2 = BasisFamily('pcSseg-2', zeta=3)
    basis_pcsseg3 = BasisFamily('pcSseg-3', zeta=4)
    basis_pcsseg4 = BasisFamily('pcSseg-4', zeta=5)

    # Here lie practical (non-validated) fitting bases for
    # Jensen orbital basis sets
    basis_augpcseg0.add_jfit('def2-universal-JFIT')
    basis_augpcseg1.add_jfit('def2-universal-JFIT')
    basis_augpcseg2.add_jfit('def2-universal-JFIT')
    basis_augpcseg3.add_jfit('def2-universal-JFIT')
    basis_augpcsseg0.add_jfit('def2-universal-JFIT')
    basis_augpcsseg1.add_jfit('def2-universal-JFIT')
    basis_augpcsseg2.add_jfit('def2-universal-JFIT')
    basis_augpcsseg3.add_jfit('def2-universal-JFIT')
    basis_pcseg0.add_jfit('def2-universal-JFIT')
    basis_pcseg1.add_jfit('def2-universal-JFIT')
    basis_pcseg2.add_jfit('def2-universal-JFIT')
    basis_pcseg3.add_jfit('def2-universal-JFIT')
    basis_pcsseg0.add_jfit('def2-universal-JFIT')
    basis_pcsseg1.add_jfit('def2-universal-JFIT')
    basis_pcsseg2.add_jfit('def2-universal-JFIT')
    basis_pcsseg3.add_jfit('def2-universal-JFIT')
    basis_augpcseg0.add_jkfit('def2-universal-JKFIT')
    basis_augpcseg1.add_jkfit('def2-universal-JKFIT')
    basis_augpcseg2.add_jkfit('def2-universal-JKFIT')
    basis_augpcseg3.add_jkfit('def2-universal-JKFIT')
    basis_augpcseg4.add_jkfit('aug-cc-pV5Z-JKFIT')
    basis_augpcsseg0.add_jkfit('def2-universal-JKFIT')
    basis_augpcsseg1.add_jkfit('def2-universal-JKFIT')
    basis_augpcsseg2.add_jkfit('def2-universal-JKFIT')
    basis_augpcsseg3.add_jkfit('def2-universal-JKFIT')
    basis_augpcsseg4.add_jkfit('aug-cc-pV5Z-JKFIT')
    basis_pcseg0.add_jkfit('def2-universal-JKFIT')
    basis_pcseg1.add_jkfit('def2-universal-JKFIT')
    basis_pcseg2.add_jkfit('def2-universal-JKFIT')
    basis_pcseg3.add_jkfit('def2-universal-JKFIT')
    basis_pcseg4.add_jkfit('cc-pV5Z-JKFIT')
    basis_pcsseg0.add_jkfit('def2-universal-JKFIT')
    basis_pcsseg1.add_jkfit('def2-universal-JKFIT')
    basis_pcsseg2.add_jkfit('def2-universal-JKFIT')
    basis_pcsseg3.add_jkfit('def2-universal-JKFIT')
    basis_pcsseg4.add_jkfit('cc-pV5Z-JKFIT')
    basis_augpcseg0.add_rifit('def2-SV(P)-RI')
    basis_augpcseg1.add_rifit('def2-SVPD-RI')
    basis_augpcseg2.add_rifit('def2-TZVPPD-RI')
    basis_augpcseg3.add_rifit('def2-QZVPPD-RI')
    basis_augpcseg4.add_rifit('aug-cc-pV5Z-RI')
    basis_augpcsseg0.add_rifit('def2-SV(P)-RI')
    basis_augpcsseg1.add_rifit('def2-SVPD-RI')
    basis_augpcsseg2.add_rifit('def2-TZVPPD-RI')
    basis_augpcsseg3.add_rifit('def2-QZVPPD-RI')
    basis_augpcsseg4.add_rifit('aug-cc-pwCV5Z-RI')
    basis_pcseg0.add_rifit('def2-SV(P)-RI')
    basis_pcseg1.add_rifit('def2-SVP-RI')
    basis_pcseg2.add_rifit('def2-TZVPP-RI')
    basis_pcseg3.add_rifit('def2-QZVPP-RI')
    basis_pcseg4.add_rifit('cc-pV5Z-RI')
    basis_pcsseg0.add_rifit('def2-SV(P)-RI')
    basis_pcsseg1.add_rifit('def2-SVP-RI')
    basis_pcsseg2.add_rifit('def2-TZVPP-RI')
    basis_pcsseg3.add_rifit('def2-QZVPP-RI')
    basis_pcsseg4.add_rifit('cc-pwCV5Z-RI')
    basisfamily_list.append(basis_augpcseg0)
    basisfamily_list.append(basis_augpcseg1)
    basisfamily_list.append(basis_augpcseg2)
    basisfamily_list.append(basis_augpcseg3)
    basisfamily_list.append(basis_augpcseg4)
    basisfamily_list.append(basis_augpcsseg0)
    basisfamily_list.append(basis_augpcsseg1)
    basisfamily_list.append(basis_augpcsseg2)
    basisfamily_list.append(basis_augpcsseg3)
    basisfamily_list.append(basis_augpcsseg4)
    basisfamily_list.append(basis_pcseg0)
    basisfamily_list.append(basis_pcseg1)
    basisfamily_list.append(basis_pcseg2)
    basisfamily_list.append(basis_pcseg3)
    basisfamily_list.append(basis_pcseg4)
    basisfamily_list.append(basis_pcsseg0)
    basisfamily_list.append(basis_pcsseg1)
    basisfamily_list.append(basis_pcsseg2)
    basisfamily_list.append(basis_pcsseg3)
    basisfamily_list.append(basis_pcsseg4)

    # Minix
    basis_minix = BasisFamily('minix', zeta=2)
    basis_minix.add_jfit('def2-universal-JFIT')
    basis_minix.add_jkfit('def2-universal-JKFIT')
    basis_minix.add_rifit('def2-SVP-RI')
    basisfamily_list.append(basis_minix)

    # Others
    basis_dz = BasisFamily('DZ')
    basis_dzp = BasisFamily('DZP')
    basis_dzvp = BasisFamily('DZVP')
    basis_psi3dzp = BasisFamily('psi3-DZP')
    basis_psi3tz2p = BasisFamily('psi3-TZ2P')
    basis_psi3tz2pf = BasisFamily('psi3-TZ2PF')
    basis_sadlejlpoldl = BasisFamily('sadlej-lpol-dl')
    basis_sadlejlpolds = BasisFamily('sadlej-lpol-ds')
    basis_sadlejlpolfl = BasisFamily('sadlej-lpol-fl')
    basis_sadlejlpolfs = BasisFamily('sadlej-lpol-fs')
    basisfamily_list.append(basis_dz)
    basisfamily_list.append(basis_dzp)
    basisfamily_list.append(basis_dzvp)
    basisfamily_list.append(basis_psi3dzp)
    basisfamily_list.append(basis_psi3tz2p)
    basisfamily_list.append(basis_psi3tz2pf)
    basisfamily_list.append(basis_sadlejlpoldl)
    basisfamily_list.append(basis_sadlejlpolds)
    basisfamily_list.append(basis_sadlejlpolfl)
    basisfamily_list.append(basis_sadlejlpolfs)

    # Here lie practical (non-validated) fitting bases for
    # Pople orbital basis sets
    basis_sto3g.add_jkfit('def2-universal-JKFIT')
    basis_sto3g.add_rifit('def2-SVP-RIFIT')
    basis_321g.add_jkfit('def2-universal-JKFIT')
    basis_321g.add_rifit('def2-SVP-RIFIT')
    basis_631g.add_jkfit('cc-pvdz-jkfit')
    basis_631g_d_.add_jkfit('cc-pvdz-jkfit')
    basis_631g_d_p_.add_jkfit('cc-pvdz-jkfit')
    basis_631gs.add_jkfit('cc-pvdz-jkfit')
    basis_631gss.add_jkfit('cc-pvdz-jkfit')
    basis_631g.add_rifit('cc-pvdz-ri')
    basis_631g_d_.add_rifit('cc-pvdz-ri')
    basis_631g_d_p_.add_rifit('cc-pvdz-ri')
    basis_631gs.add_rifit('cc-pvdz-ri')
    basis_631gss.add_rifit('cc-pvdz-ri')
    basis_631pg.add_jkfit('heavy-aug-cc-pvdz-jkfit')
    basis_631pg_d_.add_jkfit('heavy-aug-cc-pvdz-jkfit')
    basis_631pg_d_p_.add_jkfit('heavy-aug-cc-pvdz-jkfit')
    basis_631pgs.add_jkfit('heavy-aug-cc-pvdz-jkfit')
    basis_631pgss.add_jkfit('heavy-aug-cc-pvdz-jkfit')
    basis_631pg.add_rifit('heavy-aug-cc-pvdz-ri')
    basis_631pg_d_.add_rifit('heavy-aug-cc-pvdz-ri')
    basis_631pg_d_p_.add_rifit('heavy-aug-cc-pvdz-ri')
    basis_631pgs.add_rifit('heavy-aug-cc-pvdz-ri')
    basis_631pgss.add_rifit('heavy-aug-cc-pvdz-ri')
    basis_631ppg.add_jkfit('aug-cc-pvdz-jkfit')
    basis_631ppg_d_.add_jkfit('aug-cc-pvdz-jkfit')
    basis_631ppg_d_p_.add_jkfit('aug-cc-pvdz-jkfit')
    basis_631ppgs.add_jkfit('aug-cc-pvdz-jkfit')
    basis_631ppgss.add_jkfit('aug-cc-pvdz-jkfit')
    basis_631ppg.add_rifit('aug-cc-pvdz-ri')
    basis_631ppg_d_.add_rifit('aug-cc-pvdz-ri')
    basis_631ppg_d_p_.add_rifit('aug-cc-pvdz-ri')
    basis_631ppgs.add_rifit('aug-cc-pvdz-ri')
    basis_631ppgss.add_rifit('aug-cc-pvdz-ri')
    basis_6311g.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_d_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_d_p_.add_jkfit('cc-pvtz-jkfit')
    basis_6311gs.add_jkfit('cc-pvtz-jkfit')
    basis_6311gss.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_2d_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_2d_p_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_2d_2p_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_2df_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_2df_p_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_2df_2p_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_2df_2pd_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_3df_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_3df_p_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_3df_2p_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_3df_2pd_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g_3df_3pd_.add_jkfit('cc-pvtz-jkfit')
    basis_6311g.add_rifit('cc-pvtz-ri')
    basis_6311g_d_.add_rifit('cc-pvtz-ri')
    basis_6311g_d_p_.add_rifit('cc-pvtz-ri')
    basis_6311gs.add_rifit('cc-pvtz-ri')
    basis_6311gss.add_rifit('cc-pvtz-ri')
    basis_6311g_2d_.add_rifit('cc-pvtz-ri')
    basis_6311g_2d_p_.add_rifit('cc-pvtz-ri')
    basis_6311g_2d_2p_.add_rifit('cc-pvtz-ri')
    basis_6311g_2df_.add_rifit('cc-pvtz-ri')
    basis_6311g_2df_p_.add_rifit('cc-pvtz-ri')
    basis_6311g_2df_2p_.add_rifit('cc-pvtz-ri')
    basis_6311g_2df_2pd_.add_rifit('cc-pvtz-ri')
    basis_6311g_3df_.add_rifit('cc-pvtz-ri')
    basis_6311g_3df_p_.add_rifit('cc-pvtz-ri')
    basis_6311g_3df_2p_.add_rifit('cc-pvtz-ri')
    basis_6311g_3df_2pd_.add_rifit('cc-pvtz-ri')
    basis_6311g_3df_3pd_.add_rifit('cc-pvtz-ri')
    basis_6311pg.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_d_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_d_p_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pgs.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pgss.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_2d_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_2d_p_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_2d_2p_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_2df_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_2df_p_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_2df_2p_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_2df_2pd_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_3df_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_3df_p_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_3df_2p_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_3df_2pd_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg_3df_3pd_.add_jkfit('heavy-aug-cc-pvtz-jkfit')
    basis_6311pg.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_d_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_d_p_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pgs.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pgss.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_2d_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_2d_p_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_2d_2p_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_2df_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_2df_p_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_2df_2p_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_2df_2pd_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_3df_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_3df_p_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_3df_2p_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_3df_2pd_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311pg_3df_3pd_.add_rifit('heavy-aug-cc-pvtz-ri')
    basis_6311ppg.add_jkfit('aug-cc-pvtz-jkfit')
    basis_6311ppg_d_.add_jkfit('aug-cc-pvtz-jkfit')
    basis_6311ppg_d_p_.add_jkfit('aug-cc-pvtz-jkfit')
basis_6311ppgs.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppgss.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2d_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2d_p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2d_2p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2df_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2df_p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2df_2p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_2df_2pd_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_2p_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_2pd_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg_3df_3pd_.add_jkfit('aug-cc-pvtz-jkfit') basis_6311ppg.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_d_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_d_p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppgs.add_rifit('aug-cc-pvtz-ri') basis_6311ppgss.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2d_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2d_p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2d_2p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2df_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2df_p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2df_2p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_2df_2pd_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_2p_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_2pd_.add_rifit('aug-cc-pvtz-ri') basis_6311ppg_3df_3pd_.add_rifit('aug-cc-pvtz-ri') # Petersson's nZaPa-NR basis sets basis_2zapa_nr = BasisFamily('2zapa-nr',zeta=2) basis_3zapa_nr = BasisFamily('3zapa-nr',zeta=3) basis_4zapa_nr = BasisFamily('4zapa-nr',zeta=4) basis_5zapa_nr = BasisFamily('5zapa-nr',zeta=5) basis_6zapa_nr = BasisFamily('6zapa-nr',zeta=6) basis_7zapa_nr = BasisFamily('7zapa-nr',zeta=7) # fitting sets for nZaPa-NR # Dunnings zeta+1 to be safe, tested on water dimer # the full aug-JKFIT is possibly too much #--------SCF-JKFIT error for nZaPa-NR # results for GS energies of water dimer: # delta_jk = E_conv - E_DFJK # ZaPa zeta 2 : delta_jk = -0.000009 # ZaPa zeta 3 : delta_jk = -0.000002 # ZaPa zeta 4 : delta_jk = -0.000002 # ZaPa zeta 5 : delta_jk = -0.000002 # ZaPa zeta 6 : delta_jk = 0.000000 # ZaPa zeta 7 : delta_jk = 0.000000 basis_2zapa_nr.add_jkfit('aug-cc-pvtz-jkfit') basis_3zapa_nr.add_jkfit('aug-cc-pvqz-jkfit') basis_4zapa_nr.add_jkfit('aug-cc-pv5z-jkfit') basis_5zapa_nr.add_jkfit('aug-cc-pv5z-jkfit') basis_6zapa_nr.add_jkfit('aug-cc-pv6z-ri') basis_7zapa_nr.add_jkfit('aug-cc-pv6z-ri') basis_2zapa_nr.add_rifit('aug-cc-pvtz-ri') basis_3zapa_nr.add_rifit('aug-cc-pvqz-ri') basis_4zapa_nr.add_rifit('aug-cc-pv5z-ri') basis_5zapa_nr.add_rifit('aug-cc-pv6z-ri') basis_6zapa_nr.add_rifit('aug-cc-pv6z-ri') basis_7zapa_nr.add_rifit('aug-cc-pv6z-ri') basisfamily_list.append(basis_2zapa_nr) basisfamily_list.append(basis_3zapa_nr) basisfamily_list.append(basis_4zapa_nr) basisfamily_list.append(basis_5zapa_nr) basisfamily_list.append(basis_6zapa_nr) basisfamily_list.append(basis_7zapa_nr) # F12 basis sets basis_cc_pvdz_f12 = BasisFamily('cc-pvdz-f12',zeta=2) basis_cc_pvtz_f12 = BasisFamily('cc-pvtz-f12',zeta=3) basis_cc_pvqz_f12 = BasisFamily('cc-pvqz-f12',zeta=4) # basis_cc_pv5z_f12 = BasisFamily('cc-pV5Z-F12') # ORCA manual suggests for F12 basis sets Dunning's zeta+1 basis_cc_pvdz_f12.add_jkfit('cc-pvtz-jkfit') basis_cc_pvtz_f12.add_jkfit('cc-pvqz-jkfit') basis_cc_pvqz_f12.add_jkfit('cc-pv5z-jkfit') basis_cc_pvdz_f12.add_rifit('cc-pvtz-ri') 
basis_cc_pvtz_f12.add_rifit('cc-pvqz-ri') basis_cc_pvqz_f12.add_rifit('cc-pv5z-ri') basisfamily_list.append(basis_cc_pvdz_f12) basisfamily_list.append(basis_cc_pvtz_f12) basisfamily_list.append(basis_cc_pvqz_f12) # basisfamily_list.append(basis_cc_pv5z_f12)
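# A minimal, self-contained sketch of the registry pattern used above.
# The real BasisFamily class lives elsewhere in qcdb; only the constructor
# signature and the add_jfit/add_jkfit/add_rifit calls mirror the code
# above, and the attribute names here are illustrative assumptions.
class BasisFamily:
    def __init__(self, name, zeta=None):
        self.name = name
        self.zeta = zeta
        self.jfit = None
        self.jkfit = None
        self.rifit = None

    def add_jfit(self, fit):
        self.jfit = fit

    def add_jkfit(self, fit):
        self.jkfit = fit

    def add_rifit(self, fit):
        self.rifit = fit


basisfamily_list = []
svp = BasisFamily('def2-SVP', zeta=2)
svp.add_jkfit('def2-universal-JKFIT')
svp.add_rifit('def2-SVP-RI')
basisfamily_list.append(svp)


def rifit_for(orbital):
    """Return the RI fitting basis registered for an orbital basis, if any."""
    for family in basisfamily_list:
        if family.name.lower() == orbital.lower():
            return family.rifit
    return None


assert rifit_for('def2-svp') == 'def2-SVP-RI'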
import pytest from .addons import using_networkx from .utils import * import math import numpy as np import qcelemental as qcel import psi4 from psi4.driver import qcdb pytestmark = pytest.mark.quick def hide_test_xtpl_fn_fn_error(): psi4.geometry('He') with pytest.raises(psi4.UpgradeHelper) as e: psi4.energy('cbs', scf_basis='cc-pvdz', scf_scheme=psi4.driver_cbs.xtpl_highest_1) assert 'Replace extrapolation function with function name' in str(e.value) def hide_test_xtpl_cbs_fn_error(): psi4.geometry('He') with pytest.raises(psi4.UpgradeHelper) as e: psi4.energy(psi4.cbs, scf_basis='cc-pvdz') #psi4.energy(psi4.driver.driver_cbs.complete_basis_set, scf_basis='cc-pvdz') assert 'Replace cbs or complete_basis_set function with cbs string' in str(e.value) @pytest.mark.parametrize("inp,out", [ ((2, 'C2V'), 2), (('A2', 'c2v'), 2), (('2', 'C2V'), 2), ]) def test_parse_cotton_irreps(inp, out): idx = psi4.driver.driver_util.parse_cotton_irreps(*inp) assert idx == out @pytest.mark.parametrize("inp", [ ((5, 'cs')), (('5', 'cs')), ((0, 'cs')), (('a2', 'cs')), ]) def test_parse_cotton_irreps_error(inp): with pytest.raises(psi4.ValidationError) as e: psi4.driver.driver_util.parse_cotton_irreps(*inp) assert 'not valid for point group' in str(e.value) # <<< TODO Deprecated! Delete in Psi4 v1.5 >>> @using_networkx def test_deprecated_qcdb_align_b787(): soco10 = """ O 1.0 0.0 0.0 C 0.0 0.0 0.0 O -1.0 0.0 0.0 units ang """ sooc12 = """ O 1.2 4.0 0.0 O -1.2 4.0 0.0 C 0.0 4.0 0.0 units ang """ ref_rmsd = math.sqrt(2. * 0.2 * 0.2 / 3.) # RMSD always in Angstroms oco10 = qcel.molparse.from_string(soco10) oco12 = qcel.molparse.from_string(sooc12) oco10_geom_au = oco10['qm']['geom'].reshape((-1, 3)) / qcel.constants.bohr2angstroms oco12_geom_au = oco12['qm']['geom'].reshape((-1, 3)) / qcel.constants.bohr2angstroms with pytest.warns(FutureWarning) as err: rmsd, mill = qcdb.align.B787( oco10_geom_au, oco12_geom_au, np.array(['O', 'C', 'O']), np.array(['O', 'O', 'C']), verbose=4, do_plot=False) assert compare_values(ref_rmsd, rmsd, 6, 'known rmsd B787') def test_deprecated_qcdb_align_scramble(): with pytest.warns(FutureWarning) as err: mill = qcdb.align.compute_scramble(4, do_resort=False, do_shift=False, do_rotate=False, deflection=1.0, do_mirror=False) assert compare_arrays([0,1,2,3], mill.atommap, 4, 'atommap') # <<< TODO Deprecated! Delete when the error messages are removed. >>> def test_deprecated_dcft_calls(): psi4.geometry('He') err_substr = "All instances of 'dcft' should be replaced with 'dct'." driver_calls = [psi4.energy, psi4.optimize, psi4.gradient, psi4.hessian, psi4.frequencies] for call in driver_calls: with pytest.raises(psi4.UpgradeHelper) as e: call('dcft', basis='cc-pvdz') assert err_substr in str(e.value) # The errors trapped below are C-side, so they're nameless, Py-side. with pytest.raises(Exception) as e: psi4.set_module_options('dcft', {'e_convergence': 9}) assert err_substr in str(e.value) with pytest.raises(Exception) as e: psi4.set_module_options('dct', {'dcft_functional': 'odc-06'}) assert err_substr in str(e.value)
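# The UpgradeHelper/ValidationError tests above all follow the same
# trap-and-inspect pattern: raise inside pytest.raises, then assert on the
# captured message. A runnable distillation with a toy parser
# (parse_positive is invented for the demo and is not part of psi4):
import pytest


def parse_positive(text):
    value = int(text)
    if value <= 0:
        raise ValueError("{} is not a positive integer".format(value))
    return value


def test_parse_positive_rejects_nonpositive():
    with pytest.raises(ValueError) as e:
        parse_positive("-3")
    assert "not a positive integer" in str(e.value)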
CDSherrill/psi4
tests/pytests/test_misc.py
psi4/driver/qcdb/basislistother.py
# flake8: noqa from pandas.core.reshape.concat import concat from pandas.core.reshape.melt import lreshape, melt, wide_to_long from pandas.core.reshape.merge import merge, merge_asof, merge_ordered from pandas.core.reshape.pivot import crosstab, pivot, pivot_table from pandas.core.reshape.reshape import get_dummies from pandas.core.reshape.tile import cut, qcut
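# pandas.core.reshape.api above is a pure re-export facade: it gathers the
# reshape entry points in one namespace so the top-level package can expose
# them. A quick identity check, assuming (as in pandas of this vintage) that
# the top-level package imports from this module:
import pandas as pd
from pandas.core.reshape.api import concat

assert concat is pd.concat  # same function object, just re-exported
print(concat([pd.DataFrame({"k": [1]}), pd.DataFrame({"k": [2]})], ignore_index=True))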
""" Note: for naming purposes, most tests are title with as e.g. "test_nlargest_foo" but are implicitly also testing nsmallest_foo. """ from string import ascii_lowercase import numpy as np import pytest import pandas as pd import pandas._testing as tm @pytest.fixture def df_duplicates(): return pd.DataFrame( {"a": [1, 2, 3, 4, 4], "b": [1, 1, 1, 1, 1], "c": [0, 1, 2, 5, 4]}, index=[0, 0, 1, 1, 1], ) @pytest.fixture def df_strings(): return pd.DataFrame( { "a": np.random.permutation(10), "b": list(ascii_lowercase[:10]), "c": np.random.permutation(10).astype("float64"), } ) @pytest.fixture def df_main_dtypes(): return pd.DataFrame( { "group": [1, 1, 2], "int": [1, 2, 3], "float": [4.0, 5.0, 6.0], "string": list("abc"), "category_string": pd.Series(list("abc")).astype("category"), "category_int": [7, 8, 9], "datetime": pd.date_range("20130101", periods=3), "datetimetz": pd.date_range("20130101", periods=3, tz="US/Eastern"), "timedelta": pd.timedelta_range("1 s", periods=3, freq="s"), }, columns=[ "group", "int", "float", "string", "category_string", "category_int", "datetime", "datetimetz", "timedelta", ], ) class TestNLargestNSmallest: # ---------------------------------------------------------------------- # Top / bottom @pytest.mark.parametrize( "order", [ ["a"], ["c"], ["a", "b"], ["a", "c"], ["b", "a"], ["b", "c"], ["a", "b", "c"], ["c", "a", "b"], ["c", "b", "a"], ["b", "c", "a"], ["b", "a", "c"], # dups! ["b", "c", "c"], ], ) @pytest.mark.parametrize("n", range(1, 11)) def test_nlargest_n(self, df_strings, nselect_method, n, order): # GH#10393 df = df_strings if "b" in order: error_msg = ( f"Column 'b' has dtype object, " f"cannot use method '{nselect_method}' with this dtype" ) with pytest.raises(TypeError, match=error_msg): getattr(df, nselect_method)(n, order) else: ascending = nselect_method == "nsmallest" result = getattr(df, nselect_method)(n, order) expected = df.sort_values(order, ascending=ascending).head(n) tm.assert_frame_equal(result, expected) @pytest.mark.parametrize( "columns", [["group", "category_string"], ["group", "string"]] ) def test_nlargest_error(self, df_main_dtypes, nselect_method, columns): df = df_main_dtypes col = columns[1] error_msg = ( f"Column '{col}' has dtype {df[col].dtype}, " f"cannot use method '{nselect_method}' with this dtype" ) # escape some characters that may be in the repr error_msg = ( error_msg.replace("(", "\\(") .replace(")", "\\)") .replace("[", "\\[") .replace("]", "\\]") ) with pytest.raises(TypeError, match=error_msg): getattr(df, nselect_method)(2, columns) def test_nlargest_all_dtypes(self, df_main_dtypes): df = df_main_dtypes df.nsmallest(2, list(set(df) - {"category_string", "string"})) df.nlargest(2, list(set(df) - {"category_string", "string"})) def test_nlargest_duplicates_on_starter_columns(self): # regression test for GH#22752 df = pd.DataFrame({"a": [2, 2, 2, 1, 1, 1], "b": [1, 2, 3, 3, 2, 1]}) result = df.nlargest(4, columns=["a", "b"]) expected = pd.DataFrame( {"a": [2, 2, 2, 1], "b": [3, 2, 1, 3]}, index=[2, 1, 0, 3] ) tm.assert_frame_equal(result, expected) result = df.nsmallest(4, columns=["a", "b"]) expected = pd.DataFrame( {"a": [1, 1, 1, 2], "b": [1, 2, 3, 1]}, index=[5, 4, 3, 0] ) tm.assert_frame_equal(result, expected) def test_nlargest_n_identical_values(self): # GH#15297 df = pd.DataFrame({"a": [1] * 5, "b": [1, 2, 3, 4, 5]}) result = df.nlargest(3, "a") expected = pd.DataFrame({"a": [1] * 3, "b": [1, 2, 3]}, index=[0, 1, 2]) tm.assert_frame_equal(result, expected) result = df.nsmallest(3, "a") expected = 
pd.DataFrame({"a": [1] * 3, "b": [1, 2, 3]}) tm.assert_frame_equal(result, expected) @pytest.mark.parametrize( "order", [["a", "b", "c"], ["c", "b", "a"], ["a"], ["b"], ["a", "b"], ["c", "b"]], ) @pytest.mark.parametrize("n", range(1, 6)) def test_nlargest_n_duplicate_index(self, df_duplicates, n, order): # GH#13412 df = df_duplicates result = df.nsmallest(n, order) expected = df.sort_values(order).head(n) tm.assert_frame_equal(result, expected) result = df.nlargest(n, order) expected = df.sort_values(order, ascending=False).head(n) tm.assert_frame_equal(result, expected) def test_nlargest_duplicate_keep_all_ties(self): # GH#16818 df = pd.DataFrame( {"a": [5, 4, 4, 2, 3, 3, 3, 3], "b": [10, 9, 8, 7, 5, 50, 10, 20]} ) result = df.nlargest(4, "a", keep="all") expected = pd.DataFrame( { "a": {0: 5, 1: 4, 2: 4, 4: 3, 5: 3, 6: 3, 7: 3}, "b": {0: 10, 1: 9, 2: 8, 4: 5, 5: 50, 6: 10, 7: 20}, } ) tm.assert_frame_equal(result, expected) result = df.nsmallest(2, "a", keep="all") expected = pd.DataFrame( { "a": {3: 2, 4: 3, 5: 3, 6: 3, 7: 3}, "b": {3: 7, 4: 5, 5: 50, 6: 10, 7: 20}, } ) tm.assert_frame_equal(result, expected) def test_nlargest_multiindex_column_lookup(self): # Check whether tuples are correctly treated as multi-level lookups. # GH#23033 df = pd.DataFrame( columns=pd.MultiIndex.from_product([["x"], ["a", "b"]]), data=[[0.33, 0.13], [0.86, 0.25], [0.25, 0.70], [0.85, 0.91]], ) # nsmallest result = df.nsmallest(3, ("x", "a")) expected = df.iloc[[2, 0, 3]] tm.assert_frame_equal(result, expected) # nlargest result = df.nlargest(3, ("x", "b")) expected = df.iloc[[3, 2, 1]] tm.assert_frame_equal(result, expected)
gfyoung/pandas
pandas/tests/frame/methods/test_nlargest.py
pandas/core/reshape/api.py
from pandas.core.arrays.base import ( ExtensionArray, ExtensionOpsMixin, ExtensionScalarOpsMixin, ) from pandas.core.arrays.boolean import BooleanArray from pandas.core.arrays.categorical import Categorical from pandas.core.arrays.datetimes import DatetimeArray from pandas.core.arrays.floating import FloatingArray from pandas.core.arrays.integer import IntegerArray from pandas.core.arrays.interval import IntervalArray from pandas.core.arrays.masked import BaseMaskedArray from pandas.core.arrays.numpy_ import PandasArray, PandasDtype from pandas.core.arrays.period import PeriodArray, period_array from pandas.core.arrays.sparse import SparseArray from pandas.core.arrays.string_ import StringArray from pandas.core.arrays.timedeltas import TimedeltaArray __all__ = [ "ExtensionArray", "ExtensionOpsMixin", "ExtensionScalarOpsMixin", "BaseMaskedArray", "BooleanArray", "Categorical", "DatetimeArray", "FloatingArray", "IntegerArray", "IntervalArray", "PandasArray", "PandasDtype", "PeriodArray", "period_array", "SparseArray", "StringArray", "TimedeltaArray", ]
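# __all__ in a package __init__ like the one above pins the public API:
# `from pandas.core.arrays import *` binds exactly the listed names. A
# self-contained demonstration with a throwaway module (mini_api is
# invented for the demo):
import sys
import types

mod = types.ModuleType("mini_api")
mod.public = 1
mod._private = 2
mod.__all__ = ["public"]
sys.modules["mini_api"] = mod

ns = {}
exec("from mini_api import *", ns)
assert "public" in ns and "_private" not in ns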
""" Note: for naming purposes, most tests are title with as e.g. "test_nlargest_foo" but are implicitly also testing nsmallest_foo. """ from string import ascii_lowercase import numpy as np import pytest import pandas as pd import pandas._testing as tm @pytest.fixture def df_duplicates(): return pd.DataFrame( {"a": [1, 2, 3, 4, 4], "b": [1, 1, 1, 1, 1], "c": [0, 1, 2, 5, 4]}, index=[0, 0, 1, 1, 1], ) @pytest.fixture def df_strings(): return pd.DataFrame( { "a": np.random.permutation(10), "b": list(ascii_lowercase[:10]), "c": np.random.permutation(10).astype("float64"), } ) @pytest.fixture def df_main_dtypes(): return pd.DataFrame( { "group": [1, 1, 2], "int": [1, 2, 3], "float": [4.0, 5.0, 6.0], "string": list("abc"), "category_string": pd.Series(list("abc")).astype("category"), "category_int": [7, 8, 9], "datetime": pd.date_range("20130101", periods=3), "datetimetz": pd.date_range("20130101", periods=3, tz="US/Eastern"), "timedelta": pd.timedelta_range("1 s", periods=3, freq="s"), }, columns=[ "group", "int", "float", "string", "category_string", "category_int", "datetime", "datetimetz", "timedelta", ], ) class TestNLargestNSmallest: # ---------------------------------------------------------------------- # Top / bottom @pytest.mark.parametrize( "order", [ ["a"], ["c"], ["a", "b"], ["a", "c"], ["b", "a"], ["b", "c"], ["a", "b", "c"], ["c", "a", "b"], ["c", "b", "a"], ["b", "c", "a"], ["b", "a", "c"], # dups! ["b", "c", "c"], ], ) @pytest.mark.parametrize("n", range(1, 11)) def test_nlargest_n(self, df_strings, nselect_method, n, order): # GH#10393 df = df_strings if "b" in order: error_msg = ( f"Column 'b' has dtype object, " f"cannot use method '{nselect_method}' with this dtype" ) with pytest.raises(TypeError, match=error_msg): getattr(df, nselect_method)(n, order) else: ascending = nselect_method == "nsmallest" result = getattr(df, nselect_method)(n, order) expected = df.sort_values(order, ascending=ascending).head(n) tm.assert_frame_equal(result, expected) @pytest.mark.parametrize( "columns", [["group", "category_string"], ["group", "string"]] ) def test_nlargest_error(self, df_main_dtypes, nselect_method, columns): df = df_main_dtypes col = columns[1] error_msg = ( f"Column '{col}' has dtype {df[col].dtype}, " f"cannot use method '{nselect_method}' with this dtype" ) # escape some characters that may be in the repr error_msg = ( error_msg.replace("(", "\\(") .replace(")", "\\)") .replace("[", "\\[") .replace("]", "\\]") ) with pytest.raises(TypeError, match=error_msg): getattr(df, nselect_method)(2, columns) def test_nlargest_all_dtypes(self, df_main_dtypes): df = df_main_dtypes df.nsmallest(2, list(set(df) - {"category_string", "string"})) df.nlargest(2, list(set(df) - {"category_string", "string"})) def test_nlargest_duplicates_on_starter_columns(self): # regression test for GH#22752 df = pd.DataFrame({"a": [2, 2, 2, 1, 1, 1], "b": [1, 2, 3, 3, 2, 1]}) result = df.nlargest(4, columns=["a", "b"]) expected = pd.DataFrame( {"a": [2, 2, 2, 1], "b": [3, 2, 1, 3]}, index=[2, 1, 0, 3] ) tm.assert_frame_equal(result, expected) result = df.nsmallest(4, columns=["a", "b"]) expected = pd.DataFrame( {"a": [1, 1, 1, 2], "b": [1, 2, 3, 1]}, index=[5, 4, 3, 0] ) tm.assert_frame_equal(result, expected) def test_nlargest_n_identical_values(self): # GH#15297 df = pd.DataFrame({"a": [1] * 5, "b": [1, 2, 3, 4, 5]}) result = df.nlargest(3, "a") expected = pd.DataFrame({"a": [1] * 3, "b": [1, 2, 3]}, index=[0, 1, 2]) tm.assert_frame_equal(result, expected) result = df.nsmallest(3, "a") expected = 
pd.DataFrame({"a": [1] * 3, "b": [1, 2, 3]}) tm.assert_frame_equal(result, expected) @pytest.mark.parametrize( "order", [["a", "b", "c"], ["c", "b", "a"], ["a"], ["b"], ["a", "b"], ["c", "b"]], ) @pytest.mark.parametrize("n", range(1, 6)) def test_nlargest_n_duplicate_index(self, df_duplicates, n, order): # GH#13412 df = df_duplicates result = df.nsmallest(n, order) expected = df.sort_values(order).head(n) tm.assert_frame_equal(result, expected) result = df.nlargest(n, order) expected = df.sort_values(order, ascending=False).head(n) tm.assert_frame_equal(result, expected) def test_nlargest_duplicate_keep_all_ties(self): # GH#16818 df = pd.DataFrame( {"a": [5, 4, 4, 2, 3, 3, 3, 3], "b": [10, 9, 8, 7, 5, 50, 10, 20]} ) result = df.nlargest(4, "a", keep="all") expected = pd.DataFrame( { "a": {0: 5, 1: 4, 2: 4, 4: 3, 5: 3, 6: 3, 7: 3}, "b": {0: 10, 1: 9, 2: 8, 4: 5, 5: 50, 6: 10, 7: 20}, } ) tm.assert_frame_equal(result, expected) result = df.nsmallest(2, "a", keep="all") expected = pd.DataFrame( { "a": {3: 2, 4: 3, 5: 3, 6: 3, 7: 3}, "b": {3: 7, 4: 5, 5: 50, 6: 10, 7: 20}, } ) tm.assert_frame_equal(result, expected) def test_nlargest_multiindex_column_lookup(self): # Check whether tuples are correctly treated as multi-level lookups. # GH#23033 df = pd.DataFrame( columns=pd.MultiIndex.from_product([["x"], ["a", "b"]]), data=[[0.33, 0.13], [0.86, 0.25], [0.25, 0.70], [0.85, 0.91]], ) # nsmallest result = df.nsmallest(3, ("x", "a")) expected = df.iloc[[2, 0, 3]] tm.assert_frame_equal(result, expected) # nlargest result = df.nlargest(3, ("x", "b")) expected = df.iloc[[3, 2, 1]] tm.assert_frame_equal(result, expected)
gfyoung/pandas
pandas/tests/frame/methods/test_nlargest.py
pandas/core/arrays/__init__.py
"""Handles errors based on something beyond the type. You can match error messages with regular expressions. You can also extend the matching behavior however you like. By default, strings are treated as regex and matched against the message of the error. Functions are passed the error and if the function returns 'truthy', then the error is caught. Usage: from cfme.utils import error with error.expected('foo'): x = 1 raise Exception('oh noes foo happened!') # this will be caught because regex matches with error.expected('foo'): raise Exception('oh noes bar happened!') # this will bubble up because it doesn't match with error.expected('foo'): pass # an error will be thrown because we expected an error but there wasn't one. """ from contextlib import contextmanager import re from multimethods import singledispatch from collections import Callable @singledispatch def match(o, e): """Returns true if the object matches the exception.""" raise NotImplementedError("Don't know how to match {} to an error".format(type(o))) @match.method(type) def _exception(cls_e, e): """Simulates normal except: clauses by matching the exception type""" return isinstance(e, cls_e) @match.method(Callable) def _callable(f, e): """Pass the exception to the callable, if the callable returns truthy, then it's a match.""" return f(e) def regex(expr, e): """Search the message of the exception using the regex expr""" p = re.compile(expr) return p.search(str(e)) @match.method(basestring) def _str(s, e): """Treat string as a regex and match it against the Exception's message.""" return regex(s, e) class UnexpectedSuccessException(Exception): """An error that is thrown when something we expected to fail didn't fail.""" pass @contextmanager def handler(f): """Handles errors based on more than just their type. Any matching error will be caught, the rest will be allowed to propagate up the stack.""" try: yield except Exception as e: if not match(f, e): raise e @contextmanager def expected(f): """Inverts error handling. If the enclosed block doesn't raise an error, it will raise one. If it raises a matching error, it will return normally. If it raises a non-matching error, that error will be allowed to propagate up the stack. """ try: yield raise UnexpectedSuccessException( "Expected error matching '{}' but got success instead.".format(f)) except UnexpectedSuccessException: raise except Exception as e: if not match(f, e): raise e
# -*- coding: utf-8 -*- """ Tests checking the basic functionality of the Control/Explorer section. Whether we can create/update/delete/assign/... these objects. Nothing with deep meaning. Can be also used as a unit-test for page model coverage. """ import random from collections import namedtuple import fauxfactory import pytest from cfme import test_requirements from cfme.control.explorer import alert_profiles, conditions, policies from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.blockers import BZ from cfme.utils.update import update from cfme.utils.version import current_version pytestmark = [ pytest.mark.long_running, test_requirements.control ] EXPRESSIONS_TO_TEST = [ ( "Field", "fill_field({} : Last Compliance Timestamp, BEFORE, 03/04/2014)", '{} : Last Compliance Timestamp BEFORE "03/04/2014 00:00"' ), ( "Count", "fill_count({}.Compliance History, >, 0)", 'COUNT OF {}.Compliance History > 0' ), ( "Tag", "fill_tag({}.User.My Company Tags : Location, Chicago)", "{}.User.My Company Tags : Location CONTAINS 'Chicago'" ), ( "Find", "fill_find({}.Compliance History : Event Type, INCLUDES, some_string, Check Any," "Resource Type, =, another_string)", 'FIND {}.Compliance History : Event Type INCLUDES "some_string" CHECK ANY Resource Type' ' = "another_string"' ) ] COMPLIANCE_POLICIES = [ policies.HostCompliancePolicy, policies.VMCompliancePolicy, policies.ReplicatorCompliancePolicy, policies.PodCompliancePolicy, policies.ContainerNodeCompliancePolicy, policies.ContainerImageCompliancePolicy, ] CONTROL_POLICIES = [ policies.HostControlPolicy, policies.VMControlPolicy, policies.ReplicatorControlPolicy, policies.PodControlPolicy, policies.ContainerNodeControlPolicy, policies.ContainerImageControlPolicy ] POLICIES = COMPLIANCE_POLICIES + CONTROL_POLICIES CONDITIONS = [ conditions.HostCondition, conditions.VMCondition, conditions.ReplicatorCondition, conditions.PodCondition, conditions.ContainerNodeCondition, conditions.ContainerImageCondition, conditions.ProviderCondition ] PolicyAndCondition = namedtuple('PolicyAndCondition', ['name', 'policy', 'condition']) POLICIES_AND_CONDITIONS = [ PolicyAndCondition(name=obj[0].__name__, policy=obj[0], condition=obj[1]) for obj in zip(CONTROL_POLICIES, CONDITIONS) ] EVENTS = [ "Datastore Analysis Complete", "Datastore Analysis Request", "Host Auth Changed", "Host Auth Error", "Host Auth Incomplete Credentials", "Host Auth Invalid", "Host Auth Unreachable", "Host Auth Valid", "Provider Auth Changed", "Provider Auth Error", "Provider Auth Incomplete Credentials", "Provider Auth Invalid", "Provider Auth Unreachable", "Provider Auth Valid", "Tag Complete", "Tag Parent Cluster Complete", "Tag Parent Datastore Complete", "Tag Parent Host Complete", "Tag Parent Resource Pool Complete", "Tag Request", "Un-Tag Complete", "Un-Tag Parent Cluster Complete", "Un-Tag Parent Datastore Complete", "Un-Tag Parent Host Complete", "Un-Tag Parent Resource Pool Complete", "Un-Tag Request", "Container Image Compliance Failed", "Container Image Compliance Passed", "Container Node Compliance Failed", "Container Node Compliance Passed", "Host Compliance Failed", "Host Compliance Passed", "Pod Compliance Failed", "Pod Compliance Passed", "Replicator Compliance Failed", "Replicator Compliance Passed", "VM Compliance Failed", "VM Compliance Passed", "Container Image Analysis Complete", "Container Image Discovered", "Container Node Failed Mount", "Container Node Invalid Disk Capacity", "Container Node Not Ready", "Container Node Not Schedulable", 
"Container Node Ready", "Container Node Rebooted", "Container Node Schedulable", "Pod Deadline Exceeded", "Pod Failed Scheduling", "Pod Failed Sync", "Pod Failed Validation", "Pod Insufficient Free CPU", "Pod Insufficient Free Memory", "Pod Out of Disk", "Pod Scheduled", "Pod hostPort Conflict", "Pod nodeSelector Mismatching", "Replicator Failed Creating Pod", "Replicator Successfully Created Pod", "Host Added to Cluster", "Host Analysis Complete", "Host Analysis Request", "Host Connect", "Host Disconnect", "Host Maintenance Enter Request", "Host Maintenance Exit Request", "Host Provision Complete", "Host Reboot Request", "Host Removed from Cluster", "Host Reset Request", "Host Shutdown Request", "Host Standby Request", "Host Start Request", "Host Stop Request", "Host Vmotion Disable Request", "Host Vmotion Enable Request", "Service Provision Complete", "Service Retire Request", "Service Retired", "Service Retirement Warning", "Service Start Request", "Service Started", "Service Stop Request", "Service Stopped", "VM Clone Complete", "VM Clone Start", "VM Create Complete", "VM Delete (from Disk) Request", "VM Renamed Event", "VM Settings Change", "VM Template Create Complete", "VM Provision Complete", "VM Retire Request", "VM Retired", "VM Retirement Warning", "VM Analysis Complete", "VM Analysis Failure", "VM Analysis Request", "VM Analysis Start", "VM Guest Reboot", "VM Guest Reboot Request", "VM Guest Shutdown", "VM Guest Shutdown Request", "VM Live Migration (VMOTION)", "VM Pause", "VM Pause Request", "VM Power Off", "VM Power Off Request", "VM Power On", "VM Power On Request", "VM Remote Console Connected", "VM Removal from Inventory", "VM Removal from Inventory Request", "VM Reset", "VM Reset Request", "VM Resume", "VM Shelve", "VM Shelve Offload", "VM Shelve Offload Request", "VM Shelve Request", "VM Snapshot Create Complete", "VM Snapshot Create Request", "VM Snapshot Create Started", "VM Standby of Guest", "VM Standby of Guest Request", "VM Suspend", "VM Suspend Request" ] ALERT_PROFILES = [ alert_profiles.ClusterAlertProfile, alert_profiles.DatastoreAlertProfile, alert_profiles.HostAlertProfile, alert_profiles.ProviderAlertProfile, alert_profiles.ServerAlertProfile, alert_profiles.VMInstanceAlertProfile ] @pytest.fixture(scope="module") def policy_profile_collection(appliance): return appliance.collections.policy_profiles @pytest.fixture(scope="module") def policy_collection(appliance): return appliance.collections.policies @pytest.fixture(scope="module") def condition_collection(appliance): return appliance.collections.conditions @pytest.fixture(scope="module") def action_collection(appliance): return appliance.collections.actions @pytest.fixture(scope="module") def alert_collection(appliance): return appliance.collections.alerts @pytest.fixture(scope="module") def alert_profile_collection(appliance): return appliance.collections.alert_profiles @pytest.yield_fixture def two_random_policies(policy_collection): policy_1 = policy_collection.create( random.choice(POLICIES), fauxfactory.gen_alphanumeric() ) policy_2 = policy_collection.create( random.choice(POLICIES), fauxfactory.gen_alphanumeric() ) yield policy_1, policy_2 policy_collection.delete(policy_1, policy_2) @pytest.fixture(params=POLICIES, ids=lambda policy_class: policy_class.__name__) def policy_class(request): return request.param @pytest.fixture(params=ALERT_PROFILES, ids=lambda alert_profile: alert_profile.__name__) def alert_profile_class(request): return request.param @pytest.yield_fixture def 
policy(policy_collection, policy_class): policy_ = policy_collection.create(policy_class, fauxfactory.gen_alphanumeric()) yield policy_ policy_.delete() @pytest.yield_fixture(params=CONDITIONS, ids=lambda condition_class: condition_class.__name__, scope="module") def condition_for_expressions(request, condition_collection): condition_class = request.param condition = condition_collection.create( condition_class, fauxfactory.gen_alphanumeric(), expression="fill_field({} : Name, IS NOT EMPTY)".format(condition_class.FIELD_VALUE), scope="fill_field({} : Name, INCLUDES, {})".format(condition_class.FIELD_VALUE, fauxfactory.gen_alpha()) ) yield condition condition.delete() @pytest.fixture(params=CONDITIONS, ids=lambda condition_class: condition_class.__name__) def condition_prerequisites(request, condition_collection): condition_class = request.param expression = "fill_field({} : Name, =, {})".format( condition_class.FIELD_VALUE, fauxfactory.gen_alphanumeric() ) scope = "fill_field({} : Name, =, {})".format( condition_class.FIELD_VALUE, fauxfactory.gen_alphanumeric() ) return condition_class, scope, expression @pytest.yield_fixture(params=CONTROL_POLICIES, ids=lambda policy_class: policy_class.__name__) def control_policy(request, policy_collection): policy_class = request.param policy = policy_collection.create(policy_class, fauxfactory.gen_alphanumeric()) yield policy policy.delete() @pytest.yield_fixture def action(action_collection): action_ = action_collection.create( fauxfactory.gen_alphanumeric(), action_type="Tag", action_values={"tag": ("My Company Tags", "Department", "Accounting")} ) yield action_ action_.delete() @pytest.yield_fixture def alert(alert_collection): alert_ = alert_collection.create( fauxfactory.gen_alphanumeric(), based_on=random.choice(ALERT_PROFILES).TYPE, timeline_event=True, driving_event="Hourly Timer" ) yield alert_ alert_.delete() @pytest.yield_fixture def alert_profile(alert_profile_class, alert_collection, alert_profile_collection): alert = alert_collection.create( fauxfactory.gen_alphanumeric(), based_on=alert_profile_class.TYPE, timeline_event=True, driving_event="Hourly Timer" ) alert_profile_ = alert_profile_collection.create( alert_profile_class, fauxfactory.gen_alphanumeric(), alerts=[alert.description] ) yield alert_profile_ alert_profile_.delete() alert.delete() @pytest.yield_fixture(params=POLICIES_AND_CONDITIONS, ids=lambda item: item.name) def policy_and_condition(request, policy_collection, condition_collection): condition_class = request.param.condition policy_class = request.param.policy expression = "fill_field({} : Name, =, {})".format( condition_class.FIELD_VALUE, fauxfactory.gen_alphanumeric() ) condition = condition_collection.create( condition_class, fauxfactory.gen_alphanumeric(), expression=expression ) policy = policy_collection.create( policy_class, fauxfactory.gen_alphanumeric() ) yield policy, condition policy.delete() condition.delete() @pytest.mark.sauce @pytest.mark.tier(2) def test_condition_crud(condition_collection, condition_prerequisites): # CR condition_class, scope, expression = condition_prerequisites condition = condition_collection.create( condition_class, fauxfactory.gen_alphanumeric(), scope=scope, expression=expression ) with update(condition): condition.notes = "Modified!" 
# D condition.delete() @pytest.mark.sauce @pytest.mark.tier(2) def test_action_crud(action_collection): # CR action = action_collection.create( fauxfactory.gen_alphanumeric(), action_type="Tag", action_values={"tag": ("My Company Tags", "Department", "Accounting")} ) # U with update(action): action.description = "w00t w00t" # D action.delete() @pytest.mark.sauce @pytest.mark.tier(2) def test_policy_crud(policy_collection, policy_class): # CR policy = policy_collection.create(policy_class, fauxfactory.gen_alphanumeric()) # U with update(policy): policy.notes = "Modified!" # D policy.delete() @pytest.mark.tier(3) def test_policy_copy(policy): random_policy_copy = policy.copy() assert random_policy_copy.exists random_policy_copy.delete() @pytest.mark.tier(3) def test_assign_two_random_events_to_control_policy(control_policy, soft_assert): random_events = random.sample(EVENTS, 2) control_policy.assign_events(*random_events) soft_assert(control_policy.is_event_assigned(random_events[0])) soft_assert(control_policy.is_event_assigned(random_events[1])) @pytest.mark.tier(2) @pytest.mark.meta(blockers=[BZ(1491576, forced_streams=["5.7.4"])]) def test_control_assign_actions_to_event(request, policy, action): if type(policy) in CONTROL_POLICIES: event = random.choice(EVENTS) policy.assign_events(event) request.addfinalizer(policy.assign_events) else: prefix = policy.TREE_NODE if not policy.TREE_NODE == "Vm" else policy.TREE_NODE.upper() event = "{} Compliance Check".format(prefix) request.addfinalizer(lambda: policy.assign_actions_to_event( event, {"Mark as Non-Compliant": False})) policy.assign_actions_to_event(event, action) assert str(action) == policy.assigned_actions_to_event(event)[0] @pytest.mark.tier(3) def test_assign_condition_to_control_policy(request, policy_and_condition): """This test checks if a condition is assigned to a control policy. Steps: * Create a control policy. * Assign a condition to the created policy. """ policy, condition = policy_and_condition policy.assign_conditions(condition) request.addfinalizer(policy.assign_conditions) assert policy.is_condition_assigned(condition) @pytest.mark.sauce @pytest.mark.tier(2) def test_policy_profile_crud(policy_profile_collection, two_random_policies): profile = policy_profile_collection.create( fauxfactory.gen_alphanumeric(), policies=two_random_policies ) with update(profile): profile.notes = "Modified!" 
profile.delete() @pytest.mark.tier(3) @pytest.mark.parametrize("fill_type,expression,verify", EXPRESSIONS_TO_TEST, ids=[ expr[0] for expr in EXPRESSIONS_TO_TEST]) def test_modify_condition_expression(condition_for_expressions, fill_type, expression, verify): with update(condition_for_expressions): condition_for_expressions.expression = expression.format( condition_for_expressions.FIELD_VALUE) assert condition_for_expressions.read_expression() == verify.format( condition_for_expressions.FIELD_VALUE) @pytest.mark.sauce @pytest.mark.tier(2) def test_alert_crud(alert_collection): # CR alert = alert_collection.create( fauxfactory.gen_alphanumeric(), timeline_event=True, driving_event="Hourly Timer" ) # U with update(alert): alert.notification_frequency = "2 Hours" # D alert.delete() @pytest.mark.tier(3) @pytest.mark.meta(blockers=[1303645], automates=[1303645]) def test_control_alert_copy(alert): alert_copy = alert.copy(description=fauxfactory.gen_alphanumeric()) assert alert_copy.exists alert_copy.delete() @pytest.mark.sauce @pytest.mark.tier(2) def test_alert_profile_crud(request, alert_profile_class, alert_collection, alert_profile_collection): alert = alert_collection.create( fauxfactory.gen_alphanumeric(), based_on=alert_profile_class.TYPE, timeline_event=True, driving_event="Hourly Timer" ) request.addfinalizer(alert.delete) alert_profile = alert_profile_collection.create( alert_profile_class, fauxfactory.gen_alphanumeric(), alerts=[alert.description] ) with update(alert_profile): alert_profile.notes = "Modified!" alert_profile.delete() @pytest.mark.tier(2) @pytest.mark.meta(blockers=[BZ(1416311, forced_streams=["5.7"])]) def test_alert_profile_assigning(alert_profile): if isinstance(alert_profile, alert_profiles.ServerAlertProfile): if BZ(1489697, forced_streams=["5.8"]).blocks: pytest.skip("BZ 1489697") alert_profile.assign_to("Selected Servers", selections=["Servers", "EVM"]) else: alert_profile.assign_to("The Enterprise") @pytest.mark.tier(2) @pytest.mark.uncollectif(lambda: current_version() < "5.8") def test_control_is_ansible_playbook_available_in_actions_dropdown(action_collection): view = navigate_to(action_collection, "Add") assert "Run Ansible Playbook" in [option.text for option in view.action_type.all_options]
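# Every yield fixture above follows create -> yield -> delete, so the
# object is cleaned up even when the test body fails. A minimal sketch:
# FakeCollection stands in for the appliance collections and is not part
# of the real framework, and modern pytest spells pytest.yield_fixture as
# plain pytest.fixture.
import pytest


class FakeCollection(object):
    def __init__(self):
        self.items = []

    def create(self, name):
        self.items.append(name)
        return name

    def delete(self, name):
        self.items.remove(name)


COLLECTION = FakeCollection()


@pytest.fixture
def example_policy():
    policy = COLLECTION.create("policy-1")   # setup
    yield policy                             # test body runs here
    COLLECTION.delete(policy)                # teardown, pass or fail


def test_example_policy_created(example_policy):
    assert example_policy in COLLECTION.items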
quarckster/cfme_tests
cfme/tests/control/test_basic.py
cfme/utils/error.py
# Imports for backward compatibility and convenience # NOQA all the things because import time from wrapanapi.base import VMInfo, WrapanapiAPIBase, Logger # NOQA from wrapanapi import exceptions # NOQA from wrapanapi.rhevm import RHEVMSystem as RHEVMSystemBase # NOQA from cfme.utils import conf from cfme.utils.log import logger from cfme.utils.ssh import SSHClient # Overrides from ovirtsdk.xml import params class RHEVMSystem(RHEVMSystemBase): def start_vm(self, vm_name=None, **kwargs): self.wait_vm_steady(vm_name) self.logger.info(' Starting RHEV VM %s', vm_name) vm = self._get_vm(vm_name) if vm.status.get_state() == 'up': self.logger.info(' RHEV VM %s is already running.', vm_name) return True else: if 'initialization' in kwargs: vm.start(kwargs['initialization']) else: vm.start() self.wait_vm_running(vm_name) return True def deploy_template(self, template, *args, **kwargs): power_on = kwargs.get('power_on', True) vm_name = super(RHEVMSystem, self).deploy_template(template, *args, **kwargs) if power_on: version = self.api.get_product_info().get_full_version() cfme_template = any( template.startswith(pfx) for pfx in ["cfme-55", "s_tpl", "sprout_template"]) if cfme_template and version.startswith("3.4"): action = params.Action(vm=params.VM(initialization=params.Initialization( cloud_init=params.CloudInit(users=params.Users( user=[params.User(user_name="root", password="smartvm")]))))) self.start_vm(vm_name=vm_name, initialization=action) else: self.start_vm(vm_name=vm_name) return vm_name def connect_direct_lun_to_appliance(self, vm_name, disconnect): """Connects or disconnects the direct lun disk to an appliance. Args: vm_name: Name of the VM with the appliance. disconnect: If False, it will connect, otherwise it will disconnect """ if "provider_key" in self.kwargs: provider_name = self.kwargs["provider_key"] else: raise TypeError("provider_key not supplied to the provider.") # check that the vm exists on the rhev provider, get the ip address if so try: vm = self.api.vms.get(vm_name) ip_addr = self.get_ip_address(vm_name) except Exception: raise NameError("{} not found on {}".format(vm_name, provider_name)) # check for direct lun definition on provider's cfme_data.yaml if 'direct_lun' not in self.kwargs: raise ValueError( "direct_lun key not in cfme_data.yaml under provider {}, exiting...".format( provider_name)) # does the direct lun exist prov_data = self.kwargs dlun_name = prov_data['direct_lun']['name'] dlun = self.api.disks.get(dlun_name) if dlun is None: # Create the iSCSI storage connection: sc = params.StorageConnection() sc.set_address(prov_data['direct_lun']['ip_address']) sc.set_type("iscsi") sc.set_port(int(prov_data['direct_lun']['port'])) sc.set_target(prov_data['direct_lun']['iscsi_target']) # Add the direct LUN disk: lu = params.LogicalUnit() lu.set_id(prov_data['direct_lun']['iscsi_target']) lu.set_address(sc.get_address()) lu.set_port(sc.get_port()) lu.set_target(sc.get_target()) storage = params.Storage() storage.set_type("iscsi") storage.set_logical_unit([lu]) disk = params.Disk() disk.set_name(dlun_name) disk.set_interface("virtio") disk.set_type("iscsi") disk.set_format("raw") disk.set_lun_storage(storage) disk.set_shareable(True) disk = self.api.disks.add(disk) dlun = self.api.disks.get(dlun_name) # add it if not disconnect: retries = 0 while retries < 3: retries += 1 direct_lun = params.Disk(id=dlun.id) try: # is the disk present and active?
vm_disk_list = vm.get_disks().list() for vm_disk in vm_disk_list: if vm_disk.name == dlun_name: if vm_disk.active: return else: vm_disk.activate() return # if not present, add it and activate direct_lun = params.Disk(id=dlun.id) added_lun = vm.disks.add(direct_lun) added_lun.activate() except Exception as e: logger.error("Exception caught: %s", str(e)) if retries == 3: logger.error("exhausted retries and giving up") raise else: logger.info("sleeping for 30s and retrying to connect direct lun") time.sleep(30) # Init SSH client, run pvscan on the appliance ssh_kwargs = { 'username': conf.credentials['ssh']['username'], 'password': conf.credentials['ssh']['password'], 'hostname': ip_addr } with SSHClient(**ssh_kwargs) as ssh_client: status, out = ssh_client.run_command('pvscan', timeout=5 * 60) # remove it else: vm_dlun = vm.disks.get(name=dlun_name) if vm_dlun is None: return else: detach = params.Action(detach=True) vm_dlun.delete(action=detach)
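# connect_direct_lun_to_appliance above hand-rolls a retry loop (three
# attempts, 30-second sleeps). The same logic as a reusable helper; `retry`
# is a sketch written for illustration and is not part of wrapanapi:
import time


def retry(action, attempts=3, delay=30):
    """Call `action` until it succeeds, sleeping `delay` seconds between
    failures and re-raising the last exception once `attempts` is exhausted."""
    for attempt in range(1, attempts + 1):
        try:
            return action()
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay)


# e.g. retry(lambda: vm.disks.add(direct_lun), attempts=3, delay=30)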
# -*- coding: utf-8 -*- """ Tests checking the basic functionality of the Control/Explorer section. Whether we can create/update/delete/assign/... these objects. Nothing with deep meaning. Can be also used as a unit-test for page model coverage. """ import random from collections import namedtuple import fauxfactory import pytest from cfme import test_requirements from cfme.control.explorer import alert_profiles, conditions, policies from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.blockers import BZ from cfme.utils.update import update from cfme.utils.version import current_version pytestmark = [ pytest.mark.long_running, test_requirements.control ] EXPRESSIONS_TO_TEST = [ ( "Field", "fill_field({} : Last Compliance Timestamp, BEFORE, 03/04/2014)", '{} : Last Compliance Timestamp BEFORE "03/04/2014 00:00"' ), ( "Count", "fill_count({}.Compliance History, >, 0)", 'COUNT OF {}.Compliance History > 0' ), ( "Tag", "fill_tag({}.User.My Company Tags : Location, Chicago)", "{}.User.My Company Tags : Location CONTAINS 'Chicago'" ), ( "Find", "fill_find({}.Compliance History : Event Type, INCLUDES, some_string, Check Any," "Resource Type, =, another_string)", 'FIND {}.Compliance History : Event Type INCLUDES "some_string" CHECK ANY Resource Type' ' = "another_string"' ) ] COMPLIANCE_POLICIES = [ policies.HostCompliancePolicy, policies.VMCompliancePolicy, policies.ReplicatorCompliancePolicy, policies.PodCompliancePolicy, policies.ContainerNodeCompliancePolicy, policies.ContainerImageCompliancePolicy, ] CONTROL_POLICIES = [ policies.HostControlPolicy, policies.VMControlPolicy, policies.ReplicatorControlPolicy, policies.PodControlPolicy, policies.ContainerNodeControlPolicy, policies.ContainerImageControlPolicy ] POLICIES = COMPLIANCE_POLICIES + CONTROL_POLICIES CONDITIONS = [ conditions.HostCondition, conditions.VMCondition, conditions.ReplicatorCondition, conditions.PodCondition, conditions.ContainerNodeCondition, conditions.ContainerImageCondition, conditions.ProviderCondition ] PolicyAndCondition = namedtuple('PolicyAndCondition', ['name', 'policy', 'condition']) POLICIES_AND_CONDITIONS = [ PolicyAndCondition(name=obj[0].__name__, policy=obj[0], condition=obj[1]) for obj in zip(CONTROL_POLICIES, CONDITIONS) ] EVENTS = [ "Datastore Analysis Complete", "Datastore Analysis Request", "Host Auth Changed", "Host Auth Error", "Host Auth Incomplete Credentials", "Host Auth Invalid", "Host Auth Unreachable", "Host Auth Valid", "Provider Auth Changed", "Provider Auth Error", "Provider Auth Incomplete Credentials", "Provider Auth Invalid", "Provider Auth Unreachable", "Provider Auth Valid", "Tag Complete", "Tag Parent Cluster Complete", "Tag Parent Datastore Complete", "Tag Parent Host Complete", "Tag Parent Resource Pool Complete", "Tag Request", "Un-Tag Complete", "Un-Tag Parent Cluster Complete", "Un-Tag Parent Datastore Complete", "Un-Tag Parent Host Complete", "Un-Tag Parent Resource Pool Complete", "Un-Tag Request", "Container Image Compliance Failed", "Container Image Compliance Passed", "Container Node Compliance Failed", "Container Node Compliance Passed", "Host Compliance Failed", "Host Compliance Passed", "Pod Compliance Failed", "Pod Compliance Passed", "Replicator Compliance Failed", "Replicator Compliance Passed", "VM Compliance Failed", "VM Compliance Passed", "Container Image Analysis Complete", "Container Image Discovered", "Container Node Failed Mount", "Container Node Invalid Disk Capacity", "Container Node Not Ready", "Container Node Not Schedulable", 
"Container Node Ready", "Container Node Rebooted", "Container Node Schedulable", "Pod Deadline Exceeded", "Pod Failed Scheduling", "Pod Failed Sync", "Pod Failed Validation", "Pod Insufficient Free CPU", "Pod Insufficient Free Memory", "Pod Out of Disk", "Pod Scheduled", "Pod hostPort Conflict", "Pod nodeSelector Mismatching", "Replicator Failed Creating Pod", "Replicator Successfully Created Pod", "Host Added to Cluster", "Host Analysis Complete", "Host Analysis Request", "Host Connect", "Host Disconnect", "Host Maintenance Enter Request", "Host Maintenance Exit Request", "Host Provision Complete", "Host Reboot Request", "Host Removed from Cluster", "Host Reset Request", "Host Shutdown Request", "Host Standby Request", "Host Start Request", "Host Stop Request", "Host Vmotion Disable Request", "Host Vmotion Enable Request", "Service Provision Complete", "Service Retire Request", "Service Retired", "Service Retirement Warning", "Service Start Request", "Service Started", "Service Stop Request", "Service Stopped", "VM Clone Complete", "VM Clone Start", "VM Create Complete", "VM Delete (from Disk) Request", "VM Renamed Event", "VM Settings Change", "VM Template Create Complete", "VM Provision Complete", "VM Retire Request", "VM Retired", "VM Retirement Warning", "VM Analysis Complete", "VM Analysis Failure", "VM Analysis Request", "VM Analysis Start", "VM Guest Reboot", "VM Guest Reboot Request", "VM Guest Shutdown", "VM Guest Shutdown Request", "VM Live Migration (VMOTION)", "VM Pause", "VM Pause Request", "VM Power Off", "VM Power Off Request", "VM Power On", "VM Power On Request", "VM Remote Console Connected", "VM Removal from Inventory", "VM Removal from Inventory Request", "VM Reset", "VM Reset Request", "VM Resume", "VM Shelve", "VM Shelve Offload", "VM Shelve Offload Request", "VM Shelve Request", "VM Snapshot Create Complete", "VM Snapshot Create Request", "VM Snapshot Create Started", "VM Standby of Guest", "VM Standby of Guest Request", "VM Suspend", "VM Suspend Request" ] ALERT_PROFILES = [ alert_profiles.ClusterAlertProfile, alert_profiles.DatastoreAlertProfile, alert_profiles.HostAlertProfile, alert_profiles.ProviderAlertProfile, alert_profiles.ServerAlertProfile, alert_profiles.VMInstanceAlertProfile ] @pytest.fixture(scope="module") def policy_profile_collection(appliance): return appliance.collections.policy_profiles @pytest.fixture(scope="module") def policy_collection(appliance): return appliance.collections.policies @pytest.fixture(scope="module") def condition_collection(appliance): return appliance.collections.conditions @pytest.fixture(scope="module") def action_collection(appliance): return appliance.collections.actions @pytest.fixture(scope="module") def alert_collection(appliance): return appliance.collections.alerts @pytest.fixture(scope="module") def alert_profile_collection(appliance): return appliance.collections.alert_profiles @pytest.yield_fixture def two_random_policies(policy_collection): policy_1 = policy_collection.create( random.choice(POLICIES), fauxfactory.gen_alphanumeric() ) policy_2 = policy_collection.create( random.choice(POLICIES), fauxfactory.gen_alphanumeric() ) yield policy_1, policy_2 policy_collection.delete(policy_1, policy_2) @pytest.fixture(params=POLICIES, ids=lambda policy_class: policy_class.__name__) def policy_class(request): return request.param @pytest.fixture(params=ALERT_PROFILES, ids=lambda alert_profile: alert_profile.__name__) def alert_profile_class(request): return request.param @pytest.yield_fixture def 
policy(policy_collection, policy_class): policy_ = policy_collection.create(policy_class, fauxfactory.gen_alphanumeric()) yield policy_ policy_.delete() @pytest.yield_fixture(params=CONDITIONS, ids=lambda condition_class: condition_class.__name__, scope="module") def condition_for_expressions(request, condition_collection): condition_class = request.param condition = condition_collection.create( condition_class, fauxfactory.gen_alphanumeric(), expression="fill_field({} : Name, IS NOT EMPTY)".format(condition_class.FIELD_VALUE), scope="fill_field({} : Name, INCLUDES, {})".format(condition_class.FIELD_VALUE, fauxfactory.gen_alpha()) ) yield condition condition.delete() @pytest.fixture(params=CONDITIONS, ids=lambda condition_class: condition_class.__name__) def condition_prerequisites(request, condition_collection): condition_class = request.param expression = "fill_field({} : Name, =, {})".format( condition_class.FIELD_VALUE, fauxfactory.gen_alphanumeric() ) scope = "fill_field({} : Name, =, {})".format( condition_class.FIELD_VALUE, fauxfactory.gen_alphanumeric() ) return condition_class, scope, expression @pytest.yield_fixture(params=CONTROL_POLICIES, ids=lambda policy_class: policy_class.__name__) def control_policy(request, policy_collection): policy_class = request.param policy = policy_collection.create(policy_class, fauxfactory.gen_alphanumeric()) yield policy policy.delete() @pytest.yield_fixture def action(action_collection): action_ = action_collection.create( fauxfactory.gen_alphanumeric(), action_type="Tag", action_values={"tag": ("My Company Tags", "Department", "Accounting")} ) yield action_ action_.delete() @pytest.yield_fixture def alert(alert_collection): alert_ = alert_collection.create( fauxfactory.gen_alphanumeric(), based_on=random.choice(ALERT_PROFILES).TYPE, timeline_event=True, driving_event="Hourly Timer" ) yield alert_ alert_.delete() @pytest.yield_fixture def alert_profile(alert_profile_class, alert_collection, alert_profile_collection): alert = alert_collection.create( fauxfactory.gen_alphanumeric(), based_on=alert_profile_class.TYPE, timeline_event=True, driving_event="Hourly Timer" ) alert_profile_ = alert_profile_collection.create( alert_profile_class, fauxfactory.gen_alphanumeric(), alerts=[alert.description] ) yield alert_profile_ alert_profile_.delete() alert.delete() @pytest.yield_fixture(params=POLICIES_AND_CONDITIONS, ids=lambda item: item.name) def policy_and_condition(request, policy_collection, condition_collection): condition_class = request.param.condition policy_class = request.param.policy expression = "fill_field({} : Name, =, {})".format( condition_class.FIELD_VALUE, fauxfactory.gen_alphanumeric() ) condition = condition_collection.create( condition_class, fauxfactory.gen_alphanumeric(), expression=expression ) policy = policy_collection.create( policy_class, fauxfactory.gen_alphanumeric() ) yield policy, condition policy.delete() condition.delete() @pytest.mark.sauce @pytest.mark.tier(2) def test_condition_crud(condition_collection, condition_prerequisites): # CR condition_class, scope, expression = condition_prerequisites condition = condition_collection.create( condition_class, fauxfactory.gen_alphanumeric(), scope=scope, expression=expression ) with update(condition): condition.notes = "Modified!" 
@pytest.mark.sauce
@pytest.mark.tier(2)
def test_condition_crud(condition_collection, condition_prerequisites):
    # CR
    condition_class, scope, expression = condition_prerequisites
    condition = condition_collection.create(
        condition_class,
        fauxfactory.gen_alphanumeric(),
        scope=scope,
        expression=expression
    )
    # U
    with update(condition):
        condition.notes = "Modified!"
    # D
    condition.delete()


@pytest.mark.sauce
@pytest.mark.tier(2)
def test_action_crud(action_collection):
    # CR
    action = action_collection.create(
        fauxfactory.gen_alphanumeric(),
        action_type="Tag",
        action_values={"tag": ("My Company Tags", "Department", "Accounting")}
    )
    # U
    with update(action):
        action.description = "w00t w00t"
    # D
    action.delete()


@pytest.mark.sauce
@pytest.mark.tier(2)
def test_policy_crud(policy_collection, policy_class):
    # CR
    policy = policy_collection.create(policy_class, fauxfactory.gen_alphanumeric())
    # U
    with update(policy):
        policy.notes = "Modified!"
    # D
    policy.delete()


@pytest.mark.tier(3)
def test_policy_copy(policy):
    random_policy_copy = policy.copy()
    assert random_policy_copy.exists
    random_policy_copy.delete()


@pytest.mark.tier(3)
def test_assign_two_random_events_to_control_policy(control_policy, soft_assert):
    random_events = random.sample(EVENTS, 2)
    control_policy.assign_events(*random_events)
    soft_assert(control_policy.is_event_assigned(random_events[0]))
    soft_assert(control_policy.is_event_assigned(random_events[1]))


@pytest.mark.tier(2)
@pytest.mark.meta(blockers=[BZ(1491576, forced_streams=["5.7.4"])])
def test_control_assign_actions_to_event(request, policy, action):
    if type(policy) in CONTROL_POLICIES:
        event = random.choice(EVENTS)
        policy.assign_events(event)
        request.addfinalizer(policy.assign_events)
    else:
        prefix = policy.TREE_NODE if policy.TREE_NODE != "Vm" else policy.TREE_NODE.upper()
        event = "{} Compliance Check".format(prefix)
        request.addfinalizer(lambda: policy.assign_actions_to_event(
            event, {"Mark as Non-Compliant": False}))
    policy.assign_actions_to_event(event, action)
    assert str(action) == policy.assigned_actions_to_event(event)[0]


@pytest.mark.tier(3)
def test_assign_condition_to_control_policy(request, policy_and_condition):
    """This test checks whether a condition is assigned to a control policy.

    Steps:
        * Create a control policy.
        * Assign a condition to the created policy.
    """
    policy, condition = policy_and_condition
    policy.assign_conditions(condition)
    request.addfinalizer(policy.assign_conditions)
    assert policy.is_condition_assigned(condition)
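
# Note: the assignment tests above register teardown via
# request.addfinalizer(...) rather than a yield-fixture because the cleanup
# arguments (the chosen event, the assigned condition) are only known inside
# the test body. Finalizers run after the test in LIFO order, whether the
# assertions passed or not.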
@pytest.mark.sauce
@pytest.mark.tier(2)
def test_policy_profile_crud(policy_profile_collection, two_random_policies):
    profile = policy_profile_collection.create(
        fauxfactory.gen_alphanumeric(),
        policies=two_random_policies
    )
    with update(profile):
        profile.notes = "Modified!"
    profile.delete()


@pytest.mark.tier(3)
@pytest.mark.parametrize("fill_type,expression,verify", EXPRESSIONS_TO_TEST,
                         ids=[expr[0] for expr in EXPRESSIONS_TO_TEST])
def test_modify_condition_expression(condition_for_expressions, fill_type, expression, verify):
    with update(condition_for_expressions):
        condition_for_expressions.expression = expression.format(
            condition_for_expressions.FIELD_VALUE)
    assert condition_for_expressions.read_expression() == verify.format(
        condition_for_expressions.FIELD_VALUE)


@pytest.mark.sauce
@pytest.mark.tier(2)
def test_alert_crud(alert_collection):
    # CR
    alert = alert_collection.create(
        fauxfactory.gen_alphanumeric(),
        timeline_event=True,
        driving_event="Hourly Timer"
    )
    # U
    with update(alert):
        alert.notification_frequency = "2 Hours"
    # D
    alert.delete()


@pytest.mark.tier(3)
@pytest.mark.meta(blockers=[1303645], automates=[1303645])
def test_control_alert_copy(alert):
    alert_copy = alert.copy(description=fauxfactory.gen_alphanumeric())
    assert alert_copy.exists
    alert_copy.delete()


@pytest.mark.sauce
@pytest.mark.tier(2)
def test_alert_profile_crud(request, alert_profile_class, alert_collection,
                            alert_profile_collection):
    alert = alert_collection.create(
        fauxfactory.gen_alphanumeric(),
        based_on=alert_profile_class.TYPE,
        timeline_event=True,
        driving_event="Hourly Timer"
    )
    request.addfinalizer(alert.delete)
    alert_profile = alert_profile_collection.create(
        alert_profile_class,
        fauxfactory.gen_alphanumeric(),
        alerts=[alert.description]
    )
    with update(alert_profile):
        alert_profile.notes = "Modified!"
    alert_profile.delete()


@pytest.mark.tier(2)
@pytest.mark.meta(blockers=[BZ(1416311, forced_streams=["5.7"])])
def test_alert_profile_assigning(alert_profile):
    if isinstance(alert_profile, alert_profiles.ServerAlertProfile):
        if BZ(1489697, forced_streams=["5.8"]).blocks:
            pytest.skip("BZ 1489697")
        alert_profile.assign_to("Selected Servers", selections=["Servers", "EVM"])
    else:
        alert_profile.assign_to("The Enterprise")


@pytest.mark.tier(2)
@pytest.mark.uncollectif(lambda: current_version() < "5.8")
def test_control_is_ansible_playbook_available_in_actions_dropdown(action_collection):
    view = navigate_to(action_collection, "Add")
    assert "Run Ansible Playbook" in [option.text for option in view.action_type.all_options]
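
# Example invocation (assuming a standard pytest setup for this repo):
#
#     pytest cfme/tests/control/test_basic.py -k test_policy_crud
#
# The ids=... lambdas on the parametrized fixtures keep each generated case
# readable, e.g. test_policy_crud[<PolicyClassName>] for every class in
# POLICIES.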
quarckster/cfme_tests
cfme/tests/control/test_basic.py
cfme/utils/mgmt_system/__init__.py