"""The JuiceNet integration.""" from datetime import timedelta import logging import aiohttp from pyjuicenet import Api, TokenError import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR from .device import JuiceNetApi _LOGGER = logging.getLogger(__name__) PLATFORMS = ["sensor", "switch"] CONFIG_SCHEMA = vol.Schema( vol.All( cv.deprecated(DOMAIN), {DOMAIN: vol.Schema({vol.Required(CONF_ACCESS_TOKEN): cv.string})}, ), extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: HomeAssistant, config: dict): """Set up the JuiceNet component.""" conf = config.get(DOMAIN) hass.data.setdefault(DOMAIN, {}) if not conf: return True hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=conf ) ) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up JuiceNet from a config entry.""" config = entry.data session = async_get_clientsession(hass) access_token = config[CONF_ACCESS_TOKEN] api = Api(access_token, session) juicenet = JuiceNetApi(api) try: await juicenet.setup() except TokenError as error: _LOGGER.error("JuiceNet Error %s", error) return False except aiohttp.ClientError as error: _LOGGER.error("Could not reach the JuiceNet API %s", error) raise ConfigEntryNotReady from error if not juicenet.devices: _LOGGER.error("No JuiceNet devices found for this account") return False _LOGGER.info("%d JuiceNet device(s) found", len(juicenet.devices)) async def async_update_data(): """Update all device states from the JuiceNet API.""" for device in juicenet.devices: await device.update_state(True) return True coordinator = DataUpdateCoordinator( hass, _LOGGER, name="JuiceNet", update_method=async_update_data, update_interval=timedelta(seconds=30), ) hass.data[DOMAIN][entry.entry_id] = { JUICENET_API: juicenet, JUICENET_COORDINATOR: coordinator, } await coordinator.async_config_entry_first_refresh() hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok
"""The tests for the MQTT light platform. Configuration for RGB Version with brightness: light: platform: mqtt name: "Office Light RGB" state_topic: "office/rgb1/light/status" command_topic: "office/rgb1/light/switch" brightness_state_topic: "office/rgb1/brightness/status" brightness_command_topic: "office/rgb1/brightness/set" rgb_state_topic: "office/rgb1/rgb/status" rgb_command_topic: "office/rgb1/rgb/set" qos: 0 payload_on: "on" payload_off: "off" Configuration for XY Version with brightness: light: platform: mqtt name: "Office Light XY" state_topic: "office/xy1/light/status" command_topic: "office/xy1/light/switch" brightness_state_topic: "office/xy1/brightness/status" brightness_command_topic: "office/xy1/brightness/set" xy_state_topic: "office/xy1/xy/status" xy_command_topic: "office/xy1/xy/set" qos: 0 payload_on: "on" payload_off: "off" config without RGB: light: platform: mqtt name: "Office Light" state_topic: "office/rgb1/light/status" command_topic: "office/rgb1/light/switch" brightness_state_topic: "office/rgb1/brightness/status" brightness_command_topic: "office/rgb1/brightness/set" qos: 0 payload_on: "on" payload_off: "off" config without RGB and brightness: light: platform: mqtt name: "Office Light" state_topic: "office/rgb1/light/status" command_topic: "office/rgb1/light/switch" qos: 0 payload_on: "on" payload_off: "off" config for RGB Version with brightness and scale: light: platform: mqtt name: "Office Light RGB" state_topic: "office/rgb1/light/status" command_topic: "office/rgb1/light/switch" brightness_state_topic: "office/rgb1/brightness/status" brightness_command_topic: "office/rgb1/brightness/set" brightness_scale: 99 rgb_state_topic: "office/rgb1/rgb/status" rgb_command_topic: "office/rgb1/rgb/set" rgb_scale: 99 qos: 0 payload_on: "on" payload_off: "off" config with brightness and color temp light: platform: mqtt name: "Office Light Color Temp" state_topic: "office/rgb1/light/status" command_topic: "office/rgb1/light/switch" brightness_state_topic: "office/rgb1/brightness/status" brightness_command_topic: "office/rgb1/brightness/set" brightness_scale: 99 color_temp_state_topic: "office/rgb1/color_temp/status" color_temp_command_topic: "office/rgb1/color_temp/set" qos: 0 payload_on: "on" payload_off: "off" config with brightness and effect light: platform: mqtt name: "Office Light Color Temp" state_topic: "office/rgb1/light/status" command_topic: "office/rgb1/light/switch" brightness_state_topic: "office/rgb1/brightness/status" brightness_command_topic: "office/rgb1/brightness/set" brightness_scale: 99 effect_state_topic: "office/rgb1/effect/status" effect_command_topic: "office/rgb1/effect/set" effect_list: - rainbow - colorloop qos: 0 payload_on: "on" payload_off: "off" config for RGB Version with white value and scale: light: platform: mqtt name: "Office Light RGB" state_topic: "office/rgb1/light/status" command_topic: "office/rgb1/light/switch" white_value_state_topic: "office/rgb1/white_value/status" white_value_command_topic: "office/rgb1/white_value/set" white_value_scale: 99 rgb_state_topic: "office/rgb1/rgb/status" rgb_command_topic: "office/rgb1/rgb/set" rgb_scale: 99 qos: 0 payload_on: "on" payload_off: "off" config for RGB Version with RGB command template: light: platform: mqtt name: "Office Light RGB" state_topic: "office/rgb1/light/status" command_topic: "office/rgb1/light/switch" rgb_state_topic: "office/rgb1/rgb/status" rgb_command_topic: "office/rgb1/rgb/set" rgb_command_template: "{{ '#%02x%02x%02x' | format(red, green, blue)}}" qos: 0 payload_on: 
"on" payload_off: "off" Configuration for HS Version with brightness: light: platform: mqtt name: "Office Light HS" state_topic: "office/hs1/light/status" command_topic: "office/hs1/light/switch" brightness_state_topic: "office/hs1/brightness/status" brightness_command_topic: "office/hs1/brightness/set" hs_state_topic: "office/hs1/hs/status" hs_command_topic: "office/hs1/hs/set" qos: 0 payload_on: "on" payload_off: "off" """ import json from os import path from unittest.mock import call, patch import pytest from homeassistant import config as hass_config from homeassistant.components import light from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_SUPPORTED_FEATURES, SERVICE_RELOAD, STATE_OFF, STATE_ON, ) import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import assert_setup_component, async_fire_mqtt_message from tests.components.light import common DEFAULT_CONFIG = { light.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"} } async def test_fail_setup_if_no_command_topic(hass, mqtt_mock): """Test if command fails with command topic.""" assert await async_setup_component( hass, light.DOMAIN, {light.DOMAIN: {"platform": "mqtt", "name": "test"}} ) await hass.async_block_till_done() assert hass.states.get("light.test") is None async def test_legacy_rgb_white_light(hass, mqtt_mock): """Test legacy RGB + white light flags brightness support.""" assert await async_setup_component( hass, light.DOMAIN, { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light_rgb/set", "rgb_command_topic": "test_light_rgb/rgb/set", "white_value_command_topic": "test_light_rgb/white/set", } }, ) await hass.async_block_till_done() state = hass.states.get("light.test") expected_features = ( light.SUPPORT_COLOR | light.SUPPORT_BRIGHTNESS | light.SUPPORT_WHITE_VALUE ) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == ["hs", "rgbw"] async def test_no_color_brightness_color_temp_hs_white_xy_if_no_topics(hass, mqtt_mock): """Test if there is no color and brightness if no topic.""" assert await async_setup_component( hass, light.DOMAIN, { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", } }, ) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert 
state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("white_value") is None assert state.attributes.get("xy_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == ["onoff"] async_fire_mqtt_message(hass, "test_light_rgb/status", "ON") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("white_value") is None assert state.attributes.get("xy_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "onoff" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == ["onoff"] async def test_legacy_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling of the state via topic for legacy light (white_value).""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", "brightness_state_topic": "test_light_rgb/brightness/status", "brightness_command_topic": "test_light_rgb/brightness/set", "rgb_state_topic": "test_light_rgb/rgb/status", "rgb_command_topic": "test_light_rgb/rgb/set", "color_temp_state_topic": "test_light_rgb/color_temp/status", "color_temp_command_topic": "test_light_rgb/color_temp/set", "effect_state_topic": "test_light_rgb/effect/status", "effect_command_topic": "test_light_rgb/effect/set", "hs_state_topic": "test_light_rgb/hs/status", "hs_command_topic": "test_light_rgb/hs/set", "white_value_state_topic": "test_light_rgb/white_value/status", "white_value_command_topic": "test_light_rgb/white_value/set", "xy_state_topic": "test_light_rgb/xy/status", "xy_command_topic": "test_light_rgb/xy/set", "qos": "0", "payload_on": 1, "payload_off": 0, } } color_modes = ["color_temp", "hs", "rgbw"] assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("white_value") is None assert state.attributes.get("xy_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_light_rgb/status", "1") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None 
assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("white_value") is None assert state.attributes.get("xy_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/status", "0") state = hass.states.get("light.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "test_light_rgb/status", "1") async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", "100") light_state = hass.states.get("light.test") assert light_state.attributes["brightness"] == 100 assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "300") light_state = hass.states.get("light.test") assert light_state.attributes.get("color_temp") is None assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/white_value/status", "100") light_state = hass.states.get("light.test") assert light_state.attributes["white_value"] == 100 assert light_state.attributes["color_temp"] == 300 assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/effect/status", "rainbow") light_state = hass.states.get("light.test") assert light_state.attributes["effect"] == "rainbow" assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/status", "1") async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", "125,125,125") light_state = hass.states.get("light.test") assert light_state.attributes.get("rgb_color") is None assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/white_value/status", "0") light_state = hass.states.get("light.test") assert light_state.attributes.get("rgb_color") == (255, 255, 255) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/hs/status", "200,50") light_state = hass.states.get("light.test") assert light_state.attributes.get("hs_color") == (200, 50) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/xy/status", "0.675,0.322") light_state = hass.states.get("light.test") assert light_state.attributes.get("xy_color") == (0.672, 0.324) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling of the state via topic.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", "brightness_state_topic": 
"test_light_rgb/brightness/status", "brightness_command_topic": "test_light_rgb/brightness/set", "rgb_state_topic": "test_light_rgb/rgb/status", "rgb_command_topic": "test_light_rgb/rgb/set", "rgbw_state_topic": "test_light_rgb/rgbw/status", "rgbw_command_topic": "test_light_rgb/rgbw/set", "rgbww_state_topic": "test_light_rgb/rgbww/status", "rgbww_command_topic": "test_light_rgb/rgbww/set", "color_temp_state_topic": "test_light_rgb/color_temp/status", "color_temp_command_topic": "test_light_rgb/color_temp/set", "effect_state_topic": "test_light_rgb/effect/status", "effect_command_topic": "test_light_rgb/effect/set", "hs_state_topic": "test_light_rgb/hs/status", "hs_command_topic": "test_light_rgb/hs/set", "xy_state_topic": "test_light_rgb/xy/status", "xy_command_topic": "test_light_rgb/xy/set", "qos": "0", "payload_on": 1, "payload_off": 0, } } color_modes = ["color_temp", "hs", "rgb", "rgbw", "rgbww", "xy"] assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("white_value") is None assert state.attributes.get("xy_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_light_rgb/status", "1") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("white_value") is None assert state.attributes.get("xy_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/status", "0") state = hass.states.get("light.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "test_light_rgb/status", "1") async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", "100") light_state = hass.states.get("light.test") assert light_state.attributes.get("brightness") is None assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "300") light_state = hass.states.get("light.test") assert light_state.attributes.get("brightness") == 100 assert light_state.attributes["color_temp"] == 300 assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/effect/status", "rainbow") light_state = hass.states.get("light.test") assert 
light_state.attributes["effect"] == "rainbow" assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", "125,125,125") light_state = hass.states.get("light.test") assert light_state.attributes.get("rgb_color") == (125, 125, 125) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "rgb" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/rgbw/status", "80,40,20,10") light_state = hass.states.get("light.test") assert light_state.attributes.get("rgbw_color") == (80, 40, 20, 10) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "rgbw" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/rgbww/status", "80,40,20,10,8") light_state = hass.states.get("light.test") assert light_state.attributes.get("rgbww_color") == (80, 40, 20, 10, 8) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "rgbww" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/hs/status", "200,50") light_state = hass.states.get("light.test") assert light_state.attributes.get("hs_color") == (200, 50) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/xy/status", "0.675,0.322") light_state = hass.states.get("light.test") assert light_state.attributes.get("xy_color") == (0.675, 0.322) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "xy" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async def test_legacy_invalid_state_via_topic(hass, mqtt_mock, caplog): """Test handling of empty data via topic.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", "brightness_state_topic": "test_light_rgb/brightness/status", "brightness_command_topic": "test_light_rgb/brightness/set", "rgb_state_topic": "test_light_rgb/rgb/status", "rgb_command_topic": "test_light_rgb/rgb/set", "color_temp_state_topic": "test_light_rgb/color_temp/status", "color_temp_command_topic": "test_light_rgb/color_temp/set", "effect_state_topic": "test_light_rgb/effect/status", "effect_command_topic": "test_light_rgb/effect/set", "hs_state_topic": "test_light_rgb/hs/status", "hs_command_topic": "test_light_rgb/hs/set", "white_value_state_topic": "test_light_rgb/white_value/status", "white_value_command_topic": "test_light_rgb/white_value/set", "xy_state_topic": "test_light_rgb/xy/status", "xy_command_topic": "test_light_rgb/xy/set", "qos": "0", "payload_on": 1, "payload_off": 0, } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("white_value") is None assert state.attributes.get("xy_color") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_light_rgb/status", "1") 
async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", "255,255,255") async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", "255") async_fire_mqtt_message(hass, "test_light_rgb/effect/status", "none") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (0, 0) assert state.attributes.get("white_value") is None assert state.attributes.get("xy_color") == (0.323, 0.329) async_fire_mqtt_message(hass, "test_light_rgb/status", "") assert "Ignoring empty state message" in caplog.text light_state = hass.states.get("light.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", "") assert "Ignoring empty brightness message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes["brightness"] == 255 async_fire_mqtt_message(hass, "test_light_rgb/effect/status", "") assert "Ignoring empty effect message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes["effect"] == "none" async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", "") assert "Ignoring empty rgb message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("rgb_color") == (255, 255, 255) async_fire_mqtt_message(hass, "test_light_rgb/hs/status", "") assert "Ignoring empty hs message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("hs_color") == (0, 0) async_fire_mqtt_message(hass, "test_light_rgb/hs/status", "bad,bad") assert "Failed to parse hs state update" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("hs_color") == (0, 0) async_fire_mqtt_message(hass, "test_light_rgb/xy/status", "") assert "Ignoring empty xy-color message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("xy_color") == (0.323, 0.329) async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "153") async_fire_mqtt_message(hass, "test_light_rgb/white_value/status", "255") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_temp") == 153 assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") is None assert state.attributes.get("white_value") == 255 assert state.attributes.get("xy_color") is None async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "") assert "Ignoring empty color temp message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes["color_temp"] == 153 async_fire_mqtt_message(hass, "test_light_rgb/white_value/status", "") assert "Ignoring empty white value message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes["white_value"] == 255 async def test_invalid_state_via_topic(hass, mqtt_mock, caplog): """Test handling of empty data via topic.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", "brightness_state_topic": "test_light_rgb/brightness/status", "brightness_command_topic": "test_light_rgb/brightness/set", 
"color_mode_state_topic": "test_light_rgb/color_mode/status", "rgb_state_topic": "test_light_rgb/rgb/status", "rgb_command_topic": "test_light_rgb/rgb/set", "rgbw_state_topic": "test_light_rgb/rgbw/status", "rgbw_command_topic": "test_light_rgb/rgbw/set", "rgbww_state_topic": "test_light_rgb/rgbww/status", "rgbww_command_topic": "test_light_rgb/rgbww/set", "color_temp_state_topic": "test_light_rgb/color_temp/status", "color_temp_command_topic": "test_light_rgb/color_temp/set", "effect_state_topic": "test_light_rgb/effect/status", "effect_command_topic": "test_light_rgb/effect/set", "hs_state_topic": "test_light_rgb/hs/status", "hs_command_topic": "test_light_rgb/hs/set", "xy_state_topic": "test_light_rgb/xy/status", "xy_command_topic": "test_light_rgb/xy/set", "qos": "0", "payload_on": 1, "payload_off": 0, } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("xy_color") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_light_rgb/status", "1") async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "rgb") async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", "255,255,255") async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", "255") async_fire_mqtt_message(hass, "test_light_rgb/effect/status", "none") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (0, 0) assert state.attributes.get("xy_color") == (0.323, 0.329) assert state.attributes.get("color_mode") == "rgb" async_fire_mqtt_message(hass, "test_light_rgb/status", "") assert "Ignoring empty state message" in caplog.text light_state = hass.states.get("light.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", "") assert "Ignoring empty brightness message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes["brightness"] == 255 async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "") assert "Ignoring empty color mode message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes["effect"] == "none" async_fire_mqtt_message(hass, "test_light_rgb/effect/status", "") assert "Ignoring empty effect message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes["effect"] == "none" async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", "") assert "Ignoring empty rgb message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("rgb_color") == (255, 255, 255) async_fire_mqtt_message(hass, "test_light_rgb/hs/status", "") assert "Ignoring empty hs message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("hs_color") == (0, 0) async_fire_mqtt_message(hass, 
"test_light_rgb/hs/status", "bad,bad") assert "Failed to parse hs state update" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("hs_color") == (0, 0) async_fire_mqtt_message(hass, "test_light_rgb/xy/status", "") assert "Ignoring empty xy-color message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("xy_color") == (0.323, 0.329) async_fire_mqtt_message(hass, "test_light_rgb/rgbw/status", "255,255,255,1") async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "rgbw") async_fire_mqtt_message(hass, "test_light_rgb/rgbw/status", "") assert "Ignoring empty rgbw message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("rgbw_color") == (255, 255, 255, 1) async_fire_mqtt_message(hass, "test_light_rgb/rgbww/status", "255,255,255,1,2") async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "rgbww") async_fire_mqtt_message(hass, "test_light_rgb/rgbww/status", "") assert "Ignoring empty rgbww message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes.get("rgbww_color") == (255, 255, 255, 1, 2) async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "153") async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "color_temp") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_temp") == 153 assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") is None assert state.attributes.get("xy_color") is None async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "") assert "Ignoring empty color temp message" in caplog.text light_state = hass.states.get("light.test") assert light_state.attributes["color_temp"] == 153 async def test_brightness_controlling_scale(hass, mqtt_mock): """Test the brightness controlling scale.""" with assert_setup_component(1, light.DOMAIN): assert await async_setup_component( hass, light.DOMAIN, { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_scale/status", "command_topic": "test_scale/set", "brightness_state_topic": "test_scale/brightness/status", "brightness_command_topic": "test_scale/brightness/set", "brightness_scale": "99", "qos": 0, "payload_on": "on", "payload_off": "off", } }, ) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("brightness") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_scale/status", "on") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") is None async_fire_mqtt_message(hass, "test_scale/status", "off") state = hass.states.get("light.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "test_scale/status", "on") async_fire_mqtt_message(hass, "test_scale/brightness/status", "99") light_state = hass.states.get("light.test") assert light_state.attributes["brightness"] == 255 async def test_brightness_from_rgb_controlling_scale(hass, mqtt_mock): """Test the brightness controlling scale.""" with assert_setup_component(1, light.DOMAIN): assert await async_setup_component( hass, light.DOMAIN, { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_scale_rgb/status", "command_topic": "test_scale_rgb/set", 
"rgb_state_topic": "test_scale_rgb/rgb/status", "rgb_command_topic": "test_scale_rgb/rgb/set", "qos": 0, "payload_on": "on", "payload_off": "off", } }, ) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("brightness") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_scale_rgb/status", "on") async_fire_mqtt_message(hass, "test_scale_rgb/rgb/status", "255,0,0") state = hass.states.get("light.test") assert state.attributes.get("brightness") == 255 async_fire_mqtt_message(hass, "test_scale_rgb/rgb/status", "127,0,0") state = hass.states.get("light.test") assert state.attributes.get("brightness") == 127 async def test_legacy_white_value_controlling_scale(hass, mqtt_mock): """Test the white_value controlling scale.""" with assert_setup_component(1, light.DOMAIN): assert await async_setup_component( hass, light.DOMAIN, { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_scale/status", "command_topic": "test_scale/set", "white_value_state_topic": "test_scale/white_value/status", "white_value_command_topic": "test_scale/white_value/set", "white_value_scale": "99", "qos": 0, "payload_on": "on", "payload_off": "off", } }, ) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("white_value") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_scale/status", "on") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("white_value") is None async_fire_mqtt_message(hass, "test_scale/status", "off") state = hass.states.get("light.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "test_scale/status", "on") async_fire_mqtt_message(hass, "test_scale/white_value/status", "99") light_state = hass.states.get("light.test") assert light_state.attributes["white_value"] == 255 async def test_legacy_controlling_state_via_topic_with_templates(hass, mqtt_mock): """Test the setting of the state with a template.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", "brightness_command_topic": "test_light_rgb/brightness/set", "rgb_command_topic": "test_light_rgb/rgb/set", "color_temp_command_topic": "test_light_rgb/color_temp/set", "effect_command_topic": "test_light_rgb/effect/set", "hs_command_topic": "test_light_rgb/hs/set", "white_value_command_topic": "test_light_rgb/white_value/set", "xy_command_topic": "test_light_rgb/xy/set", "brightness_state_topic": "test_light_rgb/brightness/status", "color_temp_state_topic": "test_light_rgb/color_temp/status", "effect_state_topic": "test_light_rgb/effect/status", "hs_state_topic": "test_light_rgb/hs/status", "rgb_state_topic": "test_light_rgb/rgb/status", "white_value_state_topic": "test_light_rgb/white_value/status", "xy_state_topic": "test_light_rgb/xy/status", "state_value_template": "{{ value_json.hello }}", "brightness_value_template": "{{ value_json.hello }}", "color_temp_value_template": "{{ value_json.hello }}", "effect_value_template": "{{ value_json.hello }}", "hs_value_template": '{{ value_json.hello | join(",") }}', "rgb_value_template": '{{ value_json.hello | join(",") }}', "white_value_template": "{{ value_json.hello }}", "xy_value_template": '{{ value_json.hello | join(",") }}', } } assert await async_setup_component(hass, light.DOMAIN, 
config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("brightness") is None assert state.attributes.get("rgb_color") is None async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", '{"hello": [1, 2, 3]}') async_fire_mqtt_message(hass, "test_light_rgb/status", '{"hello": "ON"}') async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", '{"hello": "50"}') async_fire_mqtt_message( hass, "test_light_rgb/color_temp/status", '{"hello": "300"}' ) async_fire_mqtt_message( hass, "test_light_rgb/effect/status", '{"hello": "rainbow"}' ) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 50 assert state.attributes.get("rgb_color") == (84, 169, 255) assert state.attributes.get("color_temp") is None assert state.attributes.get("effect") == "rainbow" assert state.attributes.get("white_value") is None async_fire_mqtt_message( hass, "test_light_rgb/white_value/status", '{"hello": "75"}' ) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 50 assert state.attributes.get("rgb_color") is None assert state.attributes.get("color_temp") == 300 assert state.attributes.get("effect") == "rainbow" assert state.attributes.get("white_value") == 75 async_fire_mqtt_message(hass, "test_light_rgb/hs/status", '{"hello": [100,50]}') async_fire_mqtt_message(hass, "test_light_rgb/white_value/status", '{"hello": "0"}') state = hass.states.get("light.test") assert state.attributes.get("hs_color") == (100, 50) async_fire_mqtt_message( hass, "test_light_rgb/xy/status", '{"hello": [0.123,0.123]}' ) state = hass.states.get("light.test") assert state.attributes.get("xy_color") == (0.14, 0.131) async def test_controlling_state_via_topic_with_templates(hass, mqtt_mock): """Test the setting of the state with a template.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", "brightness_command_topic": "test_light_rgb/brightness/set", "rgb_command_topic": "test_light_rgb/rgb/set", "rgbw_command_topic": "test_light_rgb/rgbw/set", "rgbww_command_topic": "test_light_rgb/rgbw/set", "color_temp_command_topic": "test_light_rgb/color_temp/set", "effect_command_topic": "test_light_rgb/effect/set", "hs_command_topic": "test_light_rgb/hs/set", "xy_command_topic": "test_light_rgb/xy/set", "brightness_state_topic": "test_light_rgb/brightness/status", "color_temp_state_topic": "test_light_rgb/color_temp/status", "effect_state_topic": "test_light_rgb/effect/status", "hs_state_topic": "test_light_rgb/hs/status", "rgb_state_topic": "test_light_rgb/rgb/status", "rgbw_state_topic": "test_light_rgb/rgbw/status", "rgbww_state_topic": "test_light_rgb/rgbww/status", "xy_state_topic": "test_light_rgb/xy/status", "state_value_template": "{{ value_json.hello }}", "brightness_value_template": "{{ value_json.hello }}", "color_temp_value_template": "{{ value_json.hello }}", "effect_value_template": "{{ value_json.hello }}", "hs_value_template": '{{ value_json.hello | join(",") }}', "rgb_value_template": '{{ value_json.hello | join(",") }}', "rgbw_value_template": '{{ value_json.hello | join(",") }}', "rgbww_value_template": '{{ value_json.hello | join(",") }}', "xy_value_template": '{{ value_json.hello | join(",") }}', } } color_modes = ["color_temp", "hs", "rgb", "rgbw", "rgbww", "xy"] assert await async_setup_component(hass, light.DOMAIN, config) await 
hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("brightness") is None assert state.attributes.get("rgb_color") is None async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", '{"hello": [1, 2, 3]}') async_fire_mqtt_message(hass, "test_light_rgb/status", '{"hello": "ON"}') async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", '{"hello": "50"}') async_fire_mqtt_message( hass, "test_light_rgb/effect/status", '{"hello": "rainbow"}' ) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 50 assert state.attributes.get("rgb_color") == (1, 2, 3) assert state.attributes.get("effect") == "rainbow" assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgb" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message( hass, "test_light_rgb/rgbw/status", '{"hello": [1, 2, 3, 4]}' ) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgbw_color") == (1, 2, 3, 4) assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgbw" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message( hass, "test_light_rgb/rgbww/status", '{"hello": [1, 2, 3, 4, 5]}' ) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgbww_color") == (1, 2, 3, 4, 5) assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgbww" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message( hass, "test_light_rgb/color_temp/status", '{"hello": "300"}' ) state = hass.states.get("light.test") assert state.attributes.get("color_temp") == 300 assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/hs/status", '{"hello": [100,50]}') state = hass.states.get("light.test") assert state.attributes.get("hs_color") == (100, 50) assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message( hass, "test_light_rgb/xy/status", '{"hello": [0.123,0.123]}' ) state = hass.states.get("light.test") assert state.attributes.get("xy_color") == (0.123, 0.123) assert state.attributes.get(light.ATTR_COLOR_MODE) == "xy" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async def test_controlling_state_via_topic_with_value_template(hass, mqtt_mock): """Test the setting of the state with undocumented value_template.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", "value_template": "{{ value_json.hello }}", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "test_light_rgb/status", '{"hello": "ON"}') state = hass.states.get("light.test") assert state.state == STATE_ON async_fire_mqtt_message(hass, "test_light_rgb/status", '{"hello": "OFF"}') state = hass.states.get("light.test") assert state.state == STATE_OFF async def test_legacy_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending of command in optimistic mode.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", 
"command_topic": "test_light_rgb/set", "brightness_command_topic": "test_light_rgb/brightness/set", "rgb_command_topic": "test_light_rgb/rgb/set", "color_temp_command_topic": "test_light_rgb/color_temp/set", "effect_command_topic": "test_light_rgb/effect/set", "hs_command_topic": "test_light_rgb/hs/set", "white_value_command_topic": "test_light_rgb/white_value/set", "xy_command_topic": "test_light_rgb/xy/set", "effect_list": ["colorloop", "random"], "qos": 2, "payload_on": "on", "payload_off": "off", } } color_modes = ["color_temp", "hs", "rgbw"] fake_state = ha.State( "light.test", "on", { "brightness": 95, "hs_color": [100, 100], "effect": "random", "color_temp": 100, # TODO: Test restoring state with white_value "white_value": 0, }, ) with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ), assert_setup_component(1, light.DOMAIN): assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 95 assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" assert state.attributes.get("color_temp") is None assert state.attributes.get("white_value") is None assert state.attributes.get(ATTR_ASSUMED_STATE) assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "off", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_OFF mqtt_mock.reset_mock() await common.async_turn_on( hass, "light.test", brightness=50, xy_color=[0.123, 0.123] ) state = hass.states.get("light.test") assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) state = hass.states.get("light.test") assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) state = hass.states.get("light.test") assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/set", "on", 2, False), call("test_light_rgb/rgb/set", "255,128,0", 2, False), call("test_light_rgb/brightness/set", "50", 2, False), call("test_light_rgb/hs/set", "359.0,78.0", 2, False), call("test_light_rgb/xy/set", "0.14,0.131", 2, False), ], any_order=True, ) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["rgb_color"] == (255, 128, 0) assert state.attributes["brightness"] == 50 assert state.attributes["hs_color"] == (30.118, 100) assert state.attributes.get("white_value") is None assert state.attributes["xy_color"] == 
(0.611, 0.375) assert state.attributes.get("color_temp") is None await common.async_turn_on(hass, "light.test", white_value=80, color_temp=125) state = hass.states.get("light.test") assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/white_value/set", "80", 2, False), call("test_light_rgb/color_temp/set", "125", 2, False), ], any_order=True, ) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes["brightness"] == 50 assert state.attributes.get("hs_color") is None assert state.attributes["white_value"] == 80 assert state.attributes.get("xy_color") is None assert state.attributes["color_temp"] == 125 async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending of command in optimistic mode.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light_rgb/set", "brightness_command_topic": "test_light_rgb/brightness/set", "rgb_command_topic": "test_light_rgb/rgb/set", "rgbw_command_topic": "test_light_rgb/rgbw/set", "rgbww_command_topic": "test_light_rgb/rgbww/set", "color_temp_command_topic": "test_light_rgb/color_temp/set", "effect_command_topic": "test_light_rgb/effect/set", "hs_command_topic": "test_light_rgb/hs/set", "xy_command_topic": "test_light_rgb/xy/set", "effect_list": ["colorloop", "random"], "qos": 2, "payload_on": "on", "payload_off": "off", } } color_modes = ["color_temp", "hs", "rgb", "rgbw", "rgbww", "xy"] fake_state = ha.State( "light.test", "on", { "brightness": 95, "hs_color": [100, 100], "effect": "random", "color_temp": 100, "color_mode": "hs", }, ) with patch( "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state", return_value=fake_state, ), assert_setup_component(1, light.DOMAIN): assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 95 assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" assert state.attributes.get("color_temp") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_on(hass, "light.test", effect="colorloop") mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/set", "on", 2, False), call("test_light_rgb/effect/set", "colorloop", 2, False), ], any_order=True, ) assert mqtt_mock.async_publish.call_count == 2 mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("effect") == "colorloop" assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "off", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( hass, "light.test", brightness=10, rgb_color=[80, 40, 20] ) 
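
# --- Hedged aside (not part of the test file) -------------------------------
# Where numbers like hs (30.118, 100) and xy (0.611, 0.375) in the optimistic
# test above come from: they are the RGB value (255, 128, 0) converted with
# Home Assistant's color utilities, which round to three decimals. A quick
# check, assuming homeassistant.util.color is importable in this environment:
from homeassistant.util import color as color_util

assert color_util.color_RGB_to_hs(255, 128, 0) == (30.118, 100.0)
assert color_util.color_RGB_to_xy(255, 128, 0) == (0.611, 0.375)
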

async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock):
    """Test the sending of command in optimistic mode."""
    config = {
        light.DOMAIN: {
            "platform": "mqtt",
            "name": "test",
            "command_topic": "test_light_rgb/set",
            "brightness_command_topic": "test_light_rgb/brightness/set",
            "rgb_command_topic": "test_light_rgb/rgb/set",
            "rgbw_command_topic": "test_light_rgb/rgbw/set",
            "rgbww_command_topic": "test_light_rgb/rgbww/set",
            "color_temp_command_topic": "test_light_rgb/color_temp/set",
            "effect_command_topic": "test_light_rgb/effect/set",
            "hs_command_topic": "test_light_rgb/hs/set",
            "xy_command_topic": "test_light_rgb/xy/set",
            "effect_list": ["colorloop", "random"],
            "qos": 2,
            "payload_on": "on",
            "payload_off": "off",
        }
    }
    color_modes = ["color_temp", "hs", "rgb", "rgbw", "rgbww", "xy"]
    fake_state = ha.State(
        "light.test",
        "on",
        {
            "brightness": 95,
            "hs_color": [100, 100],
            "effect": "random",
            "color_temp": 100,
            "color_mode": "hs",
        },
    )

    with patch(
        "homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state",
        return_value=fake_state,
    ), assert_setup_component(1, light.DOMAIN):
        assert await async_setup_component(hass, light.DOMAIN, config)
        await hass.async_block_till_done()

    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("brightness") == 95
    assert state.attributes.get("hs_color") == (100, 100)
    assert state.attributes.get("effect") == "random"
    assert state.attributes.get("color_temp") is None
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes
    assert state.attributes.get(ATTR_ASSUMED_STATE)

    await common.async_turn_on(hass, "light.test", effect="colorloop")
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 2, False),
            call("test_light_rgb/effect/set", "colorloop", 2, False),
        ],
        any_order=True,
    )
    assert mqtt_mock.async_publish.call_count == 2
    mqtt_mock.async_publish.reset_mock()
    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("effect") == "colorloop"
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    await common.async_turn_off(hass, "light.test")
    mqtt_mock.async_publish.assert_called_once_with(
        "test_light_rgb/set", "off", 2, False
    )
    mqtt_mock.async_publish.reset_mock()
    state = hass.states.get("light.test")
    assert state.state == STATE_OFF
    assert state.attributes.get(light.ATTR_COLOR_MODE) is None
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    await common.async_turn_on(
        hass, "light.test", brightness=10, rgb_color=[80, 40, 20]
    )
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 2, False),
            call("test_light_rgb/brightness/set", "10", 2, False),
            call("test_light_rgb/rgb/set", "80,40,20", 2, False),
        ],
        any_order=True,
    )
    assert mqtt_mock.async_publish.call_count == 3
    mqtt_mock.reset_mock()
    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("brightness") == 10
    assert state.attributes.get("rgb_color") == (80, 40, 20)
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgb"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    await common.async_turn_on(
        hass, "light.test", brightness=20, rgbw_color=[80, 40, 20, 10]
    )
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 2, False),
            call("test_light_rgb/brightness/set", "20", 2, False),
            call("test_light_rgb/rgbw/set", "80,40,20,10", 2, False),
        ],
        any_order=True,
    )
    assert mqtt_mock.async_publish.call_count == 3
    mqtt_mock.reset_mock()
    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("brightness") == 20
    assert state.attributes.get("rgbw_color") == (80, 40, 20, 10)
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgbw"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    await common.async_turn_on(
        hass, "light.test", brightness=40, rgbww_color=[80, 40, 20, 10, 8]
    )
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 2, False),
            call("test_light_rgb/brightness/set", "40", 2, False),
            call("test_light_rgb/rgbww/set", "80,40,20,10,8", 2, False),
        ],
        any_order=True,
    )
    assert mqtt_mock.async_publish.call_count == 3
    mqtt_mock.reset_mock()
    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("brightness") == 40
    assert state.attributes.get("rgbww_color") == (80, 40, 20, 10, 8)
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgbww"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78])
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 2, False),
            call("test_light_rgb/brightness/set", "50", 2, False),
            call("test_light_rgb/hs/set", "359.0,78.0", 2, False),
        ],
        any_order=True,
    )
    assert mqtt_mock.async_publish.call_count == 3
    mqtt_mock.reset_mock()
    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("brightness") == 50
    assert state.attributes.get("hs_color") == (359.0, 78.0)
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    await common.async_turn_on(hass, "light.test", brightness=60, xy_color=[0.2, 0.3])
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 2, False),
            call("test_light_rgb/brightness/set", "60", 2, False),
            call("test_light_rgb/xy/set", "0.2,0.3", 2, False),
        ],
        any_order=True,
    )
    assert mqtt_mock.async_publish.call_count == 3
    mqtt_mock.reset_mock()
    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("brightness") == 60
    assert state.attributes.get("xy_color") == (0.2, 0.3)
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "xy"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    await common.async_turn_on(hass, "light.test", color_temp=125)
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/color_temp/set", "125", 2, False),
        ],
        any_order=True,
    )
    assert mqtt_mock.async_publish.call_count == 2
    mqtt_mock.reset_mock()
    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("brightness") == 60
    assert state.attributes.get("color_temp") == 125
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes


async def test_sending_mqtt_rgb_command_with_template(hass, mqtt_mock):
    """Test the sending of RGB command with template."""
    config = {
        light.DOMAIN: {
            "platform": "mqtt",
            "name": "test",
            "command_topic": "test_light_rgb/set",
            "rgb_command_topic": "test_light_rgb/rgb/set",
            "rgb_command_template": '{{ "#%02x%02x%02x" | '
            "format(red, green, blue)}}",
            "payload_on": "on",
            "payload_off": "off",
            "qos": 0,
        }
    }

    assert await async_setup_component(hass, light.DOMAIN, config)
    await hass.async_block_till_done()

    state = hass.states.get("light.test")
    assert state.state == STATE_OFF

    await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 64])

    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 0, False),
            call("test_light_rgb/rgb/set", "#ff8040", 0, False),
        ],
        any_order=True,
    )

    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes["rgb_color"] == (255, 128, 64)


async def test_sending_mqtt_rgbw_command_with_template(hass, mqtt_mock):
    """Test the sending of RGBW command with template."""
    config = {
        light.DOMAIN: {
            "platform": "mqtt",
            "name": "test",
            "command_topic": "test_light_rgb/set",
            "rgbw_command_topic": "test_light_rgb/rgbw/set",
            "rgbw_command_template": '{{ "#%02x%02x%02x%02x" | '
            "format(red, green, blue, white)}}",
            "payload_on": "on",
            "payload_off": "off",
            "qos": 0,
        }
    }

    assert await async_setup_component(hass, light.DOMAIN, config)
    await hass.async_block_till_done()

    state = hass.states.get("light.test")
    assert state.state == STATE_OFF

    await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 64, 32])

    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 0, False),
            call("test_light_rgb/rgbw/set", "#ff804020", 0, False),
        ],
        any_order=True,
    )

    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes["rgbw_color"] == (255, 128, 64, 32)


async def test_sending_mqtt_rgbww_command_with_template(hass, mqtt_mock):
    """Test the sending of RGBWW command with template."""
    config = {
        light.DOMAIN: {
            "platform": "mqtt",
            "name": "test",
            "command_topic": "test_light_rgb/set",
            "rgbww_command_topic": "test_light_rgb/rgbww/set",
            "rgbww_command_template": '{{ "#%02x%02x%02x%02x%02x" | '
            "format(red, green, blue, cold_white, warm_white)}}",
            "payload_on": "on",
            "payload_off": "off",
            "qos": 0,
        }
    }

    assert await async_setup_component(hass, light.DOMAIN, config)
    await hass.async_block_till_done()

    state = hass.states.get("light.test")
    assert state.state == STATE_OFF

    await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 64, 32, 16])

    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light_rgb/set", "on", 0, False),
            call("test_light_rgb/rgbww/set", "#ff80402010", 0, False),
        ],
        any_order=True,
    )

    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes["rgbww_color"] == (255, 128, 64, 32, 16)
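
# --- Hedged aside (not part of the test file) -------------------------------
# The rgb/rgbw/rgbww command templates above are plain printf-style hex
# formatting; the same conversion in pure Python shows why (255, 128, 64)
# becomes "#ff8040" and (255, 128, 64, 32, 16) becomes "#ff80402010":
assert "#%02x%02x%02x" % (255, 128, 64) == "#ff8040"
assert "#%02x%02x%02x%02x%02x" % (255, 128, 64, 32, 16) == "#ff80402010"
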
"color_temp_command_topic": "test_light_color_temp/color_temp/set", "color_temp_command_template": "{{ (1000 / value) | round(0) }}", "payload_on": "on", "payload_off": "off", "qos": 0, } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF await common.async_turn_on(hass, "light.test", color_temp=100) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_color_temp/set", "on", 0, False), call("test_light_color_temp/color_temp/set", "10", 0, False), ], any_order=True, ) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["color_temp"] == 100 async def test_on_command_first(hass, mqtt_mock): """Test on command being sent before brightness.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "brightness_command_topic": "test_light/bright", "on_command_type": "first", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF await common.async_turn_on(hass, "light.test", brightness=50) # Should get the following MQTT messages. # test_light/set: 'ON' # test_light/bright: 50 mqtt_mock.async_publish.assert_has_calls( [ call("test_light/set", "ON", 0, False), call("test_light/bright", "50", 0, False), ], ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) async def test_on_command_last(hass, mqtt_mock): """Test on command being sent after brightness.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "brightness_command_topic": "test_light/bright", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF await common.async_turn_on(hass, "light.test", brightness=50) # Should get the following MQTT messages. # test_light/bright: 50 # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/bright", "50", 0, False), call("test_light/set", "ON", 0, False), ], ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) async def test_on_command_brightness(hass, mqtt_mock): """Test on command being sent as only brightness.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "brightness_command_topic": "test_light/bright", "rgb_command_topic": "test_light/rgb", "on_command_type": "brightness", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF # Turn on w/ no brightness - should set to max await common.async_turn_on(hass, "light.test") # Should get the following MQTT messages. 
# test_light/bright: 255 mqtt_mock.async_publish.assert_called_once_with( "test_light/bright", "255", 0, False ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) mqtt_mock.async_publish.reset_mock() # Turn on w/ brightness await common.async_turn_on(hass, "light.test", brightness=50) mqtt_mock.async_publish.assert_called_once_with("test_light/bright", "50", 0, False) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") # Turn on w/ just a color to ensure brightness gets # added and sent. await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "255,128,0", 0, False), call("test_light/bright", "50", 0, False), ], any_order=True, ) async def test_on_command_brightness_scaled(hass, mqtt_mock): """Test brightness scale.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "brightness_command_topic": "test_light/bright", "brightness_scale": 100, "rgb_command_topic": "test_light/rgb", "on_command_type": "brightness", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF # Turn on w/ no brightness - should set to max await common.async_turn_on(hass, "light.test") # Should get the following MQTT messages. # test_light/bright: 100 mqtt_mock.async_publish.assert_called_once_with( "test_light/bright", "100", 0, False ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) mqtt_mock.async_publish.reset_mock() # Turn on w/ brightness await common.async_turn_on(hass, "light.test", brightness=50) mqtt_mock.async_publish.assert_called_once_with("test_light/bright", "20", 0, False) mqtt_mock.async_publish.reset_mock() # Turn on w/ max brightness await common.async_turn_on(hass, "light.test", brightness=255) mqtt_mock.async_publish.assert_called_once_with( "test_light/bright", "100", 0, False ) mqtt_mock.async_publish.reset_mock() # Turn on w/ min brightness await common.async_turn_on(hass, "light.test", brightness=1) mqtt_mock.async_publish.assert_called_once_with("test_light/bright", "1", 0, False) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") # Turn on w/ just a color to ensure brightness gets # added and sent. await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "255,128,0", 0, False), call("test_light/bright", "1", 0, False), ], any_order=True, ) async def test_legacy_on_command_rgb(hass, mqtt_mock): """Test on command in RGB brightness mode.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "rgb_command_topic": "test_light/rgb", "white_value_command_topic": "test_light/white_value", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF await common.async_turn_on(hass, "light.test", brightness=127) # Should get the following MQTT messages. 
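    # (no brightness topic is configured, so brightness is encoded in the scaled RGB payload)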
# test_light/rgb: '127,127,127' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "127,127,127", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "light.test", brightness=255) # Should get the following MQTT messages. # test_light/rgb: '255,255,255' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "255,255,255", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "light.test", brightness=1) # Should get the following MQTT messages. # test_light/rgb: '1,1,1' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "1,1,1", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "1,0,0", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "light.test", brightness=255) # Should get the following MQTT messages. # test_light/rgb: '255,128,0' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "255,128,0", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() async def test_on_command_rgb(hass, mqtt_mock): """Test on command in RGB brightness mode.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "rgb_command_topic": "test_light/rgb", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF await common.async_turn_on(hass, "light.test", brightness=127) # Should get the following MQTT messages. # test_light/rgb: '127,127,127' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "127,127,127", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "light.test", brightness=255) # Should get the following MQTT messages. # test_light/rgb: '255,255,255' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "255,255,255", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "light.test", brightness=1) # Should get the following MQTT messages. # test_light/rgb: '1,1,1' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "1,1,1", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. 
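    # (the last requested brightness was 1, so 255,128,0 is scaled down to 1,0,0)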
    await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0])
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgb", "1,0,0", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()

    await common.async_turn_on(hass, "light.test", brightness=255)
    # Should get the following MQTT messages.
    # test_light/rgb: '255,128,0'
    # test_light/set: 'ON'
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgb", "255,128,0", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()


async def test_on_command_rgbw(hass, mqtt_mock):
    """Test on command in RGBW brightness mode."""
    config = {
        light.DOMAIN: {
            "platform": "mqtt",
            "name": "test",
            "command_topic": "test_light/set",
            "rgbw_command_topic": "test_light/rgbw",
        }
    }

    assert await async_setup_component(hass, light.DOMAIN, config)
    await hass.async_block_till_done()

    state = hass.states.get("light.test")
    assert state.state == STATE_OFF

    await common.async_turn_on(hass, "light.test", brightness=127)
    # Should get the following MQTT messages.
    # test_light/rgbw: '127,127,127,127'
    # test_light/set: 'ON'
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgbw", "127,127,127,127", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()

    await common.async_turn_on(hass, "light.test", brightness=255)
    # Should get the following MQTT messages.
    # test_light/rgbw: '255,255,255,255'
    # test_light/set: 'ON'
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgbw", "255,255,255,255", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()

    await common.async_turn_on(hass, "light.test", brightness=1)
    # Should get the following MQTT messages.
    # test_light/rgbw: '1,1,1,1'
    # test_light/set: 'ON'
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgbw", "1,1,1,1", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()

    await common.async_turn_off(hass, "light.test")
    mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False)

    # Ensure color gets scaled with brightness.
    # (at brightness 1 the white channel 16 scales down to 0)
    await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 0, 16])
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgbw", "1,0,0,0", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()

    await common.async_turn_on(hass, "light.test", brightness=255)
    # Should get the following MQTT messages.
    # test_light/rgbw: '255,128,0,16'
    # test_light/set: 'ON'
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgbw", "255,128,0,16", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()


async def test_on_command_rgbww(hass, mqtt_mock):
    """Test on command in RGBWW brightness mode."""
    config = {
        light.DOMAIN: {
            "platform": "mqtt",
            "name": "test",
            "command_topic": "test_light/set",
            "rgbww_command_topic": "test_light/rgbww",
        }
    }

    assert await async_setup_component(hass, light.DOMAIN, config)
    await hass.async_block_till_done()

    state = hass.states.get("light.test")
    assert state.state == STATE_OFF

    await common.async_turn_on(hass, "light.test", brightness=127)
    # Should get the following MQTT messages.
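    # (brightness scales all five channels when no separate brightness topic is set)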
# test_light/rgbww: '127,127,127,127,127' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgbww", "127,127,127,127,127", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "light.test", brightness=255) # Should get the following MQTT messages. # test_light/rgbww: '255,255,255,255,255' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgbww", "255,255,255,255,255", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "light.test", brightness=1) # Should get the following MQTT messages. # test_light/rgbww: '1,1,1,1,1' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgbww", "1,1,1,1,1", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 0, 16, 32]) mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgbww", "1,0,0,0,0", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "light.test", brightness=255) # Should get the following MQTT messages. # test_light/rgbww: '255,128,0,16,32' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgbww", "255,128,0,16,32", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() async def test_on_command_rgb_template(hass, mqtt_mock): """Test on command in RGB brightness mode with RGB template.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "rgb_command_topic": "test_light/rgb", "rgb_command_template": "{{ red }}/{{ green }}/{{ blue }}", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF await common.async_turn_on(hass, "light.test", brightness=127) # Should get the following MQTT messages. # test_light/rgb: '127/127/127' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/rgb", "127/127/127", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) async def test_on_command_rgbw_template(hass, mqtt_mock): """Test on command in RGBW brightness mode with RGBW template.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "rgbw_command_topic": "test_light/rgbw", "rgbw_command_template": "{{ red }}/{{ green }}/{{ blue }}/{{ white }}", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF await common.async_turn_on(hass, "light.test", brightness=127) # Should get the following MQTT messages. 
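    # (the rgbw_command_template joins the four channel values with slashes)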
    # test_light/rgbw: '127/127/127/127'
    # test_light/set: 'ON'
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgbw", "127/127/127/127", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()

    await common.async_turn_off(hass, "light.test")
    mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False)


async def test_on_command_rgbww_template(hass, mqtt_mock):
    """Test on command in RGBWW brightness mode with RGBWW template."""
    config = {
        light.DOMAIN: {
            "platform": "mqtt",
            "name": "test",
            "command_topic": "test_light/set",
            "rgbww_command_topic": "test_light/rgbww",
            "rgbww_command_template": "{{ red }}/{{ green }}/{{ blue }}/{{ cold_white }}/{{ warm_white }}",
        }
    }

    assert await async_setup_component(hass, light.DOMAIN, config)
    await hass.async_block_till_done()

    state = hass.states.get("light.test")
    assert state.state == STATE_OFF

    await common.async_turn_on(hass, "light.test", brightness=127)
    # Should get the following MQTT messages.
    # test_light/rgbww: '127/127/127/127/127'
    # test_light/set: 'ON'
    mqtt_mock.async_publish.assert_has_calls(
        [
            call("test_light/rgbww", "127/127/127/127/127", 0, False),
            call("test_light/set", "ON", 0, False),
        ],
        any_order=True,
    )
    mqtt_mock.async_publish.reset_mock()

    await common.async_turn_off(hass, "light.test")
    mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False)


async def test_explicit_color_mode(hass, mqtt_mock):
    """Test explicit color mode over mqtt."""
    config = {
        light.DOMAIN: {
            "platform": "mqtt",
            "name": "test",
            "state_topic": "test_light_rgb/status",
            "command_topic": "test_light_rgb/set",
            "color_mode_state_topic": "test_light_rgb/color_mode/status",
            "brightness_state_topic": "test_light_rgb/brightness/status",
            "brightness_command_topic": "test_light_rgb/brightness/set",
            "rgb_state_topic": "test_light_rgb/rgb/status",
            "rgb_command_topic": "test_light_rgb/rgb/set",
            "rgbw_state_topic": "test_light_rgb/rgbw/status",
            "rgbw_command_topic": "test_light_rgb/rgbw/set",
            "rgbww_state_topic": "test_light_rgb/rgbww/status",
            "rgbww_command_topic": "test_light_rgb/rgbww/set",
            "color_temp_state_topic": "test_light_rgb/color_temp/status",
            "color_temp_command_topic": "test_light_rgb/color_temp/set",
            "effect_state_topic": "test_light_rgb/effect/status",
            "effect_command_topic": "test_light_rgb/effect/set",
            "hs_state_topic": "test_light_rgb/hs/status",
            "hs_command_topic": "test_light_rgb/hs/set",
            "xy_state_topic": "test_light_rgb/xy/status",
            "xy_command_topic": "test_light_rgb/xy/set",
            "qos": "0",
            "payload_on": 1,
            "payload_off": 0,
        }
    }
    color_modes = ["color_temp", "hs", "rgb", "rgbw", "rgbww", "xy"]

    assert await async_setup_component(hass, light.DOMAIN, config)
    await hass.async_block_till_done()

    state = hass.states.get("light.test")
    assert state.state == STATE_OFF
    assert state.attributes.get("brightness") is None
    assert state.attributes.get("color_temp") is None
    assert state.attributes.get("effect") is None
    assert state.attributes.get("hs_color") is None
    assert state.attributes.get("rgb_color") is None
    assert state.attributes.get("rgbw_color") is None
    assert state.attributes.get("rgbww_color") is None
    assert state.attributes.get("white_value") is None
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get(light.ATTR_COLOR_MODE) is None
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "test_light_rgb/status", "1")

    state = hass.states.get("light.test")
    assert state.state == STATE_ON
    assert state.attributes.get("brightness") is None
    assert state.attributes.get("color_temp") is None
    assert state.attributes.get("effect") is None
    assert state.attributes.get("hs_color") is None
    assert state.attributes.get("rgb_color") is None
    assert state.attributes.get("rgbw_color") is None
    assert state.attributes.get("rgbww_color") is None
    assert state.attributes.get("white_value") is None
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/status", "0")
    state = hass.states.get("light.test")
    assert state.state == STATE_OFF

    async_fire_mqtt_message(hass, "test_light_rgb/status", "1")
    async_fire_mqtt_message(hass, "test_light_rgb/brightness/status", "100")
    light_state = hass.states.get("light.test")
    assert light_state.attributes.get("brightness") is None
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "300")
    light_state = hass.states.get("light.test")
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/effect/status", "rainbow")
    light_state = hass.states.get("light.test")
    assert light_state.attributes["effect"] == "rainbow"
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/rgb/status", "125,125,125")
    light_state = hass.states.get("light.test")
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/rgbw/status", "80,40,20,10")
    light_state = hass.states.get("light.test")
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/rgbww/status", "80,40,20,10,8")
    light_state = hass.states.get("light.test")
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/hs/status", "200,50")
    light_state = hass.states.get("light.test")
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/xy/status", "0.675,0.322")
    light_state = hass.states.get("light.test")
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "color_temp")
    light_state = hass.states.get("light.test")
    assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp"
    assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes

    async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "rgb")
light_state = hass.states.get("light.test") assert light_state.attributes.get("rgb_color") == (125, 125, 125) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "rgb" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "rgbw") light_state = hass.states.get("light.test") assert light_state.attributes.get("rgbw_color") == (80, 40, 20, 10) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "rgbw" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "rgbww") light_state = hass.states.get("light.test") assert light_state.attributes.get("rgbww_color") == (80, 40, 20, 10, 8) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "rgbww" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "hs") light_state = hass.states.get("light.test") assert light_state.attributes.get("hs_color") == (200, 50) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/color_mode/status", "xy") light_state = hass.states.get("light.test") assert light_state.attributes.get("xy_color") == (0.675, 0.322) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "xy" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async def test_explicit_color_mode_templated(hass, mqtt_mock): """Test templated explicit color mode over mqtt.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "state_topic": "test_light_rgb/status", "command_topic": "test_light_rgb/set", "color_mode_state_topic": "test_light_rgb/color_mode/status", "color_mode_value_template": "{{ value_json.color_mode }}", "brightness_state_topic": "test_light_rgb/brightness/status", "brightness_command_topic": "test_light_rgb/brightness/set", "color_temp_state_topic": "test_light_rgb/color_temp/status", "color_temp_command_topic": "test_light_rgb/color_temp/set", "hs_state_topic": "test_light_rgb/hs/status", "hs_command_topic": "test_light_rgb/hs/set", "qos": "0", "payload_on": 1, "payload_off": 0, } } color_modes = ["color_temp", "hs"] assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("hs_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_light_rgb/status", "1") state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") is None assert state.attributes.get("color_temp") is None assert state.attributes.get("hs_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/status", "0") state = hass.states.get("light.test") assert state.state == STATE_OFF async_fire_mqtt_message(hass, "test_light_rgb/status", "1") async_fire_mqtt_message(hass, 
"test_light_rgb/brightness/status", "100") light_state = hass.states.get("light.test") assert light_state.attributes.get("brightness") is None assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "300") light_state = hass.states.get("light.test") assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message(hass, "test_light_rgb/hs/status", "200,50") light_state = hass.states.get("light.test") assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message( hass, "test_light_rgb/color_mode/status", '{"color_mode":"color_temp"}' ) light_state = hass.states.get("light.test") assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async_fire_mqtt_message( hass, "test_light_rgb/color_mode/status", '{"color_mode":"hs"}' ) light_state = hass.states.get("light.test") assert light_state.attributes.get("hs_color") == (200, 50) assert light_state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert light_state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes async def test_effect(hass, mqtt_mock): """Test effect.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_light/set", "effect_command_topic": "test_light/effect/set", "effect_list": ["rainbow", "colorloop"], } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.state == STATE_OFF await common.async_turn_on(hass, "light.test", effect="rainbow") # Should get the following MQTT messages. 
# test_light/effect/set: 'rainbow' # test_light/set: 'ON' mqtt_mock.async_publish.assert_has_calls( [ call("test_light/effect/set", "rainbow", 0, False), call("test_light/set", "ON", 0, False), ], any_order=True, ) mqtt_mock.async_publish.reset_mock() await common.async_turn_off(hass, "light.test") mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, light.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, light.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, light.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one light per unique_id.""" config = { light.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "state_topic": "test-topic", "command_topic": "test_topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "state_topic": "test-topic", "command_topic": "test_topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, light.DOMAIN, config) async def test_discovery_removal_light(hass, mqtt_mock, caplog): """Test removal of discovered light.""" data = ( '{ "name": "test",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_removal(hass, mqtt_mock, caplog, light.DOMAIN, data) async def test_discovery_deprecated(hass, mqtt_mock, caplog): """Test discovery of mqtt light with deprecated platform option.""" data = ( '{ "name": "Beer",' ' "platform": "mqtt",' ' "command_topic": "test_topic"}' ) async_fire_mqtt_message(hass, "homeassistant/light/bla/config", data) await hass.async_block_till_done() state = hass.states.get("light.beer") assert state is not None assert state.name == 
"Beer" async def test_discovery_update_light_topic_and_template(hass, mqtt_mock, caplog): """Test update of discovered light.""" data1 = json.dumps( { "name": "Beer", "state_topic": "test_light_rgb/state1", "command_topic": "test_light_rgb/set", "brightness_command_topic": "test_light_rgb/state1", "rgb_command_topic": "test_light_rgb/rgb/set", "color_temp_command_topic": "test_light_rgb/state1", "effect_command_topic": "test_light_rgb/effect/set", "hs_command_topic": "test_light_rgb/hs/set", "white_value_command_topic": "test_light_rgb/white_value/set", "xy_command_topic": "test_light_rgb/xy/set", "brightness_state_topic": "test_light_rgb/state1", "color_temp_state_topic": "test_light_rgb/state1", "effect_state_topic": "test_light_rgb/state1", "hs_state_topic": "test_light_rgb/state1", "rgb_state_topic": "test_light_rgb/state1", "white_value_state_topic": "test_light_rgb/state1", "xy_state_topic": "test_light_rgb/state1", "state_value_template": "{{ value_json.state1.state }}", "brightness_value_template": "{{ value_json.state1.brightness }}", "color_temp_value_template": "{{ value_json.state1.ct }}", "effect_value_template": "{{ value_json.state1.fx }}", "hs_value_template": "{{ value_json.state1.hs }}", "rgb_value_template": "{{ value_json.state1.rgb }}", "white_value_template": "{{ value_json.state1.white }}", "xy_value_template": "{{ value_json.state1.xy }}", } ) data2 = json.dumps( { "name": "Milk", "state_topic": "test_light_rgb/state2", "command_topic": "test_light_rgb/set", "brightness_command_topic": "test_light_rgb/state2", "rgb_command_topic": "test_light_rgb/rgb/set", "color_temp_command_topic": "test_light_rgb/state2", "effect_command_topic": "test_light_rgb/effect/set", "hs_command_topic": "test_light_rgb/hs/set", "white_value_command_topic": "test_light_rgb/white_value/set", "xy_command_topic": "test_light_rgb/xy/set", "brightness_state_topic": "test_light_rgb/state2", "color_temp_state_topic": "test_light_rgb/state2", "effect_state_topic": "test_light_rgb/state2", "hs_state_topic": "test_light_rgb/state2", "rgb_state_topic": "test_light_rgb/state2", "white_value_state_topic": "test_light_rgb/state2", "xy_state_topic": "test_light_rgb/state2", "state_value_template": "{{ value_json.state2.state }}", "brightness_value_template": "{{ value_json.state2.brightness }}", "color_temp_value_template": "{{ value_json.state2.ct }}", "effect_value_template": "{{ value_json.state2.fx }}", "hs_value_template": "{{ value_json.state2.hs }}", "rgb_value_template": "{{ value_json.state2.rgb }}", "white_value_template": "{{ value_json.state2.white }}", "xy_value_template": "{{ value_json.state2.xy }}", } ) state_data1 = [ ( [ ( "test_light_rgb/state1", '{"state1":{"state":"ON", "brightness":100, "ct":123, "white":100, "fx":"cycle"}}', ) ], "on", [ ("brightness", 100), ("color_temp", 123), ("white_value", 100), ("effect", "cycle"), ], ), ( [("test_light_rgb/state1", '{"state1":{"state":"OFF"}}')], "off", None, ), ( [ ( "test_light_rgb/state1", '{"state1":{"state":"ON", "hs":"1,2", "white":0}}', ) ], "on", [("hs_color", (1, 2)), ("white_value", None)], ), ( [ ( "test_light_rgb/state1", '{"state1":{"rgb":"255,127,63"}}', ) ], "on", [("rgb_color", (255, 127, 63))], ), ( [ ( "test_light_rgb/state1", '{"state1":{"xy":"0.3, 0.4"}}', ) ], "on", [("xy_color", (0.3, 0.401))], ), ] state_data2 = [ ( [ ( "test_light_rgb/state2", '{"state2":{"state":"ON", "brightness":50, "ct":200, "white":50, "fx":"loop"}}', ) ], "on", [ ("brightness", 50), ("color_temp", 200), ("white_value", 50), ("effect", "loop"), ], 
), ( [ ( "test_light_rgb/state1", '{"state1":{"state":"ON", "brightness":100, "ct":123, "fx":"cycle"}}', ), ( "test_light_rgb/state1", '{"state2":{"state":"ON", "brightness":100, "ct":123, "fx":"cycle"}}', ), ( "test_light_rgb/state2", '{"state1":{"state":"ON", "brightness":100, "ct":123, "fx":"cycle"}}', ), ], "on", [("brightness", 50), ("color_temp", 200), ("effect", "loop")], ), ( [("test_light_rgb/state1", '{"state1":{"state":"OFF"}}')], "on", None, ), ( [("test_light_rgb/state1", '{"state2":{"state":"OFF"}}')], "on", None, ), ( [("test_light_rgb/state2", '{"state1":{"state":"OFF"}}')], "on", None, ), ( [("test_light_rgb/state2", '{"state2":{"state":"OFF"}}')], "off", None, ), ( [ ( "test_light_rgb/state2", '{"state2":{"state":"ON", "hs":"1.2,2.2", "white":0}}', ) ], "on", [("hs_color", (1.2, 2.2)), ("white_value", None)], ), ( [ ( "test_light_rgb/state1", '{"state1":{"state":"ON", "hs":"1,2"}}', ), ( "test_light_rgb/state1", '{"state2":{"state":"ON", "hs":"1,2"}}', ), ( "test_light_rgb/state2", '{"state1":{"state":"ON", "hs":"1,2"}}', ), ], "on", [("hs_color", (1.2, 2.2))], ), ( [ ( "test_light_rgb/state2", '{"state2":{"rgb":"63,127,255"}}', ) ], "on", [("rgb_color", (63, 127, 255))], ), ( [ ( "test_light_rgb/state1", '{"state1":{"rgb":"255,127,63"}}', ), ( "test_light_rgb/state1", '{"state2":{"rgb":"255,127,63"}}', ), ( "test_light_rgb/state2", '{"state1":{"rgb":"255,127,63"}}', ), ], "on", [("rgb_color", (63, 127, 255))], ), ( [ ( "test_light_rgb/state2", '{"state2":{"xy":"0.4, 0.3"}}', ) ], "on", [("xy_color", (0.4, 0.3))], ), ( [ ( "test_light_rgb/state1", '{"state1":{"white":50, "xy":"0.3, 0.4"}}', ), ( "test_light_rgb/state1", '{"state2":{"white":50, "xy":"0.3, 0.4"}}', ), ( "test_light_rgb/state2", '{"state1":{"white":50, "xy":"0.3, 0.4"}}', ), ], "on", [("xy_color", (0.4, 0.3))], ), ] await help_test_discovery_update( hass, mqtt_mock, caplog, light.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_light_template(hass, mqtt_mock, caplog): """Test update of discovered light.""" data1 = json.dumps( { "name": "Beer", "state_topic": "test_light_rgb/state1", "command_topic": "test_light_rgb/set", "brightness_command_topic": "test_light_rgb/state1", "rgb_command_topic": "test_light_rgb/rgb/set", "color_temp_command_topic": "test_light_rgb/state1", "effect_command_topic": "test_light_rgb/effect/set", "hs_command_topic": "test_light_rgb/hs/set", "white_value_command_topic": "test_light_rgb/white_value/set", "xy_command_topic": "test_light_rgb/xy/set", "brightness_state_topic": "test_light_rgb/state1", "color_temp_state_topic": "test_light_rgb/state1", "effect_state_topic": "test_light_rgb/state1", "hs_state_topic": "test_light_rgb/state1", "rgb_state_topic": "test_light_rgb/state1", "white_value_state_topic": "test_light_rgb/state1", "xy_state_topic": "test_light_rgb/state1", "state_value_template": "{{ value_json.state1.state }}", "brightness_value_template": "{{ value_json.state1.brightness }}", "color_temp_value_template": "{{ value_json.state1.ct }}", "effect_value_template": "{{ value_json.state1.fx }}", "hs_value_template": "{{ value_json.state1.hs }}", "rgb_value_template": "{{ value_json.state1.rgb }}", "white_value_template": "{{ value_json.state1.white }}", "xy_value_template": "{{ value_json.state1.xy }}", } ) data2 = json.dumps( { "name": "Milk", "state_topic": "test_light_rgb/state1", "command_topic": "test_light_rgb/set", "brightness_command_topic": "test_light_rgb/state1", "rgb_command_topic": 
"test_light_rgb/rgb/set", "color_temp_command_topic": "test_light_rgb/state1", "effect_command_topic": "test_light_rgb/effect/set", "hs_command_topic": "test_light_rgb/hs/set", "white_value_command_topic": "test_light_rgb/white_value/set", "xy_command_topic": "test_light_rgb/xy/set", "brightness_state_topic": "test_light_rgb/state1", "color_temp_state_topic": "test_light_rgb/state1", "effect_state_topic": "test_light_rgb/state1", "hs_state_topic": "test_light_rgb/state1", "rgb_state_topic": "test_light_rgb/state1", "white_value_state_topic": "test_light_rgb/state1", "xy_state_topic": "test_light_rgb/state1", "state_value_template": "{{ value_json.state2.state }}", "brightness_value_template": "{{ value_json.state2.brightness }}", "color_temp_value_template": "{{ value_json.state2.ct }}", "effect_value_template": "{{ value_json.state2.fx }}", "hs_value_template": "{{ value_json.state2.hs }}", "rgb_value_template": "{{ value_json.state2.rgb }}", "white_value_template": "{{ value_json.state2.white }}", "xy_value_template": "{{ value_json.state2.xy }}", } ) state_data1 = [ ( [ ( "test_light_rgb/state1", '{"state1":{"state":"ON", "brightness":100, "ct":123, "white":100, "fx":"cycle"}}', ) ], "on", [ ("brightness", 100), ("color_temp", 123), ("white_value", 100), ("effect", "cycle"), ], ), ( [("test_light_rgb/state1", '{"state1":{"state":"OFF"}}')], "off", None, ), ( [ ( "test_light_rgb/state1", '{"state1":{"state":"ON", "hs":"1,2", "white":0}}', ) ], "on", [("hs_color", (1, 2))], ), ( [ ( "test_light_rgb/state1", '{"state1":{"rgb":"255,127,63"}}', ) ], "on", [("rgb_color", (255, 127, 63))], ), ( [ ( "test_light_rgb/state1", '{"state1":{"white":0, "xy":"0.3, 0.4"}}', ) ], "on", [("white_value", None), ("xy_color", (0.3, 0.401))], ), ] state_data2 = [ ( [ ( "test_light_rgb/state1", '{"state2":{"state":"ON", "brightness":50, "ct":200, "white":50, "fx":"loop"}}', ) ], "on", [ ("brightness", 50), ("color_temp", 200), ("white_value", 50), ("effect", "loop"), ], ), ( [ ( "test_light_rgb/state1", '{"state1":{"state":"ON", "brightness":100, "ct":123, "fx":"cycle"}}', ), ], "on", [("brightness", 50), ("color_temp", 200), ("effect", "loop")], ), ( [("test_light_rgb/state1", '{"state1":{"state":"OFF"}}')], "on", None, ), ( [("test_light_rgb/state1", '{"state2":{"state":"OFF"}}')], "off", None, ), ( [ ( "test_light_rgb/state1", '{"state2":{"state":"ON", "hs":"1.2,2.2", "white":0}}', ) ], "on", [("hs_color", (1.2, 2.2))], ), ( [ ( "test_light_rgb/state1", '{"state1":{"state":"ON", "hs":"1,2"}}', ) ], "on", [("hs_color", (1.2, 2.2))], ), ( [ ( "test_light_rgb/state1", '{"state2":{"rgb":"63,127,255"}}', ) ], "on", [("rgb_color", (63, 127, 255))], ), ( [ ( "test_light_rgb/state1", '{"state1":{"rgb":"255,127,63"}}', ) ], "on", [("rgb_color", (63, 127, 255))], ), ( [ ( "test_light_rgb/state1", '{"state2":{"xy":"0.4, 0.3"}}', ) ], "on", [("white_value", None), ("xy_color", (0.4, 0.3))], ), ( [ ( "test_light_rgb/state1", '{"state1":{"white":50, "xy":"0.3, 0.4"}}', ) ], "on", [("white_value", None), ("xy_color", (0.4, 0.3))], ), ] await help_test_discovery_update( hass, mqtt_mock, caplog, light.DOMAIN, data1, data2, state_data1=state_data1, state_data2=state_data2, ) async def test_discovery_update_unchanged_light(hass, mqtt_mock, caplog): """Test update of discovered light.""" data1 = ( '{ "name": "Beer",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) with patch( "homeassistant.components.mqtt.light.schema_basic.MqttLight.discovery_update" ) as discovery_update: await 
help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, light.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = ( '{ "name": "Milk",' ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( hass, mqtt_mock, caplog, light.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT light device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT light device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG ) async def test_max_mireds(hass, mqtt_mock): """Test setting min_mireds and max_mireds.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test_max_mireds/set", "color_temp_command_topic": "test_max_mireds/color_temp/set", "max_mireds": 370, } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() state = hass.states.get("light.test") assert state.attributes.get("min_mireds") == 153 assert state.attributes.get("max_mireds") == 370 async def test_reloadable(hass, mqtt_mock): """Test reloading an mqtt light.""" config = { light.DOMAIN: { "platform": "mqtt", "name": "test", "command_topic": "test/set", } } assert await async_setup_component(hass, light.DOMAIN, config) await hass.async_block_till_done() assert hass.states.get("light.test") assert len(hass.states.async_all()) == 1 yaml_path = path.join( _get_fixtures_base_path(), "fixtures", "mqtt/configuration.yaml", ) with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): await hass.services.async_call( "mqtt", SERVICE_RELOAD, {}, blocking=True, ) await hass.async_block_till_done() assert len(hass.states.async_all()) == 1 assert hass.states.get("light.test") is None assert hass.states.get("light.reload") def _get_fixtures_base_path(): return path.dirname(path.dirname(path.dirname(__file__)))
home-assistant/home-assistant
tests/components/mqtt/test_light.py
homeassistant/components/juicenet/__init__.py
from plenum.common.messages.fields import MerkleRootField from plenum.test.input_validation.utils import b58_by_len validator = MerkleRootField() def test_non_empty_merkle_roots(): for byte_len in range(1, 33): val = b58_by_len(byte_len) if byte_len == 32: assert not validator.validate(val) else: assert validator.validate(val) def test_empty_string(): assert validator.validate('') def test_invalid_symbol(): res = validator.validate(b58_by_len(32)[:-1] + '0') assert res assert (res == "should not contain the following chars {}". format(sorted(set('0'))))
import pytest from stp_core.common.log import getlogger from plenum.test.node_catchup.helper import waitNodeDataEquality from plenum.common.util import randomString from plenum.test.test_node import checkNodesConnected from plenum.test.pool_transactions.helper import sdk_add_new_steward_and_node from plenum.test import waits logger = getlogger() @pytest.fixture(scope="function", autouse=True) def limitTestRunningTime(): return 150 def add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, all_plugins_path, name=None): node_name = name or randomString(5) new_steward_name = "testClientSteward" + randomString(3) new_steward_wallet_handle, new_node = \ sdk_add_new_steward_and_node(looper, sdk_pool_handle, sdk_wallet_steward, new_steward_name, node_name, tdir, tconf, all_plugins_path) nodes.append(new_node) looper.run(checkNodesConnected(nodes, customTimeout=60)) timeout = waits.expectedPoolCatchupTime(nodeCount=len(nodes)) waitNodeDataEquality(looper, new_node, *nodes[:-1], customTimeout=timeout, exclude_from_check=['check_last_ordered_3pc_backup']) return new_node def test_add_node_with_f_changed(looper, txnPoolNodeSet, tdir, tconf, allPluginsPath, sdk_pool_handle, sdk_wallet_steward, limitTestRunningTime): nodes = txnPoolNodeSet add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, allPluginsPath, name="Node5") add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, allPluginsPath, name="Node6") add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, allPluginsPath, name="Node7") add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, allPluginsPath, name="Node8") # check that all nodes have equal number of replica assert len(set([n.replicas.num_replicas for n in txnPoolNodeSet])) == 1 assert txnPoolNodeSet[-1].replicas.num_replicas == txnPoolNodeSet[-1].requiredNumberOfInstances
evernym/plenum
plenum/test/primary_selection/test_add_node_with_f_changed.py
plenum/test/input_validation/fields_validation/test_merkle_tree_root_field.py
from plenum.test.checkpoints.helper import check_for_nodes, check_stable_checkpoint, check_num_unstable_checkpoints from stp_core.loop.eventually import eventually from plenum.test import waits from plenum.test.delayers import ppDelay from plenum.test.test_node import getPrimaryReplica from plenum.test.helper import sdk_send_random_and_check def test_stable_checkpoint_when_one_instance_slow(chkFreqPatched, tconf, looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client, reqs_for_checkpoint): delay = 5 pr = getPrimaryReplica(txnPoolNodeSet, 1) slowNode = pr.node otherNodes = [n for n in txnPoolNodeSet if n != slowNode] for n in otherNodes: n.nodeIbStasher.delay(ppDelay(delay, 1)) sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client, reqs_for_checkpoint) timeout = waits.expectedTransactionExecutionTime(len(txnPoolNodeSet)) + delay next_checkpoint = tconf.CHK_FREQ looper.run(eventually(check_for_nodes, txnPoolNodeSet, check_stable_checkpoint, next_checkpoint, retryWait=1, timeout=timeout)) check_for_nodes(txnPoolNodeSet, check_num_unstable_checkpoints, 0)
import pytest from stp_core.common.log import getlogger from plenum.test.node_catchup.helper import waitNodeDataEquality from plenum.common.util import randomString from plenum.test.test_node import checkNodesConnected from plenum.test.pool_transactions.helper import sdk_add_new_steward_and_node from plenum.test import waits logger = getlogger() @pytest.fixture(scope="function", autouse=True) def limitTestRunningTime(): return 150 def add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, all_plugins_path, name=None): node_name = name or randomString(5) new_steward_name = "testClientSteward" + randomString(3) new_steward_wallet_handle, new_node = \ sdk_add_new_steward_and_node(looper, sdk_pool_handle, sdk_wallet_steward, new_steward_name, node_name, tdir, tconf, all_plugins_path) nodes.append(new_node) looper.run(checkNodesConnected(nodes, customTimeout=60)) timeout = waits.expectedPoolCatchupTime(nodeCount=len(nodes)) waitNodeDataEquality(looper, new_node, *nodes[:-1], customTimeout=timeout, exclude_from_check=['check_last_ordered_3pc_backup']) return new_node def test_add_node_with_f_changed(looper, txnPoolNodeSet, tdir, tconf, allPluginsPath, sdk_pool_handle, sdk_wallet_steward, limitTestRunningTime): nodes = txnPoolNodeSet add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, allPluginsPath, name="Node5") add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, allPluginsPath, name="Node6") add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, allPluginsPath, name="Node7") add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward, tdir, tconf, allPluginsPath, name="Node8") # check that all nodes have equal number of replica assert len(set([n.replicas.num_replicas for n in txnPoolNodeSet])) == 1 assert txnPoolNodeSet[-1].replicas.num_replicas == txnPoolNodeSet[-1].requiredNumberOfInstances
evernym/plenum
plenum/test/primary_selection/test_add_node_with_f_changed.py
plenum/test/checkpoints/test_stable_checkpoint1.py
import pytest from orderedset._orderedset import OrderedSet from plenum.common.event_bus import InternalBus from plenum.common.messages.node_messages import PrePrepare from plenum.common.startable import Mode from plenum.common.constants import POOL_LEDGER_ID, DOMAIN_LEDGER_ID, CURRENT_PROTOCOL_VERSION, AUDIT_LEDGER_ID, \ TXN_PAYLOAD, TXN_PAYLOAD_DATA, AUDIT_TXN_VIEW_NO, AUDIT_TXN_PP_SEQ_NO, AUDIT_TXN_DIGEST from plenum.common.timer import QueueTimer from plenum.common.util import get_utc_epoch from plenum.server.batch_handlers.node_reg_handler import NodeRegHandler from plenum.server.consensus.primary_selector import RoundRobinConstantNodesPrimariesSelector from plenum.server.database_manager import DatabaseManager from plenum.server.propagator import Requests from plenum.server.quorums import Quorums from plenum.server.replica import Replica from plenum.test.conftest import getValueFromModule from plenum.test.helper import MockTimestamp, sdk_random_request_objects, create_pre_prepare_params, \ create_prepare_from_pre_prepare from plenum.test.testing_utils import FakeSomething from plenum.test.bls.conftest import fake_state_root_hash, fake_multi_sig, fake_multi_sig_value class ReplicaFakeNode(FakeSomething): def __init__(self, viewNo, quorums, ledger_ids): node_names = ["Alpha", "Beta", "Gamma", "Delta"] node_stack = FakeSomething( name="fake stack", connecteds=set(node_names) ) self.replicas = [] self.viewNo = viewNo audit_ledger = FakeSomething(size=0, get_last_txn=lambda *args: None, getAllTxn=lambda *args, **kwargs: []) db_manager = DatabaseManager() db_manager.register_new_database(AUDIT_LEDGER_ID, audit_ledger) super().__init__( name="fake node", ledger_ids=ledger_ids, _viewNo=viewNo, quorums=quorums, nodestack=node_stack, utc_epoch=lambda *args: get_utc_epoch(), mode=Mode.participating, view_change_in_progress=False, monitor=FakeSomething(isMasterDegraded=lambda: False), requests=Requests(), onBatchCreated=lambda self, *args, **kwargs: True, applyReq=lambda self, *args, **kwargs: True, primaries=[], get_validators=lambda: [], db_manager=db_manager, write_manager=FakeSomething(database_manager=db_manager, node_reg_handler=NodeRegHandler(db_manager), apply_request=lambda req, cons_time: None), timer=QueueTimer(), poolManager=FakeSomething(node_names_ordered_by_rank=lambda: node_names), primaries_selector=RoundRobinConstantNodesPrimariesSelector(node_names) ) @property def viewNo(self): return self._viewNo @viewNo.setter def viewNo(self, viewNo): self._viewNo = viewNo for replica in self.replicas: replica._consensus_data.viewNo = viewNo @property def is_synced(self) -> bool: return Mode.is_done_syncing(self.mode) @property def isParticipating(self) -> bool: return self.mode == Mode.participating def add_replica(self, replica): self.replicas.append(replica) for replica in self.replicas: replica._consensus_data.view_no = self.viewNo @pytest.fixture(scope='function', params=[0, 10]) def viewNo(tconf, request): return request.param @pytest.fixture(scope='function') def ledger_ids(): return [POOL_LEDGER_ID] @pytest.fixture(scope='function', params=[0]) def inst_id(request): return request.param @pytest.fixture(scope="function") def mock_timestamp(): return get_utc_epoch @pytest.fixture() def fake_requests(): return sdk_random_request_objects(10, identifier="fake_did", protocol_version=CURRENT_PROTOCOL_VERSION) @pytest.fixture() def txn_roots(): return ["AAAgqga9DNr4bjH57Rdq6BRtvCN1PV9UX5Mpnm9gbMAZ", "BBBJmfG5DYAE8ZcdTTFMiwcZaDN6CRVdSdkhBXnkYPio", 
"CCCJmfG5DYAE8ZcdTTFMiwcZaDN6CRVdSdkhBXnkYPio", "DDDJmfG5DYAE8ZcdTTFMiwcZaDN6CRVdSdkhBXnkYPio"] @pytest.fixture() def state_roots(fake_state_root_hash): return ["EuDgqga9DNr4bjH57Rdq6BRtvCN1PV9UX5Mpnm9gbMAZ", fake_state_root_hash, "D95JmfG5DYAE8ZcdTTFMiwcZaDN6CRVdSdkhBXnkYPio", None] @pytest.fixture(scope='function') def replica(tconf, viewNo, inst_id, ledger_ids, mock_timestamp, fake_requests, txn_roots, state_roots, request): node = ReplicaFakeNode(viewNo=viewNo, quorums=Quorums(getValueFromModule(request, 'nodeCount', default=4)), ledger_ids=ledger_ids) bls_bft_replica = FakeSomething( gc=lambda *args: None, update_pre_prepare=lambda params, l_id: params, validate_pre_prepare=lambda a, b: None, validate_prepare=lambda a, b: None, update_prepare=lambda a, b: a, process_prepare=lambda a, b: None, process_pre_prepare=lambda a, b: None, process_order=lambda *args: None ) replica = Replica( node, instId=inst_id, isMaster=inst_id == 0, config=tconf, bls_bft_replica=bls_bft_replica, get_current_time=mock_timestamp, get_time_for_3pc_batch=mock_timestamp ) node.add_replica(replica) ReplicaFakeNode.master_last_ordered_3PC = replica.last_ordered_3pc replica._ordering_service.last_accepted_pre_prepare_time = replica.get_time_for_3pc_batch() replica.primaryName = "Alpha:{}".format(replica.instId) replica.primaryNames[replica.viewNo] = replica.primaryName replica._ordering_service.get_txn_root_hash = lambda ledger, to_str=False: txn_roots[ledger] replica._ordering_service.get_state_root_hash = lambda ledger, to_str=False: state_roots[ledger] replica._ordering_service._revert = lambda ledgerId, stateRootHash, reqCount: None replica._ordering_service.post_batch_creation = lambda three_pc_batch: None replica._ordering_service.requestQueues[DOMAIN_LEDGER_ID] = OrderedSet() replica._ordering_service._get_primaries_for_ordered = lambda pp: [replica.primaryName] replica._ordering_service._get_node_reg_for_ordered = lambda pp: ["Alpha", "Beta", "Gamma", "Delta"] def reportSuspiciousNodeEx(ex): assert False, ex replica.node.reportSuspiciousNodeEx = reportSuspiciousNodeEx return replica @pytest.fixture(scope='function') def primary_replica(replica): replica.primaryName = replica.name return replica @pytest.fixture(scope='function') def replica_with_requests(replica, fake_requests): replica._ordering_service._apply_pre_prepare = lambda a: (fake_requests, [], [], False) for req in fake_requests: replica._ordering_service.requestQueues[DOMAIN_LEDGER_ID].add(req.key) replica.requests.add(req) replica.requests.set_finalised(req) return replica @pytest.fixture(scope="function", params=['BLS_not_None', 'BLS_None']) def multi_sig(fake_multi_sig, request): if request.param == 'BLS_None': return None return fake_multi_sig @pytest.fixture(scope="function") def pre_prepare(replica, state_roots, txn_roots, multi_sig, fake_requests): params = create_pre_prepare_params(state_root=state_roots[DOMAIN_LEDGER_ID], ledger_id=DOMAIN_LEDGER_ID, txn_root=txn_roots[DOMAIN_LEDGER_ID], bls_multi_sig=multi_sig, view_no=replica.viewNo, inst_id=replica.instId, pool_state_root=state_roots[POOL_LEDGER_ID], audit_txn_root=txn_roots[AUDIT_LEDGER_ID], reqs=fake_requests) pp = PrePrepare(*params) return pp @pytest.fixture(scope="function") def prepare(pre_prepare): return create_prepare_from_pre_prepare(pre_prepare)
import pytest

from stp_core.common.log import getlogger
from plenum.test.node_catchup.helper import waitNodeDataEquality
from plenum.common.util import randomString
from plenum.test.test_node import checkNodesConnected
from plenum.test.pool_transactions.helper import sdk_add_new_steward_and_node
from plenum.test import waits

logger = getlogger()


@pytest.fixture(scope="function", autouse=True)
def limitTestRunningTime():
    return 150


def add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward,
                 tdir, tconf, all_plugins_path, name=None):
    node_name = name or randomString(5)
    new_steward_name = "testClientSteward" + randomString(3)
    new_steward_wallet_handle, new_node = \
        sdk_add_new_steward_and_node(looper,
                                     sdk_pool_handle,
                                     sdk_wallet_steward,
                                     new_steward_name,
                                     node_name,
                                     tdir,
                                     tconf,
                                     all_plugins_path)
    nodes.append(new_node)
    looper.run(checkNodesConnected(nodes, customTimeout=60))
    timeout = waits.expectedPoolCatchupTime(nodeCount=len(nodes))
    waitNodeDataEquality(looper, new_node, *nodes[:-1],
                         customTimeout=timeout,
                         exclude_from_check=['check_last_ordered_3pc_backup'])
    return new_node


def test_add_node_with_f_changed(looper, txnPoolNodeSet, tdir, tconf,
                                 allPluginsPath, sdk_pool_handle,
                                 sdk_wallet_steward, limitTestRunningTime):
    nodes = txnPoolNodeSet
    add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward,
                 tdir, tconf, allPluginsPath, name="Node5")
    add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward,
                 tdir, tconf, allPluginsPath, name="Node6")
    add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward,
                 tdir, tconf, allPluginsPath, name="Node7")
    add_new_node(looper, nodes, sdk_pool_handle, sdk_wallet_steward,
                 tdir, tconf, allPluginsPath, name="Node8")

    # check that all nodes have an equal number of replicas
    assert len(set([n.replicas.num_replicas for n in txnPoolNodeSet])) == 1
    assert txnPoolNodeSet[-1].replicas.num_replicas == \
        txnPoolNodeSet[-1].requiredNumberOfInstances
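The final assertion rests on the standard BFT sizing rule: a pool of n nodes tolerates f = (n - 1) // 3 faulty nodes and runs f + 1 protocol instances (one master plus f backups). A quick sketch of how the expected replica count changes as the pool above grows from 4 to 8 nodes; the helper is illustrative, not part of plenum:

def expected_instances(n_nodes: int) -> int:
    f = (n_nodes - 1) // 3  # maximum number of faulty nodes tolerated
    return f + 1            # one master instance plus f backup instances

# f stays at 1 until the 7th node joins, which is why the test adds
# several nodes before requiredNumberOfInstances actually changes.
assert [expected_instances(n) for n in range(4, 9)] == [2, 2, 2, 3, 3]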
evernym/plenum
plenum/test/primary_selection/test_add_node_with_f_changed.py
plenum/test/replica/conftest.py
# -*- coding: utf-8 -*- import numpy as np import pandas as pd from ..compat import DTYPE __all__ = [ 'load_airpassengers' ] def load_airpassengers(as_series=False, dtype=DTYPE): """Monthly airline passengers. The classic Box & Jenkins airline data. Monthly totals of international airline passengers, 1949 to 1960. Parameters ---------- as_series : bool, optional (default=False) Whether to return a Pandas series. If False, will return a 1d numpy array. dtype : type, optional (default=np.float64) The type to return for the array. Default is np.float64, which is used throughout the package as the default type. Returns ------- rslt : array-like, shape=(n_samples,) The time series vector. Examples -------- >>> from pmdarima.datasets import load_airpassengers >>> load_airpassengers() # doctest: +SKIP np.array([ 112, 118, 132, 129, 121, 135, 148, 148, 136, 119, 104, 118, 115, 126, 141, 135, 125, 149, 170, 170, 158, 133, 114, 140, 145, 150, 178, 163, 172, 178, 199, 199, 184, 162, 146, 166, 171, 180, 193, 181, 183, 218, 230, 242, 209, 191, 172, 194, 196, 196, 236, 235, 229, 243, 264, 272, 237, 211, 180, 201, 204, 188, 235, 227, 234, 264, 302, 293, 259, 229, 203, 229, 242, 233, 267, 269, 270, 315, 364, 347, 312, 274, 237, 278, 284, 277, 317, 313, 318, 374, 413, 405, 355, 306, 271, 306, 315, 301, 356, 348, 355, 422, 465, 467, 404, 347, 305, 336, 340, 318, 362, 348, 363, 435, 491, 505, 404, 359, 310, 337, 360, 342, 406, 396, 420, 472, 548, 559, 463, 407, 362, 405, 417, 391, 419, 461, 472, 535, 622, 606, 508, 461, 390, 432]) >>> load_airpassengers(True).head() 0 112.0 1 118.0 2 132.0 3 129.0 4 121.0 dtype: float64 Notes ----- This is monthly data, so *m* should be set to 12 when using in a seasonal context. References ---------- .. [1] Box, G. E. P., Jenkins, G. M. and Reinsel, G. C. (1976) "Time Series Analysis, Forecasting and Control. Third Edition." Holden-Day. Series G. """ rslt = np.array([ 112, 118, 132, 129, 121, 135, 148, 148, 136, 119, 104, 118, 115, 126, 141, 135, 125, 149, 170, 170, 158, 133, 114, 140, 145, 150, 178, 163, 172, 178, 199, 199, 184, 162, 146, 166, 171, 180, 193, 181, 183, 218, 230, 242, 209, 191, 172, 194, 196, 196, 236, 235, 229, 243, 264, 272, 237, 211, 180, 201, 204, 188, 235, 227, 234, 264, 302, 293, 259, 229, 203, 229, 242, 233, 267, 269, 270, 315, 364, 347, 312, 274, 237, 278, 284, 277, 317, 313, 318, 374, 413, 405, 355, 306, 271, 306, 315, 301, 356, 348, 355, 422, 465, 467, 404, 347, 305, 336, 340, 318, 362, 348, 363, 435, 491, 505, 404, 359, 310, 337, 360, 342, 406, 396, 420, 472, 548, 559, 463, 407, 362, 405, 417, 391, 419, 461, 472, 535, 622, 606, 508, 461, 390, 432 ]).astype(dtype) if as_series: return pd.Series(rslt) return rslt
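As the notes say, this is monthly data, so m=12 is the natural seasonal period. A small usage sketch, assuming the usual top-level pmdarima import (the train/test split point of 132 observations is arbitrary):

import pmdarima as pm
from pmdarima.datasets import load_airpassengers

y = load_airpassengers()
train, test = y[:132], y[132:]  # hold out the last 12 months

# m=12 per the notes above; seasonal fit on the training portion.
model = pm.auto_arima(train, m=12, seasonal=True, suppress_warnings=True)
forecast = model.predict(n_periods=test.shape[0])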
# -*- coding: utf-8 -*- from pmdarima.arima.auto import StepwiseContext, auto_arima from pmdarima.arima._context import ContextStore, ContextType from pmdarima.arima import _context as context_lib from pmdarima.datasets import load_lynx, load_wineind from unittest import mock import threading import collections import pytest import warnings lynx = load_lynx() wineind = load_wineind() # test StepwiseContext parameter validation @pytest.mark.parametrize( 'max_steps,max_dur', [ pytest.param(-1, None), pytest.param(0, None), pytest.param(1001, None), pytest.param(1100, None), pytest.param(None, -1), pytest.param(None, 0), ]) def test_stepwise_context_args(max_steps, max_dur): with pytest.raises(ValueError): StepwiseContext(max_steps=max_steps, max_dur=max_dur) # test auto_arima stepwise run with StepwiseContext def test_auto_arima_with_stepwise_context(): samp = lynx[:8] with StepwiseContext(max_steps=3, max_dur=30): with pytest.warns(UserWarning) as uw: auto_arima(samp, suppress_warnings=False, stepwise=True, error_action='ignore') # assert that max_steps were taken assert any(str(w.message) .startswith('stepwise search has reached the ' 'maximum number of tries') for w in uw) # test effective context info in nested context scenario def test_nested_context(): ctx1_data = {'max_dur': 30} ctx2_data = {'max_steps': 5} ctx1 = StepwiseContext(**ctx1_data) ctx2 = StepwiseContext(**ctx2_data) with ctx1, ctx2: effective_ctx_data = ContextStore.get_or_empty( ContextType.STEPWISE) expected_ctx_data = ctx1_data.copy() expected_ctx_data.update(ctx2_data) assert all(effective_ctx_data[key] == expected_ctx_data[key] for key in expected_ctx_data.keys()) assert all(effective_ctx_data[key] == expected_ctx_data[key] for key in effective_ctx_data.keys()) # Test a context honors the max duration def test_max_dur(): # set arbitrarily low to guarantee will always pass after one iter with StepwiseContext(max_dur=.5), \ pytest.warns(UserWarning) as uw: auto_arima(lynx, stepwise=True) # assert that max_dur was reached assert any(str(w.message) .startswith('early termination') for w in uw) # Test that a context after the first will not inherit the first's attrs def test_subsequent_contexts(): # Force a very fast fit with StepwiseContext(max_dur=.5), \ pytest.warns(UserWarning): auto_arima(lynx, stepwise=True) # Out of scope, should be EMPTY ctx = ContextStore.get_or_empty(ContextType.STEPWISE) assert ctx.get_type() is ContextType.EMPTY # Now show that we DON'T hit early termination by time here with StepwiseContext(max_steps=100), \ warnings.catch_warnings(record=True) as uw: ctx = ContextStore.get_or_empty(ContextType.STEPWISE) assert ctx.get_type() is ContextType.STEPWISE assert ctx.max_dur is None auto_arima(lynx, stepwise=True) # assert that max_dur was NOT reached if uw: assert not any(str(w.message) .startswith('early termination') for w in uw) # test param validation of ContextStore's add, get and remove members def test_add_get_remove_context_args(): with pytest.raises(ValueError): ContextStore._add_context(None) with pytest.raises(ValueError): ContextStore._remove_context(None) with pytest.raises(ValueError): ContextStore.get_context(None) def test_context_store_accessible_across_threads(): # Make sure it's completely empty by patching it d = {} with mock.patch('pmdarima.arima._context._ctx.store', d): # pushes onto the Context Store def push(n): # n is the number of times this has been executed before. 
# If > 0, assert there is a context there
            if n > 0:
                assert len(context_lib._ctx.store[ContextType.STEPWISE]) == n
            else:
                context_lib._ctx.store[ContextType.STEPWISE] = \
                    collections.deque()

            new_ctx = StepwiseContext()
            context_lib._ctx.store[ContextType.STEPWISE].append(new_ctx)
            assert len(context_lib._ctx.store[ContextType.STEPWISE]) == n + 1

        for i in range(5):
            t = threading.Thread(target=push, args=(i,))
            t.start()
            t.join(1)  # it shouldn't take even close to this time

    # Assert the mock has lifted
    assert context_lib._ctx.store is not d
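For reference, the behaviour these tests pin down looks like this from the caller's side: a StepwiseContext bounds the stepwise search only inside its with block, and nested contexts merge their settings with inner values winning. A minimal sketch using the same datasets as above:

from pmdarima.arima.auto import StepwiseContext, auto_arima
from pmdarima.datasets import load_wineind

y = load_wineind()

# Stop after at most 5 candidate fits or 10 seconds, whichever comes
# first; outside the block the limits no longer apply.
with StepwiseContext(max_steps=5, max_dur=10):
    model = auto_arima(y, stepwise=True, error_action='ignore')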
tgsmith61591/pyramid
pmdarima/arima/tests/test_context.py
pmdarima/datasets/airpassengers.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst import contextlib import re import sys import inspect import os from collections import OrderedDict from operator import itemgetter import numpy as np __all__ = ['register_reader', 'register_writer', 'register_identifier', 'identify_format', 'get_reader', 'get_writer', 'read', 'write', 'get_formats', 'IORegistryError', 'delay_doc_updates', 'UnifiedReadWriteMethod', 'UnifiedReadWrite'] __doctest_skip__ = ['register_identifier'] _readers = OrderedDict() _writers = OrderedDict() _identifiers = OrderedDict() PATH_TYPES = (str, os.PathLike) class IORegistryError(Exception): """Custom error for registry clashes. """ pass # If multiple formats are added to one class the update of the docs is quite # expensive. Classes for which the doc update is temporarly delayed are added # to this set. _delayed_docs_classes = set() @contextlib.contextmanager def delay_doc_updates(cls): """Contextmanager to disable documentation updates when registering reader and writer. The documentation is only built once when the contextmanager exits. .. versionadded:: 1.3 Parameters ---------- cls : class Class for which the documentation updates should be delayed. Notes ----- Registering multiple readers and writers can cause significant overhead because the documentation of the corresponding ``read`` and ``write`` methods are build every time. .. warning:: This contextmanager is experimental and may be replaced by a more general approach. Examples -------- see for example the source code of ``astropy.table.__init__``. """ _delayed_docs_classes.add(cls) yield _delayed_docs_classes.discard(cls) _update__doc__(cls, 'read') _update__doc__(cls, 'write') def get_formats(data_class=None, readwrite=None): """ Get the list of registered I/O formats as a Table. Parameters ---------- data_class : class, optional Filter readers/writer to match data class (default = all classes). readwrite : str or None, optional Search only for readers (``"Read"``) or writers (``"Write"``). If None search for both. Default is None. .. versionadded:: 1.3 Returns ------- format_table : :class:`~astropy.table.Table` Table of available I/O formats. """ from astropy.table import Table format_classes = sorted(set(_readers) | set(_writers), key=itemgetter(0)) rows = [] for format_class in format_classes: if (data_class is not None and not _is_best_match( data_class, format_class[1], format_classes)): continue has_read = 'Yes' if format_class in _readers else 'No' has_write = 'Yes' if format_class in _writers else 'No' has_identify = 'Yes' if format_class in _identifiers else 'No' # Check if this is a short name (e.g. 'rdb') which is deprecated in # favor of the full 'ascii.rdb'. ascii_format_class = ('ascii.' + format_class[0], format_class[1]) deprecated = 'Yes' if ascii_format_class in format_classes else '' rows.append((format_class[1].__name__, format_class[0], has_read, has_write, has_identify, deprecated)) if readwrite is not None: if readwrite == 'Read': rows = [row for row in rows if row[2] == 'Yes'] elif readwrite == 'Write': rows = [row for row in rows if row[3] == 'Yes'] else: raise ValueError('unrecognized value for "readwrite": {0}.\n' 'Allowed are "Read" and "Write" and None.') # Sorting the list of tuples is much faster than sorting it after the table # is created. (#5262) if rows: # Indices represent "Data Class", "Deprecated" and "Format". 
data = list(zip(*sorted(rows, key=itemgetter(0, 5, 1)))) else: data = None format_table = Table(data, names=('Data class', 'Format', 'Read', 'Write', 'Auto-identify', 'Deprecated')) if not np.any(format_table['Deprecated'] == 'Yes'): format_table.remove_column('Deprecated') return format_table def _update__doc__(data_class, readwrite): """ Update the docstring to include all the available readers / writers for the ``data_class.read`` or ``data_class.write`` functions (respectively). """ FORMATS_TEXT = 'The available built-in formats are:' # Get the existing read or write method and its docstring class_readwrite_func = getattr(data_class, readwrite) if not isinstance(class_readwrite_func.__doc__, str): # No docstring--could just be test code, or possibly code compiled # without docstrings return lines = class_readwrite_func.__doc__.splitlines() # Find the location of the existing formats table if it exists sep_indices = [ii for ii, line in enumerate(lines) if FORMATS_TEXT in line] if sep_indices: # Chop off the existing formats table, including the initial blank line chop_index = sep_indices[0] lines = lines[:chop_index] # Find the minimum indent, skipping the first line because it might be odd matches = [re.search(r'(\S)', line) for line in lines[1:]] left_indent = ' ' * min(match.start() for match in matches if match) # Get the available unified I/O formats for this class # Include only formats that have a reader, and drop the 'Data class' column format_table = get_formats(data_class, readwrite.capitalize()) format_table.remove_column('Data class') # Get the available formats as a table, then munge the output of pformat() # a bit and put it into the docstring. new_lines = format_table.pformat(max_lines=-1, max_width=80) table_rst_sep = re.sub('-', '=', new_lines[1]) new_lines[1] = table_rst_sep new_lines.insert(0, table_rst_sep) new_lines.append(table_rst_sep) # Check for deprecated names and include a warning at the end. if 'Deprecated' in format_table.colnames: new_lines.extend(['', 'Deprecated format names like ``aastex`` will be ' 'removed in a future version. Use the full ', 'name (e.g. ``ascii.aastex``) instead.']) new_lines = [FORMATS_TEXT, ''] + new_lines lines.extend([left_indent + line for line in new_lines]) # Depending on Python version and whether class_readwrite_func is # an instancemethod or classmethod, one of the following will work. if isinstance(class_readwrite_func, UnifiedReadWrite): class_readwrite_func.__class__.__doc__ = '\n'.join(lines) else: try: class_readwrite_func.__doc__ = '\n'.join(lines) except AttributeError: class_readwrite_func.__func__.__doc__ = '\n'.join(lines) def register_reader(data_format, data_class, function, force=False, priority=0): """ Register a reader function. Parameters ---------- data_format : str The data format identifier. This is the string that will be used to specify the data type when reading. data_class : class The class of the object that the reader produces. function : function The function to read in a data object. force : bool, optional Whether to override any existing function if already present. Default is ``False``. priority : int, optional The priority of the reader, used to compare possible formats when trying to determine the best reader to use. Higher priorities are preferred over lower priorities, with the default priority being 0 (negative numbers are allowed though). 
""" if not (data_format, data_class) in _readers or force: _readers[(data_format, data_class)] = function, priority else: raise IORegistryError("Reader for format '{}' and class '{}' is " 'already defined' ''.format(data_format, data_class.__name__)) if data_class not in _delayed_docs_classes: _update__doc__(data_class, 'read') def unregister_reader(data_format, data_class): """ Unregister a reader function Parameters ---------- data_format : str The data format identifier. data_class : class The class of the object that the reader produces. """ if (data_format, data_class) in _readers: _readers.pop((data_format, data_class)) else: raise IORegistryError("No reader defined for format '{}' and class '{}'" ''.format(data_format, data_class.__name__)) if data_class not in _delayed_docs_classes: _update__doc__(data_class, 'read') def register_writer(data_format, data_class, function, force=False, priority=0): """ Register a table writer function. Parameters ---------- data_format : str The data format identifier. This is the string that will be used to specify the data type when writing. data_class : class The class of the object that can be written. function : function The function to write out a data object. force : bool, optional Whether to override any existing function if already present. Default is ``False``. priority : int, optional The priority of the writer, used to compare possible formats when trying to determine the best writer to use. Higher priorities are preferred over lower priorities, with the default priority being 0 (negative numbers are allowed though). """ if not (data_format, data_class) in _writers or force: _writers[(data_format, data_class)] = function, priority else: raise IORegistryError("Writer for format '{}' and class '{}' is " 'already defined' ''.format(data_format, data_class.__name__)) if data_class not in _delayed_docs_classes: _update__doc__(data_class, 'write') def unregister_writer(data_format, data_class): """ Unregister a writer function Parameters ---------- data_format : str The data format identifier. data_class : class The class of the object that can be written. """ if (data_format, data_class) in _writers: _writers.pop((data_format, data_class)) else: raise IORegistryError("No writer defined for format '{}' and class '{}'" ''.format(data_format, data_class.__name__)) if data_class not in _delayed_docs_classes: _update__doc__(data_class, 'write') def register_identifier(data_format, data_class, identifier, force=False): """ Associate an identifier function with a specific data type. Parameters ---------- data_format : str The data format identifier. This is the string that is used to specify the data type when reading/writing. data_class : class The class of the object that can be written. identifier : function A function that checks the argument specified to `read` or `write` to determine whether the input can be interpreted as a table of type ``data_format``. This function should take the following arguments: - ``origin``: A string ``"read"`` or ``"write"`` identifying whether the file is to be opened for reading or writing. - ``path``: The path to the file. - ``fileobj``: An open file object to read the file's contents, or `None` if the file could not be opened. - ``*args``: Positional arguments for the `read` or `write` function. - ``**kwargs``: Keyword arguments for the `read` or `write` function. One or both of ``path`` or ``fileobj`` may be `None`. If they are both `None`, the identifier will need to work from ``args[0]``. 
The function should return True if the input can be identified as being of format ``data_format``, and False otherwise. force : bool, optional Whether to override any existing function if already present. Default is ``False``. Examples -------- To set the identifier based on extensions, for formats that take a filename as a first argument, you can do for example:: >>> def my_identifier(*args, **kwargs): ... return isinstance(args[0], str) and args[0].endswith('.tbl') >>> register_identifier('ipac', Table, my_identifier) """ if not (data_format, data_class) in _identifiers or force: _identifiers[(data_format, data_class)] = identifier else: raise IORegistryError("Identifier for format '{}' and class '{}' is " 'already defined'.format(data_format, data_class.__name__)) def unregister_identifier(data_format, data_class): """ Unregister an identifier function Parameters ---------- data_format : str The data format identifier. data_class : class The class of the object that can be read/written. """ if (data_format, data_class) in _identifiers: _identifiers.pop((data_format, data_class)) else: raise IORegistryError("No identifier defined for format '{}' and class" " '{}'".format(data_format, data_class.__name__)) def identify_format(origin, data_class_required, path, fileobj, args, kwargs): """Loop through identifiers to see which formats match. Parameters ---------- origin : str A string ``"read`` or ``"write"`` identifying whether the file is to be opened for reading or writing. data_class_required : object The specified class for the result of `read` or the class that is to be written. path : str or path-like or None The path to the file or None. fileobj : file-like or None. An open file object to read the file's contents, or ``None`` if the file could not be opened. args : sequence Positional arguments for the `read` or `write` function. Note that these must be provided as sequence. kwargs : dict-like Keyword arguments for the `read` or `write` function. Note that this parameter must be `dict`-like. Returns ------- valid_formats : list List of matching formats. """ valid_formats = [] for data_format, data_class in _identifiers: if _is_best_match(data_class_required, data_class, _identifiers): if _identifiers[(data_format, data_class)]( origin, path, fileobj, *args, **kwargs): valid_formats.append(data_format) return valid_formats def _get_format_table_str(data_class, readwrite): format_table = get_formats(data_class, readwrite=readwrite) format_table.remove_column('Data class') format_table_str = '\n'.join(format_table.pformat(max_lines=-1)) return format_table_str def get_reader(data_format, data_class): """Get reader for ``data_format``. Parameters ---------- data_format : str The data format identifier. This is the string that is used to specify the data type when reading/writing. data_class : class The class of the object that can be written. Returns ------- reader : callable The registered reader function for this format and class. """ readers = [(fmt, cls) for fmt, cls in _readers if fmt == data_format] for reader_format, reader_class in readers: if _is_best_match(data_class, reader_class, readers): return _readers[(reader_format, reader_class)][0] else: format_table_str = _get_format_table_str(data_class, 'Read') raise IORegistryError( "No reader defined for format '{}' and class '{}'.\n\nThe " "available formats are:\n\n{}".format( data_format, data_class.__name__, format_table_str)) def get_writer(data_format, data_class): """Get writer for ``data_format``. 
Parameters ---------- data_format : str The data format identifier. This is the string that is used to specify the data type when reading/writing. data_class : class The class of the object that can be written. Returns ------- writer : callable The registered writer function for this format and class. """ writers = [(fmt, cls) for fmt, cls in _writers if fmt == data_format] for writer_format, writer_class in writers: if _is_best_match(data_class, writer_class, writers): return _writers[(writer_format, writer_class)][0] else: format_table_str = _get_format_table_str(data_class, 'Write') raise IORegistryError( "No writer defined for format '{}' and class '{}'.\n\nThe " "available formats are:\n\n{}".format( data_format, data_class.__name__, format_table_str)) def read(cls, *args, format=None, cache=False, **kwargs): """ Read in data. The arguments passed to this method depend on the format. """ ctx = None try: if format is None: path = None fileobj = None if len(args): if isinstance(args[0], PATH_TYPES) and not os.path.isdir(args[0]): from astropy.utils.data import get_readable_fileobj # path might be a os.PathLike object if isinstance(args[0], os.PathLike): args = (os.fspath(args[0]),) + args[1:] path = args[0] try: ctx = get_readable_fileobj(args[0], encoding='binary', cache=cache) fileobj = ctx.__enter__() except OSError: raise except Exception: fileobj = None else: args = [fileobj] + list(args[1:]) elif hasattr(args[0], 'read'): path = None fileobj = args[0] format = _get_valid_format( 'read', cls, path, fileobj, args, kwargs) reader = get_reader(format, cls) data = reader(*args, **kwargs) if not isinstance(data, cls): # User has read with a subclass where only the parent class is # registered. This returns the parent class, so try coercing # to desired subclass. try: data = cls(data) except Exception: raise TypeError('could not convert reader output to {} ' 'class.'.format(cls.__name__)) finally: if ctx is not None: ctx.__exit__(*sys.exc_info()) return data def write(data, *args, format=None, **kwargs): """ Write out data. The arguments passed to this method depend on the format. """ if format is None: path = None fileobj = None if len(args): if isinstance(args[0], PATH_TYPES): # path might be a os.PathLike object if isinstance(args[0], os.PathLike): args = (os.fspath(args[0]),) + args[1:] path = args[0] fileobj = None elif hasattr(args[0], 'read'): path = None fileobj = args[0] format = _get_valid_format( 'write', data.__class__, path, fileobj, args, kwargs) writer = get_writer(format, data.__class__) writer(data, *args, **kwargs) def _is_best_match(class1, class2, format_classes): """ Determine if class2 is the "best" match for class1 in the list of classes. It is assumed that (class2 in classes) is True. class2 is the the best match if: - ``class1`` is a subclass of ``class2`` AND - ``class2`` is the nearest ancestor of ``class1`` that is in classes (which includes the case that ``class1 is class2``) """ if issubclass(class1, class2): classes = {cls for fmt, cls in format_classes} for parent in class1.__mro__: if parent is class2: # class2 is closest registered ancestor return True if parent in classes: # class2 was superceded return False return False def _get_valid_format(mode, cls, path, fileobj, args, kwargs): """ Returns the first valid format that can be used to read/write the data in question. Mode can be either 'read' or 'write'. 
""" valid_formats = identify_format(mode, cls, path, fileobj, args, kwargs) if len(valid_formats) == 0: format_table_str = _get_format_table_str(cls, mode.capitalize()) raise IORegistryError("Format could not be identified based on the" " file name or contents, please provide a" " 'format' argument.\n" "The available formats are:\n" "{}".format(format_table_str)) elif len(valid_formats) > 1: return _get_highest_priority_format(mode, cls, valid_formats) return valid_formats[0] def _get_highest_priority_format(mode, cls, valid_formats): """ Returns the reader or writer with the highest priority. If it is a tie, error. """ if mode == "read": format_dict = _readers mode_loader = "reader" elif mode == "write": format_dict = _writers mode_loader = "writer" best_formats = [] current_priority = - np.inf for format in valid_formats: try: _, priority = format_dict[(format, cls)] except KeyError: # We could throw an exception here, but get_reader/get_writer handle # this case better, instead maximally deprioritise the format. priority = - np.inf if priority == current_priority: best_formats.append(format) elif priority > current_priority: best_formats = [format] current_priority = priority if len(best_formats) > 1: raise IORegistryError("Format is ambiguous - options are: {}".format( ', '.join(sorted(valid_formats, key=itemgetter(0))) )) return best_formats[0] class UnifiedReadWrite: """Base class for the worker object used in unified read() or write() methods. This lightweight object is created for each `read()` or `write()` call via ``read`` / ``write`` descriptors on the data object class. The key driver is to allow complete format-specific documentation of available method options via a ``help()`` method, e.g. ``Table.read.help('fits')``. Subclasses must define a ``__call__`` method which is what actually gets called when the data object ``read()`` or ``write()`` method is called. For the canonical example see the `~astropy.table.Table` class implementation (in particular the ``connect.py`` module there). Parameters ---------- instance : object Descriptor calling instance or None if no instance cls : type Descriptor calling class (either owner class or instance class) method_name : str Method name, either 'read' or 'write' """ def __init__(self, instance, cls, method_name): self._instance = instance self._cls = cls self._method_name = method_name # 'read' or 'write' def help(self, format=None, out=None): """Output help documentation for the specified unified I/O ``format``. By default the help output is printed to the console via ``pydoc.pager``. Instead one can supplied a file handle object as ``out`` and the output will be written to that handle. Parameters ---------- format : str Unified I/O format name, e.g. 
'fits' or 'ascii.ecsv' out : None or path-like Output destination (default is stdout via a pager) """ cls = self._cls method_name = self._method_name # Get reader or writer function get_func = get_reader if method_name == 'read' else get_writer try: if format: read_write_func = get_func(format, cls) except IORegistryError as err: reader_doc = 'ERROR: ' + str(err) else: if format: # Format-specific header = ("{}.{}(format='{}') documentation\n" .format(cls.__name__, method_name, format)) doc = read_write_func.__doc__ else: # General docs header = f'{cls.__name__}.{method_name} general documentation\n' doc = getattr(cls, method_name).__doc__ reader_doc = re.sub('.', '=', header) reader_doc += header reader_doc += re.sub('.', '=', header) reader_doc += os.linesep if doc is not None: reader_doc += inspect.cleandoc(doc) if out is None: import pydoc pydoc.pager(reader_doc) else: out.write(reader_doc) def list_formats(self, out=None): """Print a list of available formats to console (or ``out`` filehandle) out : None or file handle object Output destination (default is stdout via a pager) """ tbl = get_formats(self._cls, self._method_name.capitalize()) del tbl['Data class'] if out is None: tbl.pprint(max_lines=-1, max_width=-1) else: out.write('\n'.join(tbl.pformat(max_lines=-1, max_width=-1))) return out class UnifiedReadWriteMethod(property): """Descriptor class for creating read() and write() methods in unified I/O. The canonical example is in the ``Table`` class, where the ``connect.py`` module creates subclasses of the ``UnifiedReadWrite`` class. These have custom ``__call__`` methods that do the setup work related to calling the registry read() or write() functions. With this, the ``Table`` class defines read and write methods as follows:: read = UnifiedReadWriteMethod(TableRead) write = UnifiedReadWriteMethod(TableWrite) Parameters ---------- func : `~astropy.io.registry.UnifiedReadWrite` subclass Class that defines read or write functionality """ # We subclass property to ensure that __set__ is defined and that, # therefore, we are a data descriptor, which cannot be overridden. # This also means we automatically inherit the __doc__ of fget (which will # be a UnifiedReadWrite subclass), and that this docstring gets recognized # and properly typeset by sphinx (which was previously an issue; see # gh-11554). # We override __get__ to pass both instance and class to UnifiedReadWrite. def __get__(self, instance, owner_cls): return self.fget(instance, owner_cls)
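Putting the registry pieces together, here is a minimal sketch of wiring a custom format into Table. The format name and helper functions are made up for illustration, but register_reader and register_identifier are the module-level functions defined above:

from astropy.table import Table
from astropy.io import registry


def my_tbl_reader(filename):
    # Delegate to the built-in ASCII reader for the toy ".tbl" format.
    return Table.read(filename, format='ascii')


def my_tbl_identifier(origin, path, fileobj, *args, **kwargs):
    return path is not None and path.endswith('.tbl')


registry.register_reader('my-tbl', Table, my_tbl_reader)
registry.register_identifier('my-tbl', Table, my_tbl_identifier)

# Table.read('data.tbl') now auto-identifies the format, and
# Table.read('data.tbl', format='my-tbl') selects it explicitly.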
# Licensed under a 3-clause BSD style license - see LICENSE.rst from packaging.version import Version import pytest import numpy as np import matplotlib import matplotlib.pyplot as plt from contextlib import nullcontext from matplotlib.contour import QuadContourSet from astropy import units as u from astropy.wcs import WCS from astropy.io import fits from astropy.coordinates import SkyCoord from astropy.utils.data import get_pkg_data_filename from astropy.visualization.wcsaxes.core import WCSAxes from astropy.visualization.wcsaxes.frame import ( EllipticalFrame, RectangularFrame, RectangularFrame1D) from astropy.visualization.wcsaxes.utils import get_coord_meta from astropy.visualization.wcsaxes.transforms import CurvedTransform ft_version = Version(matplotlib.ft2font.__freetype_version__) FREETYPE_261 = ft_version == Version("2.6.1") TEX_UNAVAILABLE = not matplotlib.checkdep_usetex(True) MATPLOTLIB_GT_3_4_2 = Version(matplotlib.__version__) > Version('3.4.2') def teardown_function(function): plt.close('all') def test_grid_regression(ignore_matplotlibrc): # Regression test for a bug that meant that if the rc parameter # axes.grid was set to True, WCSAxes would crash upon initalization. plt.rc('axes', grid=True) fig = plt.figure(figsize=(3, 3)) WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) def test_format_coord_regression(ignore_matplotlibrc, tmpdir): # Regression test for a bug that meant that if format_coord was called by # Matplotlib before the axes were drawn, an error occurred. fig = plt.figure(figsize=(3, 3)) ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) assert ax.format_coord(10, 10) == "" assert ax.coords[0].format_coord(10) == "" assert ax.coords[1].format_coord(10) == "" fig.savefig(tmpdir.join('nothing').strpath) assert ax.format_coord(10, 10) == "10.0 10.0 (world)" assert ax.coords[0].format_coord(10) == "10.0" assert ax.coords[1].format_coord(10) == "10.0" TARGET_HEADER = fits.Header.fromstring(""" NAXIS = 2 NAXIS1 = 200 NAXIS2 = 100 CTYPE1 = 'RA---MOL' CRPIX1 = 500 CRVAL1 = 180.0 CDELT1 = -0.4 CUNIT1 = 'deg ' CTYPE2 = 'DEC--MOL' CRPIX2 = 400 CRVAL2 = 0.0 CDELT2 = 0.4 CUNIT2 = 'deg ' COORDSYS= 'icrs ' """, sep='\n') @pytest.mark.parametrize('grid_type', ['lines', 'contours']) def test_no_numpy_warnings(ignore_matplotlibrc, tmpdir, grid_type): ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) ax.imshow(np.zeros((100, 200))) ax.coords.grid(color='white', grid_type=grid_type) if MATPLOTLIB_GT_3_4_2 and grid_type == 'contours': ctx = pytest.raises(AttributeError, match='dpi') else: ctx = nullcontext() with pytest.warns(None) as warning_lines, ctx: plt.savefig(tmpdir.join('test.png').strpath) # There should be no warnings raised if some pixels are outside WCS # (since this is normal). # BUT catch_warning was ignoring some warnings before, so now we # have to catch it. Otherwise, the pytest filterwarnings=error # setting in setup.cfg will fail this test. # There are actually multiple warnings but they are all similar. 
for w in warning_lines: w_msg = str(w.message) assert ('converting a masked element to nan' in w_msg or 'No contour levels were found within the data range' in w_msg or 'np.asscalar(a) is deprecated since NumPy v1.16' in w_msg or 'PY_SSIZE_T_CLEAN will be required' in w_msg) def test_invalid_frame_overlay(ignore_matplotlibrc): # Make sure a nice error is returned if a frame doesn't exist ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) with pytest.raises(ValueError) as exc: ax.get_coords_overlay('banana') assert exc.value.args[0] == 'Frame banana not found' with pytest.raises(ValueError) as exc: get_coord_meta('banana') assert exc.value.args[0] == 'Unknown frame: banana' def test_plot_coord_transform(ignore_matplotlibrc): twoMASS_k_header = get_pkg_data_filename('data/2MASS_k_header') twoMASS_k_header = fits.Header.fromtextfile(twoMASS_k_header) fig = plt.figure(figsize=(6, 6)) ax = fig.add_axes([0.15, 0.15, 0.8, 0.8], projection=WCS(twoMASS_k_header), aspect='equal') ax.set_xlim(-0.5, 720.5) ax.set_ylim(-0.5, 720.5) c = SkyCoord(359.76045223*u.deg, 0.26876217*u.deg) with pytest.raises(TypeError): ax.plot_coord(c, 'o', transform=ax.get_transform('galactic')) def test_set_label_properties(ignore_matplotlibrc): # Regression test to make sure that arguments passed to # set_xlabel/set_ylabel are passed to the underlying coordinate helpers ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) ax.set_xlabel('Test x label', labelpad=2, color='red') ax.set_ylabel('Test y label', labelpad=3, color='green') assert ax.coords[0].axislabels.get_text() == 'Test x label' assert ax.coords[0].axislabels.get_minpad('b') == 2 assert ax.coords[0].axislabels.get_color() == 'red' assert ax.coords[1].axislabels.get_text() == 'Test y label' assert ax.coords[1].axislabels.get_minpad('l') == 3 assert ax.coords[1].axislabels.get_color() == 'green' assert ax.get_xlabel() == 'Test x label' assert ax.get_ylabel() == 'Test y label' GAL_HEADER = fits.Header.fromstring(""" SIMPLE = T / conforms to FITS standard BITPIX = -32 / array data type NAXIS = 3 / number of array dimensions NAXIS1 = 31 NAXIS2 = 2881 NAXIS3 = 480 EXTEND = T CTYPE1 = 'DISTMOD ' CRVAL1 = 3.5 CDELT1 = 0.5 CRPIX1 = 1.0 CTYPE2 = 'GLON-CAR' CRVAL2 = 180.0 CDELT2 = -0.125 CRPIX2 = 1.0 CTYPE3 = 'GLAT-CAR' CRVAL3 = 0.0 CDELT3 = 0.125 CRPIX3 = 241.0 """, sep='\n') def test_slicing_warnings(ignore_matplotlibrc, tmpdir): # Regression test to make sure that no warnings are emitted by the tick # locator for the sliced axis when slicing a cube. # Scalar case wcs3d = WCS(naxis=3) wcs3d.wcs.ctype = ['x', 'y', 'z'] wcs3d.wcs.cunit = ['deg', 'deg', 'km/s'] wcs3d.wcs.crpix = [614.5, 856.5, 333] wcs3d.wcs.cdelt = [6.25, 6.25, 23] wcs3d.wcs.crval = [0., 0., 1.] 
with pytest.warns(None) as warning_lines: plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) plt.savefig(tmpdir.join('test.png').strpath) # For easy debugging if there are indeed warnings for warning in warning_lines: # https://github.com/astropy/astropy/issues/9690 if 'PY_SSIZE_T_CLEAN' not in str(warning.message): raise AssertionError(f'Unexpected warning: {warning}') # Angle case wcs3d = WCS(GAL_HEADER) with pytest.warns(None) as warning_lines: plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 2)) plt.savefig(tmpdir.join('test.png').strpath) # For easy debugging if there are indeed warnings for warning in warning_lines: # https://github.com/astropy/astropy/issues/9690 if 'PY_SSIZE_T_CLEAN' not in str(warning.message): raise AssertionError(f'Unexpected warning: {warning}') def test_plt_xlabel_ylabel(tmpdir): # Regression test for a bug that happened when using plt.xlabel # and plt.ylabel with Matplotlib 3.0 plt.subplot(projection=WCS()) plt.xlabel('Galactic Longitude') plt.ylabel('Galactic Latitude') plt.savefig(tmpdir.join('test.png').strpath) def test_grid_type_contours_transform(tmpdir): # Regression test for a bug that caused grid_type='contours' to not work # with custom transforms class CustomTransform(CurvedTransform): # We deliberately don't define the inverse, and has_inverse should # default to False. def transform(self, values): return values * 1.3 transform = CustomTransform() coord_meta = {'type': ('scalar', 'scalar'), 'unit': (u.m, u.s), 'wrap': (None, None), 'name': ('x', 'y')} fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], transform=transform, coord_meta=coord_meta) fig.add_axes(ax) ax.grid(grid_type='contours') fig.savefig(tmpdir.join('test.png').strpath) def test_plt_imshow_origin(): # Regression test for a bug that caused origin to be set to upper when # plt.imshow was called. ax = plt.subplot(projection=WCS()) plt.imshow(np.ones((2, 2))) assert ax.get_xlim() == (-0.5, 1.5) assert ax.get_ylim() == (-0.5, 1.5) def test_ax_imshow_origin(): # Regression test for a bug that caused origin to be set to upper when # ax.imshow was called with no origin ax = plt.subplot(projection=WCS()) ax.imshow(np.ones((2, 2))) assert ax.get_xlim() == (-0.5, 1.5) assert ax.get_ylim() == (-0.5, 1.5) def test_grid_contour_large_spacing(tmpdir): # Regression test for a bug that caused a crash when grid was called and # didn't produce grid lines (due e.g. to too large spacing) and was then # called again. filename = tmpdir.join('test.png').strpath ax = plt.subplot(projection=WCS()) ax.set_xlim(-0.5, 1.5) ax.set_ylim(-0.5, 1.5) ax.coords[0].set_ticks(values=[] * u.one) ax.coords[0].grid(grid_type='contours') plt.savefig(filename) ax.coords[0].grid(grid_type='contours') plt.savefig(filename) def test_contour_return(): # Regression test for a bug that caused contour and contourf to return None # instead of the contour object. fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) cset = ax.contour(np.arange(16).reshape(4, 4), transform=ax.get_transform('world')) assert isinstance(cset, QuadContourSet) cset = ax.contourf(np.arange(16).reshape(4, 4), transform=ax.get_transform('world')) assert isinstance(cset, QuadContourSet) def test_contour_empty(): # Regression test for a bug that caused contour to crash if no contours # were present. 
fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) with pytest.warns(UserWarning, match='No contour levels were found within the data range'): ax.contour(np.zeros((4, 4)), transform=ax.get_transform('world')) def test_iterate_coords(ignore_matplotlibrc, tmpdir): # Regression test for a bug that caused ax.coords to return too few axes wcs3d = WCS(naxis=3) wcs3d.wcs.ctype = ['x', 'y', 'z'] wcs3d.wcs.cunit = ['deg', 'deg', 'km/s'] wcs3d.wcs.crpix = [614.5, 856.5, 333] wcs3d.wcs.cdelt = [6.25, 6.25, 23] wcs3d.wcs.crval = [0., 0., 1.] ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) x, y, z = ax.coords def test_invalid_slices_errors(ignore_matplotlibrc): # Make sure that users get a clear message when specifying a WCS with # >2 dimensions without giving the 'slices' argument, or if the 'slices' # argument has too many/few elements. wcs3d = WCS(naxis=3) wcs3d.wcs.ctype = ['x', 'y', 'z'] plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) with pytest.raises(ValueError) as exc: plt.subplot(1, 1, 1, projection=wcs3d) assert exc.value.args[0] == ("WCS has more than 2 pixel dimensions, so " "'slices' should be set") with pytest.raises(ValueError) as exc: plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1, 2)) assert exc.value.args[0] == ("'slices' should have as many elements as " "WCS has pixel dimensions (should be 3)") wcs2d = WCS(naxis=2) wcs2d.wcs.ctype = ['x', 'y'] ax = plt.subplot(1, 1, 1, projection=wcs2d) assert ax.frame_class is RectangularFrame ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=('x', 'y')) assert ax.frame_class is RectangularFrame ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=('y', 'x')) assert ax.frame_class is RectangularFrame ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=['x', 'y']) assert ax.frame_class is RectangularFrame ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=(1, 'x')) assert ax.frame_class is RectangularFrame1D wcs1d = WCS(naxis=1) wcs1d.wcs.ctype = ['x'] ax = plt.subplot(1, 1, 1, projection=wcs1d) assert ax.frame_class is RectangularFrame1D with pytest.raises(ValueError): plt.subplot(1, 1, 1, projection=wcs2d, slices=(1, 'y')) EXPECTED_REPR_1 = """ <CoordinatesMap with 3 world coordinates: index aliases type unit wrap format_unit visible ----- ------------------------------ --------- ---- ---- ----------- ------- 0 distmod dist scalar None no 1 pos.galactic.lon glon-car glon longitude deg 360 deg yes 2 pos.galactic.lat glat-car glat latitude deg None deg yes > """.strip() EXPECTED_REPR_2 = """ <CoordinatesMap with 3 world coordinates: index aliases type unit wrap format_unit visible ----- ------------------------------ --------- ---- ---- ----------- ------- 0 distmod dist scalar None yes 1 pos.galactic.lon glon-car glon longitude deg 360 deg yes 2 pos.galactic.lat glat-car glat latitude deg None deg yes > """.strip() def test_repr(ignore_matplotlibrc): # Unit test to make sure __repr__ looks as expected wcs3d = WCS(GAL_HEADER) # Cube header has world coordinates as distance, lon, lat, so start off # by slicing in a way that we select just lon,lat: ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=(1, 'x', 'y')) assert repr(ax.coords) == EXPECTED_REPR_1 # Now slice in a way that all world coordinates are still present: ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) assert repr(ax.coords) == EXPECTED_REPR_2 @pytest.fixture def time_spectral_wcs_2d(): wcs = WCS(naxis=2) wcs.wcs.ctype = ['FREQ', 'TIME'] wcs.wcs.set() return wcs def test_time_wcs(time_spectral_wcs_2d): 
# Regression test for a bug that caused WCSAxes to error when using a WCS # with a time axis. plt.subplot(projection=time_spectral_wcs_2d) @pytest.mark.skipif('TEX_UNAVAILABLE') def test_simplify_labels_usetex(ignore_matplotlibrc, tmpdir): """Regression test for https://github.com/astropy/astropy/issues/8004.""" plt.rc('text', usetex=True) header = { 'NAXIS': 2, 'NAXIS1': 360, 'NAXIS2': 180, 'CRPIX1': 180.5, 'CRPIX2': 90.5, 'CRVAL1': 180.0, 'CRVAL2': 0.0, 'CDELT1': -2 * np.sqrt(2) / np.pi, 'CDELT2': 2 * np.sqrt(2) / np.pi, 'CTYPE1': 'RA---MOL', 'CTYPE2': 'DEC--MOL', 'RADESYS': 'ICRS'} wcs = WCS(header) fig, ax = plt.subplots( subplot_kw=dict(frame_class=EllipticalFrame, projection=wcs)) ax.set_xlim(-0.5, header['NAXIS1'] - 0.5) ax.set_ylim(-0.5, header['NAXIS2'] - 0.5) ax.coords[0].set_ticklabel(exclude_overlapping=True) ax.coords[1].set_ticklabel(exclude_overlapping=True) ax.coords[0].set_ticks(spacing=45 * u.deg) ax.coords[1].set_ticks(spacing=30 * u.deg) ax.grid() fig.savefig(tmpdir / 'plot.png') @pytest.mark.parametrize('frame_class', [RectangularFrame, EllipticalFrame]) def test_set_labels_with_coords(ignore_matplotlibrc, frame_class): """Test if ``axis.set_xlabel()`` calls the correct ``coords[i]_set_axislabel()`` in a WCS plot. Regression test for https://github.com/astropy/astropy/issues/10435. """ labels = ['RA', 'Declination'] header = { 'NAXIS': 2, 'NAXIS1': 360, 'NAXIS2': 180, 'CRPIX1': 180.5, 'CRPIX2': 90.5, 'CRVAL1': 180.0, 'CRVAL2': 0.0, 'CDELT1': -2 * np.sqrt(2) / np.pi, 'CDELT2': 2 * np.sqrt(2) / np.pi, 'CTYPE1': 'RA---AIT', 'CTYPE2': 'DEC--AIT'} wcs = WCS(header) fig, ax = plt.subplots( subplot_kw=dict(frame_class=frame_class, projection=wcs)) ax.set_xlabel(labels[0]) ax.set_ylabel(labels[1]) assert ax.get_xlabel() == labels[0] assert ax.get_ylabel() == labels[1] for i in range(2): assert ax.coords[i].get_axislabel() == labels[i] @pytest.mark.parametrize('atol', [0.2, 1.0e-8]) def test_bbox_size(atol): # Test for the size of a WCSAxes bbox (only have Matplotlib >= 3.0 now) extents = [11.38888888888889, 3.5, 576.0, 432.0] fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) fig.canvas.draw() renderer = fig.canvas.renderer ax_bbox = ax.get_tightbbox(renderer) # Enforce strict test only with reference Freetype version if atol < 0.1 and not FREETYPE_261: pytest.xfail("Exact BoundingBox dimensions are only ensured with FreeType 2.6.1") assert np.allclose(ax_bbox.extents, extents, atol=atol)
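The WCSAxes behaviours covered above reduce to a short recipe in user code. A sketch using the TARGET_HEADER defined earlier in this file (any 2-D celestial WCS works the same way; the output filename is arbitrary):

import numpy as np
import matplotlib.pyplot as plt
from astropy.wcs import WCS

wcs = WCS(TARGET_HEADER)

fig = plt.figure(figsize=(6, 3))
# The projection hook (_as_mpl_axes) turns this subplot into a WCSAxes.
ax = fig.add_subplot(1, 1, 1, projection=wcs)
ax.imshow(np.zeros((100, 200)), origin='lower')
ax.coords.grid(color='white', grid_type='lines')
ax.set_xlabel('Right Ascension')
ax.set_ylabel('Declination')
fig.savefig('mollweide.png')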
pllim/astropy
astropy/visualization/wcsaxes/tests/test_misc.py
astropy/io/registry.py
import os import abc import numpy as np __all__ = ['BaseLowLevelWCS', 'validate_physical_types'] class BaseLowLevelWCS(metaclass=abc.ABCMeta): """ Abstract base class for the low-level WCS interface. This is described in `APE 14: A shared Python interface for World Coordinate Systems <https://doi.org/10.5281/zenodo.1188875>`_. """ @property @abc.abstractmethod def pixel_n_dim(self): """ The number of axes in the pixel coordinate system. """ @property @abc.abstractmethod def world_n_dim(self): """ The number of axes in the world coordinate system. """ @property @abc.abstractmethod def world_axis_physical_types(self): """ An iterable of strings describing the physical type for each world axis. These should be names from the VO UCD1+ controlled Vocabulary (http://www.ivoa.net/documents/latest/UCDlist.html). If no matching UCD type exists, this can instead be ``"custom:xxx"``, where ``xxx`` is an arbitrary string. Alternatively, if the physical type is unknown/undefined, an element can be `None`. """ @property @abc.abstractmethod def world_axis_units(self): """ An iterable of strings given the units of the world coordinates for each axis. The strings should follow the `IVOA VOUnit standard <http://ivoa.net/documents/VOUnits/>`_ (though as noted in the VOUnit specification document, units that do not follow this standard are still allowed, but just not recommended). """ @abc.abstractmethod def pixel_to_world_values(self, *pixel_arrays): """ Convert pixel coordinates to world coordinates. This method takes `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` scalars or arrays as input, and pixel coordinates should be zero-based. Returns `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim` scalars or arrays in units given by `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_units`. Note that pixel coordinates are assumed to be 0 at the center of the first pixel in each dimension. If a pixel is in a region where the WCS is not defined, NaN can be returned. The coordinates should be specified in the ``(x, y)`` order, where for an image, ``x`` is the horizontal coordinate and ``y`` is the vertical coordinate. If `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim` is ``1``, this method returns a single scalar or array, otherwise a tuple of scalars or arrays is returned. """ def array_index_to_world_values(self, *index_arrays): """ Convert array indices to world coordinates. This is the same as `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_to_world_values` except that the indices should be given in ``(i, j)`` order, where for an image ``i`` is the row and ``j`` is the column (i.e. the opposite order to `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_to_world_values`). If `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim` is ``1``, this method returns a single scalar or array, otherwise a tuple of scalars or arrays is returned. """ return self.pixel_to_world_values(*index_arrays[::-1]) @abc.abstractmethod def world_to_pixel_values(self, *world_arrays): """ Convert world coordinates to pixel coordinates. This method takes `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim` scalars or arrays as input in units given by `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_units`. Returns `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` scalars or arrays. Note that pixel coordinates are assumed to be 0 at the center of the first pixel in each dimension. If a world coordinate does not have a matching pixel coordinate, NaN can be returned. 
The coordinates should be returned in the ``(x, y)`` order, where for an image, ``x`` is the horizontal coordinate and ``y`` is the vertical coordinate. If `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` is ``1``, this method returns a single scalar or array, otherwise a tuple of scalars or arrays is returned. """ def world_to_array_index_values(self, *world_arrays): """ Convert world coordinates to array indices. This is the same as `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_to_pixel_values` except that the indices should be returned in ``(i, j)`` order, where for an image ``i`` is the row and ``j`` is the column (i.e. the opposite order to `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_to_world_values`). The indices should be returned as rounded integers. If `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` is ``1``, this method returns a single scalar or array, otherwise a tuple of scalars or arrays is returned. """ pixel_arrays = self.world_to_pixel_values(*world_arrays) if self.pixel_n_dim == 1: pixel_arrays = (pixel_arrays,) else: pixel_arrays = pixel_arrays[::-1] array_indices = tuple(np.asarray(np.floor(pixel + 0.5), dtype=np.int_) for pixel in pixel_arrays) return array_indices[0] if self.pixel_n_dim == 1 else array_indices @property @abc.abstractmethod def world_axis_object_components(self): """ A list with `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim` elements giving information on constructing high-level objects for the world coordinates. Each element of the list is a tuple with three items: * The first is a name for the world object this world array corresponds to, which *must* match the string names used in `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_classes`. Note that names might appear twice because two world arrays might correspond to a single world object (e.g. a celestial coordinate might have both “ra” and “dec” arrays, which correspond to a single sky coordinate object). * The second element is either a string keyword argument name or a positional index for the corresponding class from `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_classes`. * The third argument is a string giving the name of the property to access on the corresponding class from `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_classes` in order to get numerical values. Alternatively, this argument can be a callable Python object that taks a high-level coordinate object and returns the numerical values suitable for passing to the low-level WCS transformation methods. See the document `APE 14: A shared Python interface for World Coordinate Systems <https://doi.org/10.5281/zenodo.1188875>`_ for examples. """ @property @abc.abstractmethod def world_axis_object_classes(self): """ A dictionary giving information on constructing high-level objects for the world coordinates. Each key of the dictionary is a string key from `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_components`, and each value is a tuple with three elements or four elements: * The first element of the tuple must be a class or a string specifying the fully-qualified name of a class, which will specify the actual Python object to be created. * The second element, should be a tuple specifying the positional arguments required to initialize the class. If `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_components` specifies that the world coordinates should be passed as a positional argument, this this tuple should include `None` placeholders for the world coordinates. 
* The third tuple element must be a dictionary with the keyword arguments required to initialize the class. * Optionally, for advanced use cases, the fourth element (if present) should be a callable Python object that gets called instead of the class and gets passed the positional and keyword arguments. It should return an object of the type of the first element in the tuple. Note that we don't require the classes to be Astropy classes since there is no guarantee that Astropy will have all the classes to represent all kinds of world coordinates. Furthermore, we recommend that the output be kept as human-readable as possible. The classes used here should have the ability to do conversions by passing an instance as the first argument to the same class with different arguments (e.g. ``Time(Time(...), scale='tai')``). This is a requirement for the implementation of the high-level interface. The second and third tuple elements for each value of this dictionary can in turn contain either instances of classes, or if necessary can contain serialized versions that should take the same form as the main classes described above (a tuple with three elements with the fully qualified name of the class, then the positional arguments and the keyword arguments). For low-level API objects implemented in Python, we recommend simply returning the actual objects (not the serialized form) for optimal performance. Implementations should either always or never use serialized classes to represent Python objects, and should indicate which of these they follow using the `~astropy.wcs.wcsapi.BaseLowLevelWCS.serialized_classes` attribute. See the document `APE 14: A shared Python interface for World Coordinate Systems <https://doi.org/10.5281/zenodo.1188875>`_ for examples . """ # The following three properties have default fallback implementations, so # they are not abstract. @property def array_shape(self): """ The shape of the data that the WCS applies to as a tuple of length `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` in ``(row, column)`` order (the convention for arrays in Python). If the WCS is valid in the context of a dataset with a particular shape, then this property can be used to store the shape of the data. This can be used for example if implementing slicing of WCS objects. This is an optional property, and it should return `None` if a shape is not known or relevant. """ if self.pixel_shape is None: return None else: return self.pixel_shape[::-1] @property def pixel_shape(self): """ The shape of the data that the WCS applies to as a tuple of length `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` in ``(x, y)`` order (where for an image, ``x`` is the horizontal coordinate and ``y`` is the vertical coordinate). If the WCS is valid in the context of a dataset with a particular shape, then this property can be used to store the shape of the data. This can be used for example if implementing slicing of WCS objects. This is an optional property, and it should return `None` if a shape is not known or relevant. If you are interested in getting a shape that is comparable to that of a Numpy array, you should use `~astropy.wcs.wcsapi.BaseLowLevelWCS.array_shape` instead. """ return None @property def pixel_bounds(self): """ The bounds (in pixel coordinates) inside which the WCS is defined, as a list with `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` ``(min, max)`` tuples. The bounds should be given in ``[(xmin, xmax), (ymin, ymax)]`` order. 
WCS solutions are sometimes only guaranteed to be accurate within a certain range of pixel values, for example when defining a WCS that includes fitted distortions. This is an optional property, and it should return `None` if the bounds are not known or relevant. """ return None @property def pixel_axis_names(self): """ An iterable of strings describing the name for each pixel axis. If an axis does not have a name, an empty string should be returned (this is the default behavior for all axes if a subclass does not override this property). Note that these names are just for display purposes and are not standardized. """ return [''] * self.pixel_n_dim @property def world_axis_names(self): """ An iterable of strings describing the name for each world axis. If an axis does not have a name, an empty string should be returned (this is the default behavior for all axes if a subclass does not override this property). Note that these names are just for display purposes and are not standardized. For standardized axis types, see `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_physical_types`. """ return [''] * self.world_n_dim @property def axis_correlation_matrix(self): """ Returns an (`~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim`, `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim`) matrix that indicates using booleans whether a given world coordinate depends on a given pixel coordinate. This defaults to a matrix where all elements are `True` in the absence of any further information. For completely independent axes, the diagonal would be `True` and all other entries `False`. """ return np.ones((self.world_n_dim, self.pixel_n_dim), dtype=bool) @property def serialized_classes(self): """ Indicates whether Python objects are given in serialized form or as actual Python objects. """ return False def _as_mpl_axes(self): """ Compatibility hook for Matplotlib and WCSAxes. With this method, one can do:: from astropy.wcs import WCS import matplotlib.pyplot as plt wcs = WCS('filename.fits') fig = plt.figure() ax = fig.add_axes([0.15, 0.1, 0.8, 0.8], projection=wcs) ... and this will generate a plot with the correct WCS coordinates on the axes. """ from astropy.visualization.wcsaxes import WCSAxes return WCSAxes, {'wcs': self} UCDS_FILE = os.path.join(os.path.dirname(__file__), 'data', 'ucds.txt') with open(UCDS_FILE) as f: VALID_UCDS = {x.strip() for x in f.read().splitlines()[1:]} def validate_physical_types(physical_types): """ Validate a list of physical types against the UCD1+ standard. """ for physical_type in physical_types: if (physical_type is not None and physical_type not in VALID_UCDS and not physical_type.startswith('custom:')): raise ValueError( f"'{physical_type}' is not a valid IVOA UCD1+ physical type. " "It must be a string specified in the list (http://www.ivoa.net/documents/latest/UCDlist.html) " "or if no matching type exists it can be any string prepended with 'custom:'." )
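As a rough usage sketch of the ordering conventions described above (not part of this module): `astropy.wcs.WCS` implements this low-level API, so it can illustrate how ``pixel_to_world_values`` works in ``(x, y)`` order while ``world_to_array_index_values`` returns rounded ``(row, column)`` indices. The header parameters below are purely illustrative.

from astropy.wcs import WCS

# Illustrative 2-d celestial WCS; the exact parameters are arbitrary.
wcs = WCS(naxis=2)
wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN']
wcs.wcs.crpix = [50, 50]
wcs.wcs.cdelt = [-0.01, 0.01]
wcs.wcs.crval = [30.0, 45.0]

# pixel_to_world_values takes and returns values in (x, y) order...
ra, dec = wcs.pixel_to_world_values(10.0, 20.0)

# ...while world_to_array_index_values returns rounded (row, column)
# indices, i.e. the reverse of the pixel order.
i, j = wcs.world_to_array_index_values(ra, dec)
assert (int(i), int(j)) == (20, 10)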
# Licensed under a 3-clause BSD style license - see LICENSE.rst from packaging.version import Version import pytest import numpy as np import matplotlib import matplotlib.pyplot as plt from contextlib import nullcontext from matplotlib.contour import QuadContourSet from astropy import units as u from astropy.wcs import WCS from astropy.io import fits from astropy.coordinates import SkyCoord from astropy.utils.data import get_pkg_data_filename from astropy.visualization.wcsaxes.core import WCSAxes from astropy.visualization.wcsaxes.frame import ( EllipticalFrame, RectangularFrame, RectangularFrame1D) from astropy.visualization.wcsaxes.utils import get_coord_meta from astropy.visualization.wcsaxes.transforms import CurvedTransform ft_version = Version(matplotlib.ft2font.__freetype_version__) FREETYPE_261 = ft_version == Version("2.6.1") TEX_UNAVAILABLE = not matplotlib.checkdep_usetex(True) MATPLOTLIB_GT_3_4_2 = Version(matplotlib.__version__) > Version('3.4.2') def teardown_function(function): plt.close('all') def test_grid_regression(ignore_matplotlibrc): # Regression test for a bug that meant that if the rc parameter # axes.grid was set to True, WCSAxes would crash upon initialization. plt.rc('axes', grid=True) fig = plt.figure(figsize=(3, 3)) WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) def test_format_coord_regression(ignore_matplotlibrc, tmpdir): # Regression test for a bug that meant that if format_coord was called by # Matplotlib before the axes were drawn, an error occurred. fig = plt.figure(figsize=(3, 3)) ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) assert ax.format_coord(10, 10) == "" assert ax.coords[0].format_coord(10) == "" assert ax.coords[1].format_coord(10) == "" fig.savefig(tmpdir.join('nothing').strpath) assert ax.format_coord(10, 10) == "10.0 10.0 (world)" assert ax.coords[0].format_coord(10) == "10.0" assert ax.coords[1].format_coord(10) == "10.0" TARGET_HEADER = fits.Header.fromstring(""" NAXIS = 2 NAXIS1 = 200 NAXIS2 = 100 CTYPE1 = 'RA---MOL' CRPIX1 = 500 CRVAL1 = 180.0 CDELT1 = -0.4 CUNIT1 = 'deg ' CTYPE2 = 'DEC--MOL' CRPIX2 = 400 CRVAL2 = 0.0 CDELT2 = 0.4 CUNIT2 = 'deg ' COORDSYS= 'icrs ' """, sep='\n') @pytest.mark.parametrize('grid_type', ['lines', 'contours']) def test_no_numpy_warnings(ignore_matplotlibrc, tmpdir, grid_type): ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) ax.imshow(np.zeros((100, 200))) ax.coords.grid(color='white', grid_type=grid_type) if MATPLOTLIB_GT_3_4_2 and grid_type == 'contours': ctx = pytest.raises(AttributeError, match='dpi') else: ctx = nullcontext() with pytest.warns(None) as warning_lines, ctx: plt.savefig(tmpdir.join('test.png').strpath) # There should be no warnings raised if some pixels are outside WCS # (since this is normal). # BUT catch_warnings was ignoring some warnings before, so now we # have to catch them. Otherwise, the pytest filterwarnings=error # setting in setup.cfg will fail this test. # There are actually multiple warnings but they are all similar.
for w in warning_lines: w_msg = str(w.message) assert ('converting a masked element to nan' in w_msg or 'No contour levels were found within the data range' in w_msg or 'np.asscalar(a) is deprecated since NumPy v1.16' in w_msg or 'PY_SSIZE_T_CLEAN will be required' in w_msg) def test_invalid_frame_overlay(ignore_matplotlibrc): # Make sure a nice error is returned if a frame doesn't exist ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) with pytest.raises(ValueError) as exc: ax.get_coords_overlay('banana') assert exc.value.args[0] == 'Frame banana not found' with pytest.raises(ValueError) as exc: get_coord_meta('banana') assert exc.value.args[0] == 'Unknown frame: banana' def test_plot_coord_transform(ignore_matplotlibrc): twoMASS_k_header = get_pkg_data_filename('data/2MASS_k_header') twoMASS_k_header = fits.Header.fromtextfile(twoMASS_k_header) fig = plt.figure(figsize=(6, 6)) ax = fig.add_axes([0.15, 0.15, 0.8, 0.8], projection=WCS(twoMASS_k_header), aspect='equal') ax.set_xlim(-0.5, 720.5) ax.set_ylim(-0.5, 720.5) c = SkyCoord(359.76045223*u.deg, 0.26876217*u.deg) with pytest.raises(TypeError): ax.plot_coord(c, 'o', transform=ax.get_transform('galactic')) def test_set_label_properties(ignore_matplotlibrc): # Regression test to make sure that arguments passed to # set_xlabel/set_ylabel are passed to the underlying coordinate helpers ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) ax.set_xlabel('Test x label', labelpad=2, color='red') ax.set_ylabel('Test y label', labelpad=3, color='green') assert ax.coords[0].axislabels.get_text() == 'Test x label' assert ax.coords[0].axislabels.get_minpad('b') == 2 assert ax.coords[0].axislabels.get_color() == 'red' assert ax.coords[1].axislabels.get_text() == 'Test y label' assert ax.coords[1].axislabels.get_minpad('l') == 3 assert ax.coords[1].axislabels.get_color() == 'green' assert ax.get_xlabel() == 'Test x label' assert ax.get_ylabel() == 'Test y label' GAL_HEADER = fits.Header.fromstring(""" SIMPLE = T / conforms to FITS standard BITPIX = -32 / array data type NAXIS = 3 / number of array dimensions NAXIS1 = 31 NAXIS2 = 2881 NAXIS3 = 480 EXTEND = T CTYPE1 = 'DISTMOD ' CRVAL1 = 3.5 CDELT1 = 0.5 CRPIX1 = 1.0 CTYPE2 = 'GLON-CAR' CRVAL2 = 180.0 CDELT2 = -0.125 CRPIX2 = 1.0 CTYPE3 = 'GLAT-CAR' CRVAL3 = 0.0 CDELT3 = 0.125 CRPIX3 = 241.0 """, sep='\n') def test_slicing_warnings(ignore_matplotlibrc, tmpdir): # Regression test to make sure that no warnings are emitted by the tick # locator for the sliced axis when slicing a cube. # Scalar case wcs3d = WCS(naxis=3) wcs3d.wcs.ctype = ['x', 'y', 'z'] wcs3d.wcs.cunit = ['deg', 'deg', 'km/s'] wcs3d.wcs.crpix = [614.5, 856.5, 333] wcs3d.wcs.cdelt = [6.25, 6.25, 23] wcs3d.wcs.crval = [0., 0., 1.] 
with pytest.warns(None) as warning_lines: plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) plt.savefig(tmpdir.join('test.png').strpath) # For easy debugging if there are indeed warnings for warning in warning_lines: # https://github.com/astropy/astropy/issues/9690 if 'PY_SSIZE_T_CLEAN' not in str(warning.message): raise AssertionError(f'Unexpected warning: {warning}') # Angle case wcs3d = WCS(GAL_HEADER) with pytest.warns(None) as warning_lines: plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 2)) plt.savefig(tmpdir.join('test.png').strpath) # For easy debugging if there are indeed warnings for warning in warning_lines: # https://github.com/astropy/astropy/issues/9690 if 'PY_SSIZE_T_CLEAN' not in str(warning.message): raise AssertionError(f'Unexpected warning: {warning}') def test_plt_xlabel_ylabel(tmpdir): # Regression test for a bug that happened when using plt.xlabel # and plt.ylabel with Matplotlib 3.0 plt.subplot(projection=WCS()) plt.xlabel('Galactic Longitude') plt.ylabel('Galactic Latitude') plt.savefig(tmpdir.join('test.png').strpath) def test_grid_type_contours_transform(tmpdir): # Regression test for a bug that caused grid_type='contours' to not work # with custom transforms class CustomTransform(CurvedTransform): # We deliberately don't define the inverse, and has_inverse should # default to False. def transform(self, values): return values * 1.3 transform = CustomTransform() coord_meta = {'type': ('scalar', 'scalar'), 'unit': (u.m, u.s), 'wrap': (None, None), 'name': ('x', 'y')} fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], transform=transform, coord_meta=coord_meta) fig.add_axes(ax) ax.grid(grid_type='contours') fig.savefig(tmpdir.join('test.png').strpath) def test_plt_imshow_origin(): # Regression test for a bug that caused origin to be set to upper when # plt.imshow was called. ax = plt.subplot(projection=WCS()) plt.imshow(np.ones((2, 2))) assert ax.get_xlim() == (-0.5, 1.5) assert ax.get_ylim() == (-0.5, 1.5) def test_ax_imshow_origin(): # Regression test for a bug that caused origin to be set to upper when # ax.imshow was called with no origin ax = plt.subplot(projection=WCS()) ax.imshow(np.ones((2, 2))) assert ax.get_xlim() == (-0.5, 1.5) assert ax.get_ylim() == (-0.5, 1.5) def test_grid_contour_large_spacing(tmpdir): # Regression test for a bug that caused a crash when grid was called and # didn't produce grid lines (due e.g. to too large spacing) and was then # called again. filename = tmpdir.join('test.png').strpath ax = plt.subplot(projection=WCS()) ax.set_xlim(-0.5, 1.5) ax.set_ylim(-0.5, 1.5) ax.coords[0].set_ticks(values=[] * u.one) ax.coords[0].grid(grid_type='contours') plt.savefig(filename) ax.coords[0].grid(grid_type='contours') plt.savefig(filename) def test_contour_return(): # Regression test for a bug that caused contour and contourf to return None # instead of the contour object. fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) cset = ax.contour(np.arange(16).reshape(4, 4), transform=ax.get_transform('world')) assert isinstance(cset, QuadContourSet) cset = ax.contourf(np.arange(16).reshape(4, 4), transform=ax.get_transform('world')) assert isinstance(cset, QuadContourSet) def test_contour_empty(): # Regression test for a bug that caused contour to crash if no contours # were present. 
fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) with pytest.warns(UserWarning, match='No contour levels were found within the data range'): ax.contour(np.zeros((4, 4)), transform=ax.get_transform('world')) def test_iterate_coords(ignore_matplotlibrc, tmpdir): # Regression test for a bug that caused ax.coords to return too few axes wcs3d = WCS(naxis=3) wcs3d.wcs.ctype = ['x', 'y', 'z'] wcs3d.wcs.cunit = ['deg', 'deg', 'km/s'] wcs3d.wcs.crpix = [614.5, 856.5, 333] wcs3d.wcs.cdelt = [6.25, 6.25, 23] wcs3d.wcs.crval = [0., 0., 1.] ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) x, y, z = ax.coords def test_invalid_slices_errors(ignore_matplotlibrc): # Make sure that users get a clear message when specifying a WCS with # >2 dimensions without giving the 'slices' argument, or if the 'slices' # argument has too many/few elements. wcs3d = WCS(naxis=3) wcs3d.wcs.ctype = ['x', 'y', 'z'] plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) with pytest.raises(ValueError) as exc: plt.subplot(1, 1, 1, projection=wcs3d) assert exc.value.args[0] == ("WCS has more than 2 pixel dimensions, so " "'slices' should be set") with pytest.raises(ValueError) as exc: plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1, 2)) assert exc.value.args[0] == ("'slices' should have as many elements as " "WCS has pixel dimensions (should be 3)") wcs2d = WCS(naxis=2) wcs2d.wcs.ctype = ['x', 'y'] ax = plt.subplot(1, 1, 1, projection=wcs2d) assert ax.frame_class is RectangularFrame ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=('x', 'y')) assert ax.frame_class is RectangularFrame ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=('y', 'x')) assert ax.frame_class is RectangularFrame ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=['x', 'y']) assert ax.frame_class is RectangularFrame ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=(1, 'x')) assert ax.frame_class is RectangularFrame1D wcs1d = WCS(naxis=1) wcs1d.wcs.ctype = ['x'] ax = plt.subplot(1, 1, 1, projection=wcs1d) assert ax.frame_class is RectangularFrame1D with pytest.raises(ValueError): plt.subplot(1, 1, 1, projection=wcs2d, slices=(1, 'y')) EXPECTED_REPR_1 = """ <CoordinatesMap with 3 world coordinates: index aliases type unit wrap format_unit visible ----- ------------------------------ --------- ---- ---- ----------- ------- 0 distmod dist scalar None no 1 pos.galactic.lon glon-car glon longitude deg 360 deg yes 2 pos.galactic.lat glat-car glat latitude deg None deg yes > """.strip() EXPECTED_REPR_2 = """ <CoordinatesMap with 3 world coordinates: index aliases type unit wrap format_unit visible ----- ------------------------------ --------- ---- ---- ----------- ------- 0 distmod dist scalar None yes 1 pos.galactic.lon glon-car glon longitude deg 360 deg yes 2 pos.galactic.lat glat-car glat latitude deg None deg yes > """.strip() def test_repr(ignore_matplotlibrc): # Unit test to make sure __repr__ looks as expected wcs3d = WCS(GAL_HEADER) # Cube header has world coordinates as distance, lon, lat, so start off # by slicing in a way that we select just lon,lat: ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=(1, 'x', 'y')) assert repr(ax.coords) == EXPECTED_REPR_1 # Now slice in a way that all world coordinates are still present: ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) assert repr(ax.coords) == EXPECTED_REPR_2 @pytest.fixture def time_spectral_wcs_2d(): wcs = WCS(naxis=2) wcs.wcs.ctype = ['FREQ', 'TIME'] wcs.wcs.set() return wcs def test_time_wcs(time_spectral_wcs_2d): 
# Regression test for a bug that caused WCSAxes to error when using a WCS # with a time axis. plt.subplot(projection=time_spectral_wcs_2d) @pytest.mark.skipif('TEX_UNAVAILABLE') def test_simplify_labels_usetex(ignore_matplotlibrc, tmpdir): """Regression test for https://github.com/astropy/astropy/issues/8004.""" plt.rc('text', usetex=True) header = { 'NAXIS': 2, 'NAXIS1': 360, 'NAXIS2': 180, 'CRPIX1': 180.5, 'CRPIX2': 90.5, 'CRVAL1': 180.0, 'CRVAL2': 0.0, 'CDELT1': -2 * np.sqrt(2) / np.pi, 'CDELT2': 2 * np.sqrt(2) / np.pi, 'CTYPE1': 'RA---MOL', 'CTYPE2': 'DEC--MOL', 'RADESYS': 'ICRS'} wcs = WCS(header) fig, ax = plt.subplots( subplot_kw=dict(frame_class=EllipticalFrame, projection=wcs)) ax.set_xlim(-0.5, header['NAXIS1'] - 0.5) ax.set_ylim(-0.5, header['NAXIS2'] - 0.5) ax.coords[0].set_ticklabel(exclude_overlapping=True) ax.coords[1].set_ticklabel(exclude_overlapping=True) ax.coords[0].set_ticks(spacing=45 * u.deg) ax.coords[1].set_ticks(spacing=30 * u.deg) ax.grid() fig.savefig(tmpdir / 'plot.png') @pytest.mark.parametrize('frame_class', [RectangularFrame, EllipticalFrame]) def test_set_labels_with_coords(ignore_matplotlibrc, frame_class): """Test if ``axis.set_xlabel()`` calls the correct ``coords[i].set_axislabel()`` in a WCS plot. Regression test for https://github.com/astropy/astropy/issues/10435. """ labels = ['RA', 'Declination'] header = { 'NAXIS': 2, 'NAXIS1': 360, 'NAXIS2': 180, 'CRPIX1': 180.5, 'CRPIX2': 90.5, 'CRVAL1': 180.0, 'CRVAL2': 0.0, 'CDELT1': -2 * np.sqrt(2) / np.pi, 'CDELT2': 2 * np.sqrt(2) / np.pi, 'CTYPE1': 'RA---AIT', 'CTYPE2': 'DEC--AIT'} wcs = WCS(header) fig, ax = plt.subplots( subplot_kw=dict(frame_class=frame_class, projection=wcs)) ax.set_xlabel(labels[0]) ax.set_ylabel(labels[1]) assert ax.get_xlabel() == labels[0] assert ax.get_ylabel() == labels[1] for i in range(2): assert ax.coords[i].get_axislabel() == labels[i] @pytest.mark.parametrize('atol', [0.2, 1.0e-8]) def test_bbox_size(atol): # Test for the size of a WCSAxes bbox (only Matplotlib >= 3.0 is supported now) extents = [11.38888888888889, 3.5, 576.0, 432.0] fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) fig.canvas.draw() renderer = fig.canvas.renderer ax_bbox = ax.get_tightbbox(renderer) # Enforce strict test only with reference FreeType version if atol < 0.1 and not FREETYPE_261: pytest.xfail("Exact BoundingBox dimensions are only ensured with FreeType 2.6.1") assert np.allclose(ax_bbox.extents, extents, atol=atol)
pllim/astropy
astropy/visualization/wcsaxes/tests/test_misc.py
astropy/wcs/wcsapi/low_level_api.py
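The tests above revolve around one core pattern: passing a WCS as a Matplotlib projection to obtain a WCSAxes instance. A minimal sketch of that pattern follows; the hand-built header values and the output filename are illustrative, not taken from the tests.

import matplotlib.pyplot as plt
import numpy as np
from astropy.wcs import WCS

# Small celestial WCS built by hand; values are illustrative.
wcs = WCS(naxis=2)
wcs.wcs.ctype = ['RA---MOL', 'DEC--MOL']
wcs.wcs.crpix = [100, 50]
wcs.wcs.cdelt = [-0.4, 0.4]
wcs.wcs.crval = [180.0, 0.0]

# projection=wcs yields a WCSAxes, so the world-coordinate grid and the
# set_xlabel/set_ylabel plumbing exercised by the tests are available.
ax = plt.subplot(projection=wcs)
ax.imshow(np.zeros((100, 200)), origin='lower')
ax.coords.grid(color='white')
ax.set_xlabel('Right Ascension')
ax.set_ylabel('Declination')
plt.savefig('example.png')  # illustrative filename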
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Astronomical and physics constants for Astropy v4.0. See :mod:`astropy.constants` for a complete listing of constants defined in Astropy. """ import warnings from astropy.utils import find_current_module from . import utils as _utils from . import codata2018, iau2015 codata = codata2018 iaudata = iau2015 _utils._set_c(codata, iaudata, find_current_module()) # Overwrite the following for consistency. # https://github.com/astropy/astropy/issues/8920 with warnings.catch_warnings(): warnings.filterwarnings('ignore', 'Constant .*already has a definition') # Solar mass (derived from mass parameter and gravitational constant) M_sun = iau2015.IAU2015( 'M_sun', "Solar mass", iau2015.GM_sun.value / codata2018.G.value, 'kg', ((codata2018.G.uncertainty / codata2018.G.value) * (iau2015.GM_sun.value / codata2018.G.value)), f"IAU 2015 Resolution B 3 + {codata2018.G.reference}", system='si') # Jupiter mass (derived from mass parameter and gravitational constant) M_jup = iau2015.IAU2015( 'M_jup', "Jupiter mass", iau2015.GM_jup.value / codata2018.G.value, 'kg', ((codata2018.G.uncertainty / codata2018.G.value) * (iau2015.GM_jup.value / codata2018.G.value)), f"IAU 2015 Resolution B 3 + {codata2018.G.reference}", system='si') # Earth mass (derived from mass parameter and gravitational constant) M_earth = iau2015.IAU2015( 'M_earth', "Earth mass", iau2015.GM_earth.value / codata2018.G.value, 'kg', ((codata2018.G.uncertainty / codata2018.G.value) * (iau2015.GM_earth.value / codata2018.G.value)), f"IAU 2015 Resolution B 3 + {codata2018.G.reference}", system='si') # Clean up namespace del warnings del find_current_module del _utils
astropy/constants/astropyconst40.py
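The derived masses above all follow the same pattern: divide an IAU 2015 mass parameter by the CODATA gravitational constant and propagate the relative uncertainty of ``G``. A quick sketch of the same arithmetic using the public ``astropy.constants`` names (printed values are approximate):

from astropy import units as u
from astropy.constants import G, GM_sun

# M = GM / G, the derivation used for M_sun above.
M_sun = (GM_sun / G).to(u.kg)
print(M_sun)  # roughly 1.988e30 kg

# The quotient's uncertainty is dominated by G, since GM_sun is known
# far more precisely -- matching the uncertainty expression above.
print((G.uncertainty / G.value) * M_sun.value)  # roughly 4.5e25 kg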
# Licensed under a 3-clause BSD style license - see LICENSE.rst """The ShapedLikeNDArray mixin class and shape-related functions.""" import abc from itertools import zip_longest import numpy as np __all__ = ['NDArrayShapeMethods', 'ShapedLikeNDArray', 'check_broadcast', 'IncompatibleShapeError', 'unbroadcast'] class NDArrayShapeMethods: """Mixin class to provide shape-changing methods. The class proper is assumed to have some underlying data, which are arrays or array-like structures. It must define a ``shape`` property, which gives the shape of those data, as well as an ``_apply`` method that creates a new instance in which a `~numpy.ndarray` method has been applied to those. Furthermore, for consistency with `~numpy.ndarray`, it is recommended to define a setter for the ``shape`` property, which, like the `~numpy.ndarray.shape` property, allows in-place reshaping of the internal data (and, unlike the ``reshape`` method, raises an exception if this is not possible). This class only provides the shape-changing methods and is meant in particular for `~numpy.ndarray` subclasses that need to keep track of other arrays. For other classes, `~astropy.utils.shapes.ShapedLikeNDArray` is recommended. """ # Note to developers: if new methods are added here, be sure to check that # they work properly with the classes that use this, such as Time and # BaseRepresentation, i.e., look at their ``_apply`` methods and add # relevant tests. This is particularly important for methods that imply # copies rather than views of data (see the special-case treatment of # 'flatten' in Time). def __getitem__(self, item): return self._apply('__getitem__', item) def copy(self, *args, **kwargs): """Return an instance containing copies of the internal data. Parameters are as for :meth:`~numpy.ndarray.copy`. """ return self._apply('copy', *args, **kwargs) def reshape(self, *args, **kwargs): """Returns an instance containing the same data with a new shape. Parameters are as for :meth:`~numpy.ndarray.reshape`. Note that it is not always possible to change the shape of an array without copying the data (see :func:`~numpy.reshape` documentation). If you want an error to be raised if the data is copied, you should assign the new shape to the shape attribute (note: this may not be implemented for all classes using ``NDArrayShapeMethods``). """ return self._apply('reshape', *args, **kwargs) def ravel(self, *args, **kwargs): """Return an instance with the array collapsed into one dimension. Parameters are as for :meth:`~numpy.ndarray.ravel`. Note that it is not always possible to unravel an array without copying the data. If you want an error to be raised if the data is copied, you should assign shape ``(-1,)`` to the shape attribute. """ return self._apply('ravel', *args, **kwargs) def flatten(self, *args, **kwargs): """Return a copy with the array collapsed into one dimension. Parameters are as for :meth:`~numpy.ndarray.flatten`. """ return self._apply('flatten', *args, **kwargs) def transpose(self, *args, **kwargs): """Return an instance with the data transposed. Parameters are as for :meth:`~numpy.ndarray.transpose`. All internal data are views of the data of the original. """ return self._apply('transpose', *args, **kwargs) @property def T(self): """Return an instance with the data transposed. Parameters are as for :attr:`~numpy.ndarray.T`. All internal data are views of the data of the original.
""" if self.ndim < 2: return self else: return self.transpose() def swapaxes(self, *args, **kwargs): """Return an instance with the given axes interchanged. Parameters are as for :meth:`~numpy.ndarray.swapaxes`: ``axis1, axis2``. All internal data are views of the data of the original. """ return self._apply('swapaxes', *args, **kwargs) def diagonal(self, *args, **kwargs): """Return an instance with the specified diagonals. Parameters are as for :meth:`~numpy.ndarray.diagonal`. All internal data are views of the data of the original. """ return self._apply('diagonal', *args, **kwargs) def squeeze(self, *args, **kwargs): """Return an instance with single-dimensional shape entries removed Parameters are as for :meth:`~numpy.ndarray.squeeze`. All internal data are views of the data of the original. """ return self._apply('squeeze', *args, **kwargs) def take(self, indices, axis=None, out=None, mode='raise'): """Return a new instance formed from the elements at the given indices. Parameters are as for :meth:`~numpy.ndarray.take`, except that, obviously, no output array can be given. """ if out is not None: return NotImplementedError("cannot pass 'out' argument to 'take.") return self._apply('take', indices, axis=axis, mode=mode) class ShapedLikeNDArray(NDArrayShapeMethods, metaclass=abc.ABCMeta): """Mixin class to provide shape-changing methods. The class proper is assumed to have some underlying data, which are arrays or array-like structures. It must define a ``shape`` property, which gives the shape of those data, as well as an ``_apply`` method that creates a new instance in which a `~numpy.ndarray` method has been applied to those. Furthermore, for consistency with `~numpy.ndarray`, it is recommended to define a setter for the ``shape`` property, which, like the `~numpy.ndarray.shape` property allows in-place reshaping the internal data (and, unlike the ``reshape`` method raises an exception if this is not possible). This class also defines default implementations for ``ndim`` and ``size`` properties, calculating those from the ``shape``. These can be overridden by subclasses if there are faster ways to obtain those numbers. """ # Note to developers: if new methods are added here, be sure to check that # they work properly with the classes that use this, such as Time and # BaseRepresentation, i.e., look at their ``_apply`` methods and add # relevant tests. This is particularly important for methods that imply # copies rather than views of data (see the special-case treatment of # 'flatten' in Time). @property @abc.abstractmethod def shape(self): """The shape of the underlying data.""" @abc.abstractmethod def _apply(method, *args, **kwargs): """Create a new instance, with ``method`` applied to underlying data. The method is any of the shape-changing methods for `~numpy.ndarray` (``reshape``, ``swapaxes``, etc.), as well as those picking particular elements (``__getitem__``, ``take``, etc.). It will be applied to the underlying arrays (e.g., ``jd1`` and ``jd2`` in `~astropy.time.Time`), with the results used to create a new instance. Parameters ---------- method : str Method to be applied to the instance's internal data arrays. args : tuple Any positional arguments for ``method``. kwargs : dict Any keyword arguments for ``method``. 
""" @property def ndim(self): """The number of dimensions of the instance and underlying arrays.""" return len(self.shape) @property def size(self): """The size of the object, as calculated from its shape.""" size = 1 for sh in self.shape: size *= sh return size @property def isscalar(self): return self.shape == () def __len__(self): if self.isscalar: raise TypeError("Scalar {!r} object has no len()" .format(self.__class__.__name__)) return self.shape[0] def __bool__(self): """Any instance should evaluate to True, except when it is empty.""" return self.size > 0 def __getitem__(self, item): try: return self._apply('__getitem__', item) except IndexError: if self.isscalar: raise TypeError('scalar {!r} object is not subscriptable.' .format(self.__class__.__name__)) else: raise def __iter__(self): if self.isscalar: raise TypeError('scalar {!r} object is not iterable.' .format(self.__class__.__name__)) # We cannot just write a generator here, since then the above error # would only be raised once we try to use the iterator, rather than # upon its definition using iter(self). def self_iter(): for idx in range(len(self)): yield self[idx] return self_iter() # Functions that change shape or essentially do indexing. _APPLICABLE_FUNCTIONS = { np.moveaxis, np.rollaxis, np.atleast_1d, np.atleast_2d, np.atleast_3d, np.expand_dims, np.broadcast_to, np.flip, np.fliplr, np.flipud, np.rot90, np.roll, np.delete, } # Functions that themselves defer to a method. Those are all # defined in np.core.fromnumeric, but exclude alen as well as # sort and partition, which make copies before calling the method. _METHOD_FUNCTIONS = {getattr(np, name): {'amax': 'max', 'amin': 'min', 'around': 'round', 'round_': 'round', 'alltrue': 'all', 'sometrue': 'any'}.get(name, name) for name in np.core.fromnumeric.__all__ if name not in ['alen', 'sort', 'partition']} # Add np.copy, which we may as well let defer to our method. _METHOD_FUNCTIONS[np.copy] = 'copy' # Could be made to work with a bit of effort: # np.where, np.compress, np.extract, # np.diag_indices_from, np.triu_indices_from, np.tril_indices_from # np.tile, np.repeat (need .repeat method) # TODO: create a proper implementation. # Furthermore, some arithmetic functions such as np.mean, np.median, # could work for Time, and many more for TimeDelta, so those should # override __array_function__. def __array_function__(self, function, types, args, kwargs): """Wrap numpy functions that make sense.""" if function in self._APPLICABLE_FUNCTIONS: if function is np.broadcast_to: # Ensure that any ndarray subclasses used are # properly propagated. kwargs.setdefault('subok', True) elif (function in {np.atleast_1d, np.atleast_2d, np.atleast_3d} and len(args) > 1): return tuple(function(arg, **kwargs) for arg in args) if self is not args[0]: return NotImplemented return self._apply(function, *args[1:], **kwargs) # For functions that defer to methods, use the corresponding # method/attribute if we have it. Otherwise, fall through. if self is args[0] and function in self._METHOD_FUNCTIONS: method = getattr(self, self._METHOD_FUNCTIONS[function], None) if method is not None: if callable(method): return method(*args[1:], **kwargs) else: # For np.shape, etc., just return the attribute. return method # Fall-back, just pass the arguments on since perhaps the function # works already (see above). 
return function.__wrapped__(*args, **kwargs) class IncompatibleShapeError(ValueError): """Raised when shapes cannot be broadcast against each other.""" def __init__(self, shape_a, shape_a_idx, shape_b, shape_b_idx): super().__init__(shape_a, shape_a_idx, shape_b, shape_b_idx) def check_broadcast(*shapes): """ Determines whether two or more Numpy arrays can be broadcast with each other based on their shape tuple alone. Parameters ---------- *shapes : tuple All shapes to include in the comparison. If only one shape is given, it is passed through unmodified. If no shapes are given, an empty `tuple` is returned. Returns ------- broadcast : `tuple` If all shapes are mutually broadcastable, returns a tuple of the full broadcast shape. """ if len(shapes) == 0: return () elif len(shapes) == 1: return shapes[0] reversed_shapes = (reversed(shape) for shape in shapes) full_shape = [] for dims in zip_longest(*reversed_shapes, fillvalue=1): max_dim = 1 max_dim_idx = None for idx, dim in enumerate(dims): if dim == 1: continue if max_dim == 1: # The first dimension of size greater than 1 max_dim = dim max_dim_idx = idx elif dim != max_dim: raise IncompatibleShapeError( shapes[max_dim_idx], max_dim_idx, shapes[idx], idx) full_shape.append(max_dim) return tuple(full_shape[::-1]) def unbroadcast(array): """ Given an array, return a new array that is the smallest subset of the original array that can be re-broadcast back to the original array. See https://stackoverflow.com/questions/40845769/un-broadcasting-numpy-arrays for more details. """ if array.ndim == 0: return array array = array[tuple((slice(0, 1) if stride == 0 else slice(None)) for stride in array.strides)] # Remove leading ones, which are not needed in numpy broadcasting. first_not_unity = next((i for (i, s) in enumerate(array.shape) if s > 1), array.ndim) return array.reshape(array.shape[first_not_unity:])
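A short usage sketch for the two helpers defined above:

import numpy as np
from astropy.utils.shapes import (check_broadcast, unbroadcast,
                                  IncompatibleShapeError)

# check_broadcast works purely on shape tuples; no arrays are needed.
assert check_broadcast((5, 1, 3), (4, 3)) == (5, 4, 3)
try:
    check_broadcast((2, 3), (4, 3))
except IncompatibleShapeError:
    pass  # (2, 3) and (4, 3) are not mutually broadcastable

# unbroadcast recovers the smallest array that re-broadcasts to the input.
big = np.broadcast_to(np.arange(3), (4, 3))
small = unbroadcast(big)
assert small.shape == (3,)
assert np.all(np.broadcast_to(small, big.shape) == big)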
# Licensed under a 3-clause BSD style license - see LICENSE.rst from packaging.version import Version import pytest import numpy as np import matplotlib import matplotlib.pyplot as plt from contextlib import nullcontext from matplotlib.contour import QuadContourSet from astropy import units as u from astropy.wcs import WCS from astropy.io import fits from astropy.coordinates import SkyCoord from astropy.utils.data import get_pkg_data_filename from astropy.visualization.wcsaxes.core import WCSAxes from astropy.visualization.wcsaxes.frame import ( EllipticalFrame, RectangularFrame, RectangularFrame1D) from astropy.visualization.wcsaxes.utils import get_coord_meta from astropy.visualization.wcsaxes.transforms import CurvedTransform ft_version = Version(matplotlib.ft2font.__freetype_version__) FREETYPE_261 = ft_version == Version("2.6.1") TEX_UNAVAILABLE = not matplotlib.checkdep_usetex(True) MATPLOTLIB_GT_3_4_2 = Version(matplotlib.__version__) > Version('3.4.2') def teardown_function(function): plt.close('all') def test_grid_regression(ignore_matplotlibrc): # Regression test for a bug that meant that if the rc parameter # axes.grid was set to True, WCSAxes would crash upon initalization. plt.rc('axes', grid=True) fig = plt.figure(figsize=(3, 3)) WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) def test_format_coord_regression(ignore_matplotlibrc, tmpdir): # Regression test for a bug that meant that if format_coord was called by # Matplotlib before the axes were drawn, an error occurred. fig = plt.figure(figsize=(3, 3)) ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) assert ax.format_coord(10, 10) == "" assert ax.coords[0].format_coord(10) == "" assert ax.coords[1].format_coord(10) == "" fig.savefig(tmpdir.join('nothing').strpath) assert ax.format_coord(10, 10) == "10.0 10.0 (world)" assert ax.coords[0].format_coord(10) == "10.0" assert ax.coords[1].format_coord(10) == "10.0" TARGET_HEADER = fits.Header.fromstring(""" NAXIS = 2 NAXIS1 = 200 NAXIS2 = 100 CTYPE1 = 'RA---MOL' CRPIX1 = 500 CRVAL1 = 180.0 CDELT1 = -0.4 CUNIT1 = 'deg ' CTYPE2 = 'DEC--MOL' CRPIX2 = 400 CRVAL2 = 0.0 CDELT2 = 0.4 CUNIT2 = 'deg ' COORDSYS= 'icrs ' """, sep='\n') @pytest.mark.parametrize('grid_type', ['lines', 'contours']) def test_no_numpy_warnings(ignore_matplotlibrc, tmpdir, grid_type): ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) ax.imshow(np.zeros((100, 200))) ax.coords.grid(color='white', grid_type=grid_type) if MATPLOTLIB_GT_3_4_2 and grid_type == 'contours': ctx = pytest.raises(AttributeError, match='dpi') else: ctx = nullcontext() with pytest.warns(None) as warning_lines, ctx: plt.savefig(tmpdir.join('test.png').strpath) # There should be no warnings raised if some pixels are outside WCS # (since this is normal). # BUT catch_warning was ignoring some warnings before, so now we # have to catch it. Otherwise, the pytest filterwarnings=error # setting in setup.cfg will fail this test. # There are actually multiple warnings but they are all similar. 
for w in warning_lines: w_msg = str(w.message) assert ('converting a masked element to nan' in w_msg or 'No contour levels were found within the data range' in w_msg or 'np.asscalar(a) is deprecated since NumPy v1.16' in w_msg or 'PY_SSIZE_T_CLEAN will be required' in w_msg) def test_invalid_frame_overlay(ignore_matplotlibrc): # Make sure a nice error is returned if a frame doesn't exist ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) with pytest.raises(ValueError) as exc: ax.get_coords_overlay('banana') assert exc.value.args[0] == 'Frame banana not found' with pytest.raises(ValueError) as exc: get_coord_meta('banana') assert exc.value.args[0] == 'Unknown frame: banana' def test_plot_coord_transform(ignore_matplotlibrc): twoMASS_k_header = get_pkg_data_filename('data/2MASS_k_header') twoMASS_k_header = fits.Header.fromtextfile(twoMASS_k_header) fig = plt.figure(figsize=(6, 6)) ax = fig.add_axes([0.15, 0.15, 0.8, 0.8], projection=WCS(twoMASS_k_header), aspect='equal') ax.set_xlim(-0.5, 720.5) ax.set_ylim(-0.5, 720.5) c = SkyCoord(359.76045223*u.deg, 0.26876217*u.deg) with pytest.raises(TypeError): ax.plot_coord(c, 'o', transform=ax.get_transform('galactic')) def test_set_label_properties(ignore_matplotlibrc): # Regression test to make sure that arguments passed to # set_xlabel/set_ylabel are passed to the underlying coordinate helpers ax = plt.subplot(1, 1, 1, projection=WCS(TARGET_HEADER)) ax.set_xlabel('Test x label', labelpad=2, color='red') ax.set_ylabel('Test y label', labelpad=3, color='green') assert ax.coords[0].axislabels.get_text() == 'Test x label' assert ax.coords[0].axislabels.get_minpad('b') == 2 assert ax.coords[0].axislabels.get_color() == 'red' assert ax.coords[1].axislabels.get_text() == 'Test y label' assert ax.coords[1].axislabels.get_minpad('l') == 3 assert ax.coords[1].axislabels.get_color() == 'green' assert ax.get_xlabel() == 'Test x label' assert ax.get_ylabel() == 'Test y label' GAL_HEADER = fits.Header.fromstring(""" SIMPLE = T / conforms to FITS standard BITPIX = -32 / array data type NAXIS = 3 / number of array dimensions NAXIS1 = 31 NAXIS2 = 2881 NAXIS3 = 480 EXTEND = T CTYPE1 = 'DISTMOD ' CRVAL1 = 3.5 CDELT1 = 0.5 CRPIX1 = 1.0 CTYPE2 = 'GLON-CAR' CRVAL2 = 180.0 CDELT2 = -0.125 CRPIX2 = 1.0 CTYPE3 = 'GLAT-CAR' CRVAL3 = 0.0 CDELT3 = 0.125 CRPIX3 = 241.0 """, sep='\n') def test_slicing_warnings(ignore_matplotlibrc, tmpdir): # Regression test to make sure that no warnings are emitted by the tick # locator for the sliced axis when slicing a cube. # Scalar case wcs3d = WCS(naxis=3) wcs3d.wcs.ctype = ['x', 'y', 'z'] wcs3d.wcs.cunit = ['deg', 'deg', 'km/s'] wcs3d.wcs.crpix = [614.5, 856.5, 333] wcs3d.wcs.cdelt = [6.25, 6.25, 23] wcs3d.wcs.crval = [0., 0., 1.] 
with pytest.warns(None) as warning_lines: plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1)) plt.savefig(tmpdir.join('test.png').strpath) # For easy debugging if there are indeed warnings for warning in warning_lines: # https://github.com/astropy/astropy/issues/9690 if 'PY_SSIZE_T_CLEAN' not in str(warning.message): raise AssertionError(f'Unexpected warning: {warning}') # Angle case wcs3d = WCS(GAL_HEADER) with pytest.warns(None) as warning_lines: plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 2)) plt.savefig(tmpdir.join('test.png').strpath) # For easy debugging if there are indeed warnings for warning in warning_lines: # https://github.com/astropy/astropy/issues/9690 if 'PY_SSIZE_T_CLEAN' not in str(warning.message): raise AssertionError(f'Unexpected warning: {warning}') def test_plt_xlabel_ylabel(tmpdir): # Regression test for a bug that happened when using plt.xlabel # and plt.ylabel with Matplotlib 3.0 plt.subplot(projection=WCS()) plt.xlabel('Galactic Longitude') plt.ylabel('Galactic Latitude') plt.savefig(tmpdir.join('test.png').strpath) def test_grid_type_contours_transform(tmpdir): # Regression test for a bug that caused grid_type='contours' to not work # with custom transforms class CustomTransform(CurvedTransform): # We deliberately don't define the inverse, and has_inverse should # default to False. def transform(self, values): return values * 1.3 transform = CustomTransform() coord_meta = {'type': ('scalar', 'scalar'), 'unit': (u.m, u.s), 'wrap': (None, None), 'name': ('x', 'y')} fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], transform=transform, coord_meta=coord_meta) fig.add_axes(ax) ax.grid(grid_type='contours') fig.savefig(tmpdir.join('test.png').strpath) def test_plt_imshow_origin(): # Regression test for a bug that caused origin to be set to upper when # plt.imshow was called. ax = plt.subplot(projection=WCS()) plt.imshow(np.ones((2, 2))) assert ax.get_xlim() == (-0.5, 1.5) assert ax.get_ylim() == (-0.5, 1.5) def test_ax_imshow_origin(): # Regression test for a bug that caused origin to be set to upper when # ax.imshow was called with no origin ax = plt.subplot(projection=WCS()) ax.imshow(np.ones((2, 2))) assert ax.get_xlim() == (-0.5, 1.5) assert ax.get_ylim() == (-0.5, 1.5) def test_grid_contour_large_spacing(tmpdir): # Regression test for a bug that caused a crash when grid was called and # didn't produce grid lines (due e.g. to too large spacing) and was then # called again. filename = tmpdir.join('test.png').strpath ax = plt.subplot(projection=WCS()) ax.set_xlim(-0.5, 1.5) ax.set_ylim(-0.5, 1.5) ax.coords[0].set_ticks(values=[] * u.one) ax.coords[0].grid(grid_type='contours') plt.savefig(filename) ax.coords[0].grid(grid_type='contours') plt.savefig(filename) def test_contour_return(): # Regression test for a bug that caused contour and contourf to return None # instead of the contour object. fig = plt.figure() ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8]) fig.add_axes(ax) cset = ax.contour(np.arange(16).reshape(4, 4), transform=ax.get_transform('world')) assert isinstance(cset, QuadContourSet) cset = ax.contourf(np.arange(16).reshape(4, 4), transform=ax.get_transform('world')) assert isinstance(cset, QuadContourSet) def test_contour_empty(): # Regression test for a bug that caused contour to crash if no contours # were present. 
    fig = plt.figure()
    ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8])
    fig.add_axes(ax)
    with pytest.warns(UserWarning, match='No contour levels were found within the data range'):
        ax.contour(np.zeros((4, 4)), transform=ax.get_transform('world'))


def test_iterate_coords(ignore_matplotlibrc, tmpdir):

    # Regression test for a bug that caused ax.coords to return too few axes

    wcs3d = WCS(naxis=3)
    wcs3d.wcs.ctype = ['x', 'y', 'z']
    wcs3d.wcs.cunit = ['deg', 'deg', 'km/s']
    wcs3d.wcs.crpix = [614.5, 856.5, 333]
    wcs3d.wcs.cdelt = [6.25, 6.25, 23]
    wcs3d.wcs.crval = [0., 0., 1.]

    ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1))

    x, y, z = ax.coords


def test_invalid_slices_errors(ignore_matplotlibrc):

    # Make sure that users get a clear message when specifying a WCS with
    # >2 dimensions without giving the 'slices' argument, or if the 'slices'
    # argument has too many/few elements.

    wcs3d = WCS(naxis=3)
    wcs3d.wcs.ctype = ['x', 'y', 'z']

    plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1))

    with pytest.raises(ValueError) as exc:
        plt.subplot(1, 1, 1, projection=wcs3d)
    assert exc.value.args[0] == ("WCS has more than 2 pixel dimensions, so "
                                 "'slices' should be set")

    with pytest.raises(ValueError) as exc:
        plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1, 2))
    assert exc.value.args[0] == ("'slices' should have as many elements as "
                                 "WCS has pixel dimensions (should be 3)")

    wcs2d = WCS(naxis=2)
    wcs2d.wcs.ctype = ['x', 'y']

    ax = plt.subplot(1, 1, 1, projection=wcs2d)
    assert ax.frame_class is RectangularFrame
    ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=('x', 'y'))
    assert ax.frame_class is RectangularFrame
    ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=('y', 'x'))
    assert ax.frame_class is RectangularFrame
    ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=['x', 'y'])
    assert ax.frame_class is RectangularFrame
    ax = plt.subplot(1, 1, 1, projection=wcs2d, slices=(1, 'x'))
    assert ax.frame_class is RectangularFrame1D

    wcs1d = WCS(naxis=1)
    wcs1d.wcs.ctype = ['x']

    ax = plt.subplot(1, 1, 1, projection=wcs1d)
    assert ax.frame_class is RectangularFrame1D

    with pytest.raises(ValueError):
        plt.subplot(1, 1, 1, projection=wcs2d, slices=(1, 'y'))


EXPECTED_REPR_1 = """
<CoordinatesMap with 3 world coordinates:

  index            aliases               type    unit wrap format_unit visible
  ----- ------------------------------ --------- ---- ---- ----------- -------
      0                   distmod dist    scalar      None                  no
      1 pos.galactic.lon glon-car glon longitude  deg  360         deg     yes
      2 pos.galactic.lat glat-car glat  latitude  deg None         deg     yes

>
""".strip()

EXPECTED_REPR_2 = """
<CoordinatesMap with 3 world coordinates:

  index            aliases               type    unit wrap format_unit visible
  ----- ------------------------------ --------- ---- ---- ----------- -------
      0                   distmod dist    scalar      None                 yes
      1 pos.galactic.lon glon-car glon longitude  deg  360         deg     yes
      2 pos.galactic.lat glat-car glat  latitude  deg None         deg     yes

>
""".strip()


def test_repr(ignore_matplotlibrc):

    # Unit test to make sure __repr__ looks as expected

    wcs3d = WCS(GAL_HEADER)

    # Cube header has world coordinates as distance, lon, lat, so start off
    # by slicing in a way that we select just lon,lat:
    ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=(1, 'x', 'y'))
    assert repr(ax.coords) == EXPECTED_REPR_1

    # Now slice in a way that all world coordinates are still present:
    ax = plt.subplot(1, 1, 1, projection=wcs3d, slices=('x', 'y', 1))
    assert repr(ax.coords) == EXPECTED_REPR_2


@pytest.fixture
def time_spectral_wcs_2d():
    wcs = WCS(naxis=2)
    wcs.wcs.ctype = ['FREQ', 'TIME']
    wcs.wcs.set()
    return wcs


def test_time_wcs(time_spectral_wcs_2d):
    # Regression test for a bug that caused WCSAxes to error when using a WCS
    # with a time axis.
    plt.subplot(projection=time_spectral_wcs_2d)


@pytest.mark.skipif('TEX_UNAVAILABLE')
def test_simplify_labels_usetex(ignore_matplotlibrc, tmpdir):
    """Regression test for https://github.com/astropy/astropy/issues/8004."""
    plt.rc('text', usetex=True)

    header = {
        'NAXIS': 2,
        'NAXIS1': 360,
        'NAXIS2': 180,
        'CRPIX1': 180.5,
        'CRPIX2': 90.5,
        'CRVAL1': 180.0,
        'CRVAL2': 0.0,
        'CDELT1': -2 * np.sqrt(2) / np.pi,
        'CDELT2': 2 * np.sqrt(2) / np.pi,
        'CTYPE1': 'RA---MOL',
        'CTYPE2': 'DEC--MOL',
        'RADESYS': 'ICRS'}

    wcs = WCS(header)
    fig, ax = plt.subplots(
        subplot_kw=dict(frame_class=EllipticalFrame, projection=wcs))
    ax.set_xlim(-0.5, header['NAXIS1'] - 0.5)
    ax.set_ylim(-0.5, header['NAXIS2'] - 0.5)
    ax.coords[0].set_ticklabel(exclude_overlapping=True)
    ax.coords[1].set_ticklabel(exclude_overlapping=True)
    ax.coords[0].set_ticks(spacing=45 * u.deg)
    ax.coords[1].set_ticks(spacing=30 * u.deg)
    ax.grid()

    fig.savefig(tmpdir / 'plot.png')


@pytest.mark.parametrize('frame_class', [RectangularFrame, EllipticalFrame])
def test_set_labels_with_coords(ignore_matplotlibrc, frame_class):
    """Test if ``axis.set_xlabel()`` calls the correct ``coords[i]_set_axislabel()``
    in a WCS plot. Regression test for
    https://github.com/astropy/astropy/issues/10435.
    """
    labels = ['RA', 'Declination']
    header = {
        'NAXIS': 2,
        'NAXIS1': 360,
        'NAXIS2': 180,
        'CRPIX1': 180.5,
        'CRPIX2': 90.5,
        'CRVAL1': 180.0,
        'CRVAL2': 0.0,
        'CDELT1': -2 * np.sqrt(2) / np.pi,
        'CDELT2': 2 * np.sqrt(2) / np.pi,
        'CTYPE1': 'RA---AIT',
        'CTYPE2': 'DEC--AIT'}

    wcs = WCS(header)
    fig, ax = plt.subplots(
        subplot_kw=dict(frame_class=frame_class, projection=wcs))
    ax.set_xlabel(labels[0])
    ax.set_ylabel(labels[1])
    assert ax.get_xlabel() == labels[0]
    assert ax.get_ylabel() == labels[1]
    for i in range(2):
        assert ax.coords[i].get_axislabel() == labels[i]


@pytest.mark.parametrize('atol', [0.2, 1.0e-8])
def test_bbox_size(atol):
    # Test for the size of a WCSAxes bbox (only have Matplotlib >= 3.0 now)
    extents = [11.38888888888889, 3.5, 576.0, 432.0]

    fig = plt.figure()
    ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8])
    fig.add_axes(ax)
    fig.canvas.draw()
    renderer = fig.canvas.renderer
    ax_bbox = ax.get_tightbbox(renderer)

    # Enforce strict test only with reference Freetype version
    if atol < 0.1 and not FREETYPE_261:
        pytest.xfail("Exact BoundingBox dimensions are only ensured with FreeType 2.6.1")
    assert np.allclose(ax_bbox.extents, extents, atol=atol)
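For orientation, a minimal sketch of the pattern these tests exercise: attach a WCSAxes to a figure by hand and force a draw. This is illustrative only and not part of the test module; the bare two-axis WCS is a placeholder.

import matplotlib.pyplot as plt
import numpy as np
from astropy.wcs import WCS
from astropy.visualization.wcsaxes import WCSAxes

fig = plt.figure(figsize=(3, 3))
# Attach the axes manually, as the tests above do, rather than via
# plt.subplot(projection=...).
ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], wcs=WCS(naxis=2))
fig.add_axes(ax)
ax.imshow(np.zeros((4, 4)))
fig.canvas.draw()  # exercises the tick/label machinery tested above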
pllim/astropy
astropy/visualization/wcsaxes/tests/test_misc.py
astropy/utils/shapes.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This file contains the high-level functions to read a
VOTable file.
"""

# STDLIB
import io
import os
import sys
import textwrap
import warnings

# LOCAL
from . import exceptions
from . import tree
from astropy.utils.xml import iterparser
from astropy.utils import data
from astropy.utils.decorators import deprecated_renamed_argument
from astropy.utils.exceptions import AstropyDeprecationWarning


__all__ = ['parse', 'parse_single_table', 'from_table', 'writeto', 'validate',
           'reset_vo_warnings']

VERIFY_OPTIONS = ['ignore', 'warn', 'exception']


@deprecated_renamed_argument('pedantic', 'verify', pending=True, since='4.0')
def parse(source, columns=None, invalid='exception', verify=None,
          chunk_size=tree.DEFAULT_CHUNK_SIZE, table_number=None,
          table_id=None, filename=None, unit_format=None,
          datatype_mapping=None, _debug_python_based_parser=False):
    """
    Parses a VOTABLE_ xml file (or file-like object), and returns a
    `~astropy.io.votable.tree.VOTableFile` object.

    Parameters
    ----------
    source : path-like or file-like
        Path or file-like object containing a VOTABLE_ xml file.
        If file, must be readable.

    columns : sequence of str, optional
        List of field names to include in the output.  The default is
        to include all fields.

    invalid : str, optional
        One of the following values:

            - 'exception': throw an exception when an invalid value is
              encountered (default)

            - 'mask': mask out invalid values

    verify : {'ignore', 'warn', 'exception'}, optional
        When ``'exception'``, raise an error when the file violates the
        spec, otherwise either issue a warning (``'warn'``) or silently
        continue (``'ignore'``).  Warnings may be controlled using the
        standard Python mechanisms.  See the `warnings` module in the
        Python standard library for more information.  When not
        provided, uses the configuration setting
        ``astropy.io.votable.verify``, which defaults to 'ignore'.

        .. versionchanged:: 4.0
           ``verify`` replaces the ``pedantic`` argument, which will be
           deprecated in future.

    chunk_size : int, optional
        The number of rows to read before converting to an array.
        Higher numbers are likely to be faster, but will consume more
        memory.

    table_number : int, optional
        The number of the table in the file to read in.  If `None`, all
        tables will be read.  If a number, 0 refers to the first table
        in the file, and only that numbered table will be parsed and
        read in.  Should not be used with ``table_id``.

    table_id : str, optional
        The ID of the table in the file to read in.  Should not be
        used with ``table_number``.

    filename : str, optional
        A filename, URL or other identifier to use in error messages.
        If *filename* is None and *source* is a string (i.e. a path),
        then *source* will be used as a filename for error messages.
        Therefore, *filename* is only required when source is a
        file-like object.

    unit_format : str, astropy.units.format.Base instance or None, optional
        The unit format to use when parsing unit attributes.  If a
        string, must be the name of a unit formatter.  The built-in
        formats include ``generic``, ``fits``, ``cds``, and
        ``vounit``.  A custom formatter may be provided by passing a
        `~astropy.units.format.Base` instance.  If `None` (default),
        the unit format to use will be the one specified by the
        VOTable specification (which is ``cds`` up to version 1.3 of
        VOTable, and ``vounit`` in more recent versions of the spec).

    datatype_mapping : dict, optional
        A mapping of datatype names (`str`) to valid VOTable datatype
        names (str).  For example, if the file being read contains the
        datatype "unsignedInt" (an invalid datatype in VOTable),
        include the mapping ``{"unsignedInt": "long"}``.

    Returns
    -------
    votable : `~astropy.io.votable.tree.VOTableFile` object

    See also
    --------
    astropy.io.votable.exceptions : The exceptions this function may raise.
    """
    from . import conf

    invalid = invalid.lower()
    if invalid not in ('exception', 'mask'):
        raise ValueError("accepted values of ``invalid`` are: "
                         "``'exception'`` or ``'mask'``.")

    if verify is None:
        # NOTE: since the pedantic argument isn't fully deprecated yet, we need
        # to catch the deprecation warning that occurs when accessing the
        # configuration item, but only if it is for the pedantic option in the
        # [io.votable] section.
        with warnings.catch_warnings():
            warnings.filterwarnings(
                "ignore",
                r"Config parameter \'pedantic\' in section \[io.votable\]",
                AstropyDeprecationWarning)
            conf_verify_lowercase = conf.verify.lower()

        # We need to allow verify to be booleans as strings since the
        # configuration framework doesn't make it easy/possible to have mixed
        # types.
        if conf_verify_lowercase in ['false', 'true']:
            verify = conf_verify_lowercase == 'true'
        else:
            verify = conf_verify_lowercase

    if isinstance(verify, bool):
        verify = 'exception' if verify else 'warn'
    elif verify not in VERIFY_OPTIONS:
        raise ValueError(f"verify should be one of {'/'.join(VERIFY_OPTIONS)}")

    if datatype_mapping is None:
        datatype_mapping = {}

    config = {
        'columns': columns,
        'invalid': invalid,
        'verify': verify,
        'chunk_size': chunk_size,
        'table_number': table_number,
        'filename': filename,
        'unit_format': unit_format,
        'datatype_mapping': datatype_mapping
    }

    if filename is None and isinstance(source, str):
        config['filename'] = source

    with iterparser.get_xml_iterator(
            source,
            _debug_python_based_parser=_debug_python_based_parser) as iterator:
        return tree.VOTableFile(
            config=config, pos=(1, 1)).parse(iterator, config)


def parse_single_table(source, **kwargs):
    """
    Parses a VOTABLE_ xml file (or file-like object), reading and
    returning only the first `~astropy.io.votable.tree.Table`
    instance.

    See `parse` for a description of the keyword arguments.

    Returns
    -------
    votable : `~astropy.io.votable.tree.Table` object
    """
    if kwargs.get('table_number') is None:
        kwargs['table_number'] = 0

    votable = parse(source, **kwargs)

    return votable.get_first_table()


def writeto(table, file, tabledata_format=None):
    """
    Writes a `~astropy.io.votable.tree.VOTableFile` to a VOTABLE_ xml
    file.

    Parameters
    ----------
    table : `~astropy.io.votable.tree.VOTableFile` or `~astropy.table.Table` instance.

    file : str or writable file-like
        Path or file object to write to

    tabledata_format : str, optional
        Override the format of the table(s) data to write.  Must be
        one of ``tabledata`` (text representation), ``binary`` or
        ``binary2``.  By default, use the format that was specified in
        each ``table`` object as it was created or read in.  See
        :ref:`astropy:votable-serialization`.
    """
    from astropy.table import Table
    if isinstance(table, Table):
        table = tree.VOTableFile.from_table(table)
    elif not isinstance(table, tree.VOTableFile):
        raise TypeError(
            "first argument must be astropy.io.vo.VOTableFile or "
            "astropy.table.Table instance")
    table.to_xml(file, tabledata_format=tabledata_format,
                 _debug_python_based_parser=True)


def validate(source, output=sys.stdout, xmllint=False, filename=None):
    """
    Prints a validation report for the given file.

    Parameters
    ----------
    source : path-like or file-like
        Path to a VOTABLE_ xml file or `~pathlib.Path`
        object having Path to a VOTABLE_ xml file.
        If file-like object, must be readable.

    output : file-like, optional
        Where to output the report.  Defaults to ``sys.stdout``.
        If `None`, the output will be returned as a string.
        Must be writable.

    xmllint : bool, optional
        When `True`, also send the file to ``xmllint`` for schema and
        DTD validation.  Requires that ``xmllint`` is installed.  The
        default is `False`.  ``source`` must be a file on the local
        filesystem in order for ``xmllint`` to work.

    filename : str, optional
        A filename to use in the error messages.  If not provided, one
        will be automatically determined from ``source``.

    Returns
    -------
    is_valid : bool or str
        Returns `True` if no warnings were found.  If ``output`` is
        `None`, the return value will be a string.
    """
    from astropy.utils.console import print_code_line, color_print

    # If no output stream is given, accumulate the report in a StringIO
    # buffer and return it as a string instead.
    return_as_str = False
    if output is None:
        output = io.StringIO()
        return_as_str = True

    lines = []
    votable = None

    reset_vo_warnings()

    with data.get_readable_fileobj(source, encoding='binary') as fd:
        content = fd.read()
    content_buffer = io.BytesIO(content)
    content_buffer.seek(0)

    if filename is None:
        if isinstance(source, str):
            filename = source
        elif hasattr(source, 'name'):
            filename = source.name
        elif hasattr(source, 'url'):
            filename = source.url
        else:
            filename = "<unknown>"

    with warnings.catch_warnings(record=True) as warning_lines:
        warnings.resetwarnings()
        warnings.simplefilter("always", exceptions.VOWarning, append=True)
        try:
            votable = parse(content_buffer, verify='warn', filename=filename)
        except ValueError as e:
            lines.append(str(e))

    lines = [str(x.message) for x in warning_lines
             if issubclass(x.category, exceptions.VOWarning)] + lines

    content_buffer.seek(0)
    output.write(f"Validation report for {filename}\n\n")

    if len(lines):
        xml_lines = iterparser.xml_readlines(content_buffer)

        for warning in lines:
            w = exceptions.parse_vowarning(warning)

            if not w['is_something']:
                output.write(w['message'])
                output.write('\n\n')
            else:
                line = xml_lines[w['nline'] - 1]
                warning = w['warning']
                if w['is_warning']:
                    color = 'yellow'
                else:
                    color = 'red'
                color_print(
                    f"{w['nline']:d}: ", '',
                    warning or 'EXC', color,
                    ': ', '',
                    textwrap.fill(
                        w['message'],
                        initial_indent='          ',
                        subsequent_indent='  ').lstrip(),
                    file=output)
                print_code_line(line, w['nchar'], file=output)
                output.write('\n')
    else:
        output.write('astropy.io.votable found no violations.\n\n')

    success = 0
    if xmllint and os.path.exists(filename):
        from . import xmlutil

        if votable is None:
            version = "1.1"
        else:
            version = votable.version
        success, stdout, stderr = xmlutil.validate_schema(
            filename, version)

        if success != 0:
            output.write(
                'xmllint schema violations:\n\n')
            output.write(stderr.decode('utf-8'))
        else:
            output.write('xmllint passed\n')

    if return_as_str:
        return output.getvalue()

    return len(lines) == 0 and success == 0


def from_table(table, table_id=None):
    """
    Given an `~astropy.table.Table` object, return a
    `~astropy.io.votable.tree.VOTableFile` file structure containing
    just that single table.

    Parameters
    ----------
    table : `~astropy.table.Table` instance

    table_id : str, optional
        If not `None`, set the given id on the returned
        `~astropy.io.votable.tree.Table` instance.

    Returns
    -------
    votable : `~astropy.io.votable.tree.VOTableFile` instance
    """
    return tree.VOTableFile.from_table(table, table_id=table_id)


def is_votable(source):
    """
    Reads the header of a file to determine if it is a VOTable file.

    Parameters
    ----------
    source : path-like or file-like
        Path or file object containing a VOTABLE_ xml file.
        If file, must be readable.

    Returns
    -------
    is_votable : bool
        Returns `True` if the given file is a VOTable file.
    """
    try:
        with iterparser.get_xml_iterator(source) as iterator:
            for start, tag, d, pos in iterator:
                if tag != 'xml':
                    return False
                break

            for start, tag, d, pos in iterator:
                if tag != 'VOTABLE':
                    return False
                break

            return True

    except ValueError:
        return False


def reset_vo_warnings():
    """
    Resets all of the vo warning state so that warnings that
    have already been emitted will be emitted again.  This is
    used, for example, by `validate` which must emit all
    warnings each time it is called.
    """
    from . import converters, xmlutil

    # -----------------------------------------------------------#
    #  This is a special variable used by the Python warnings    #
    #  infrastructure to keep track of warnings that have        #
    #  already been seen.  Since we want to get every single     #
    #  warning out of this, we have to delete all of them first. #
    # -----------------------------------------------------------#
    for module in (converters, exceptions, tree, xmlutil):
        if hasattr(module, '__warningregistry__'):
            del module.__warningregistry__
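As a rough usage sketch of the public functions above (not part of the module; "example.xml" and "copy.xml" are placeholder paths):

from astropy.io.votable import parse, validate, from_table, writeto

# Parse with non-fatal verification; spec violations become warnings.
votable = parse("example.xml", verify="warn")
table = votable.get_first_table().to_table()

# validate() writes a report to sys.stdout by default and returns a bool
# (or the report text itself when called with output=None).
ok = validate("example.xml")

# Round-trip: wrap the astropy Table in a VOTableFile and write it out.
writeto(from_table(table), "copy.xml")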
pllim/astropy
astropy/visualization/wcsaxes/tests/test_misc.py
astropy/io/votable/table.py
import numbers
from collections import defaultdict

import numpy as np

from astropy.utils import isiterable
from astropy.utils.decorators import lazyproperty

from ..low_level_api import BaseLowLevelWCS
from .base import BaseWCSWrapper

__all__ = ['sanitize_slices', 'SlicedLowLevelWCS']


def sanitize_slices(slices, ndim):
    """
    Given a slice as input sanitise it to an easier to parse format.

    This function returns a list ``ndim`` long containing slice objects (or ints).
    """

    if not isinstance(slices, (tuple, list)):  # We just have a single int
        slices = (slices,)

    if len(slices) > ndim:
        raise ValueError(
            f"The dimensionality of the specified slice {slices} can not be greater "
            f"than the dimensionality ({ndim}) of the wcs.")

    if any((isiterable(s) for s in slices)):
        raise IndexError("This slice is invalid, only integer or range slices are supported.")

    slices = list(slices)

    if Ellipsis in slices:
        if slices.count(Ellipsis) > 1:
            raise IndexError("an index can only have a single ellipsis ('...')")

        # Replace the Ellipsis with the correct number of slice(None)s
        e_ind = slices.index(Ellipsis)
        slices.remove(Ellipsis)
        n_e = ndim - len(slices)
        for i in range(n_e):
            ind = e_ind + i
            slices.insert(ind, slice(None))

    for i in range(ndim):
        if i < len(slices):
            slc = slices[i]
            if isinstance(slc, slice):
                if slc.step and slc.step != 1:
                    raise IndexError("Slicing WCS with a step is not supported.")
            elif not isinstance(slc, numbers.Integral):
                raise IndexError("Only integer or range slices are accepted.")
        else:
            slices.append(slice(None))

    return slices


def combine_slices(slice1, slice2):
    """
    Given two slices that can be applied to a 1-d array, find the resulting
    slice that corresponds to the combination of both slices.  We assume that
    slice2 can be an integer, but slice1 cannot.
    """

    if isinstance(slice1, slice) and slice1.step is not None:
        raise ValueError('Only slices with steps of 1 are supported')

    if isinstance(slice2, slice) and slice2.step is not None:
        raise ValueError('Only slices with steps of 1 are supported')

    if isinstance(slice2, numbers.Integral):
        if slice1.start is None:
            return slice2
        else:
            return slice2 + slice1.start

    if slice1.start is None:
        if slice1.stop is None:
            return slice2
        else:
            if slice2.stop is None:
                return slice(slice2.start, slice1.stop)
            else:
                return slice(slice2.start, min(slice1.stop, slice2.stop))
    else:
        if slice2.start is None:
            start = slice1.start
        else:
            start = slice1.start + slice2.start
        if slice2.stop is None:
            stop = slice1.stop
        else:
            if slice1.start is None:
                stop = slice2.stop
            else:
                stop = slice2.stop + slice1.start
            if slice1.stop is not None:
                stop = min(slice1.stop, stop)

    return slice(start, stop)


class SlicedLowLevelWCS(BaseWCSWrapper):
    """
    A Low Level WCS wrapper which applies an array slice to a WCS.

    This class does not modify the underlying WCS object and can therefore
    drop coupled dimensions as it stores which pixel and world dimensions
    have been sliced out (or modified) in the underlying WCS and returns
    the modified results on all the Low Level WCS methods.

    Parameters
    ----------
    wcs : `~astropy.wcs.wcsapi.BaseLowLevelWCS`
        The WCS to slice.
    slices : `slice` or `tuple` or `int`
        A valid array slice to apply to the WCS.

    """
    def __init__(self, wcs, slices):

        slices = sanitize_slices(slices, wcs.pixel_n_dim)

        if isinstance(wcs, SlicedLowLevelWCS):
            # Here we combine the current slices with the previous slices
            # to avoid ending up with many nested WCSes
            self._wcs = wcs._wcs
            slices_original = wcs._slices_array.copy()
            for ipixel in range(wcs.pixel_n_dim):
                ipixel_orig = wcs._wcs.pixel_n_dim - 1 - wcs._pixel_keep[ipixel]
                ipixel_new = wcs.pixel_n_dim - 1 - ipixel
                slices_original[ipixel_orig] = combine_slices(slices_original[ipixel_orig],
                                                              slices[ipixel_new])
            self._slices_array = slices_original
        else:
            self._wcs = wcs
            self._slices_array = slices

        self._slices_pixel = self._slices_array[::-1]

        # figure out which pixel dimensions have been kept, then use axis correlation
        # matrix to figure out which world dims are kept
        self._pixel_keep = np.nonzero([not isinstance(self._slices_pixel[ip], numbers.Integral)
                                       for ip in range(self._wcs.pixel_n_dim)])[0]

        # axis_correlation_matrix[world, pixel]
        self._world_keep = np.nonzero(
            self._wcs.axis_correlation_matrix[:, self._pixel_keep].any(axis=1))[0]

        if len(self._pixel_keep) == 0 or len(self._world_keep) == 0:
            raise ValueError("Cannot slice WCS: the resulting WCS should have "
                             "at least one pixel and one world dimension.")

    @lazyproperty
    def dropped_world_dimensions(self):
        """
        Information describing the dropped world dimensions.
        """
        world_coords = self._pixel_to_world_values_all(*[0]*len(self._pixel_keep))
        dropped_info = defaultdict(list)

        for i in range(self._wcs.world_n_dim):

            if i in self._world_keep:
                continue

            if "world_axis_object_classes" not in dropped_info:
                dropped_info["world_axis_object_classes"] = dict()

            wao_classes = self._wcs.world_axis_object_classes
            wao_components = self._wcs.world_axis_object_components

            dropped_info["value"].append(world_coords[i])
            dropped_info["world_axis_names"].append(self._wcs.world_axis_names[i])
            dropped_info["world_axis_physical_types"].append(self._wcs.world_axis_physical_types[i])
            dropped_info["world_axis_units"].append(self._wcs.world_axis_units[i])
            dropped_info["world_axis_object_components"].append(wao_components[i])
            dropped_info["world_axis_object_classes"].update(dict(
                filter(
                    lambda x: x[0] == wao_components[i][0], wao_classes.items()
                )
            ))
            dropped_info["serialized_classes"] = self.serialized_classes
        return dict(dropped_info)

    @property
    def pixel_n_dim(self):
        return len(self._pixel_keep)

    @property
    def world_n_dim(self):
        return len(self._world_keep)

    @property
    def world_axis_physical_types(self):
        return [self._wcs.world_axis_physical_types[i] for i in self._world_keep]

    @property
    def world_axis_units(self):
        return [self._wcs.world_axis_units[i] for i in self._world_keep]

    @property
    def pixel_axis_names(self):
        return [self._wcs.pixel_axis_names[i] for i in self._pixel_keep]

    @property
    def world_axis_names(self):
        return [self._wcs.world_axis_names[i] for i in self._world_keep]

    def _pixel_to_world_values_all(self, *pixel_arrays):
        pixel_arrays = tuple(map(np.asanyarray, pixel_arrays))
        pixel_arrays_new = []
        ipix_curr = -1
        for ipix in range(self._wcs.pixel_n_dim):
            if isinstance(self._slices_pixel[ipix], int):
                pixel_arrays_new.append(self._slices_pixel[ipix])
            else:
                ipix_curr += 1
                if self._slices_pixel[ipix].start is not None:
                    pixel_arrays_new.append(pixel_arrays[ipix_curr] + self._slices_pixel[ipix].start)
                else:
                    pixel_arrays_new.append(pixel_arrays[ipix_curr])

        pixel_arrays_new = np.broadcast_arrays(*pixel_arrays_new)
        return self._wcs.pixel_to_world_values(*pixel_arrays_new)

    def pixel_to_world_values(self, *pixel_arrays):
        world_arrays = self._pixel_to_world_values_all(*pixel_arrays)

        # Detect the case of a length 0 array
        if isinstance(world_arrays, np.ndarray) and not world_arrays.shape:
            return world_arrays

        if self._wcs.world_n_dim > 1:
            # Select the dimensions of the original WCS we are keeping.
            world_arrays = [world_arrays[iw] for iw in self._world_keep]
            # If there is only one world dimension (after slicing) we shouldn't return a tuple.
            if self.world_n_dim == 1:
                world_arrays = world_arrays[0]

        return world_arrays

    def world_to_pixel_values(self, *world_arrays):
        world_arrays = tuple(map(np.asanyarray, world_arrays))
        world_arrays_new = []
        iworld_curr = -1
        for iworld in range(self._wcs.world_n_dim):
            if iworld in self._world_keep:
                iworld_curr += 1
                world_arrays_new.append(world_arrays[iworld_curr])
            else:
                world_arrays_new.append(1.)

        world_arrays_new = np.broadcast_arrays(*world_arrays_new)
        pixel_arrays = list(self._wcs.world_to_pixel_values(*world_arrays_new))

        for ipixel in range(self._wcs.pixel_n_dim):
            if isinstance(self._slices_pixel[ipixel], slice) and self._slices_pixel[ipixel].start is not None:
                pixel_arrays[ipixel] -= self._slices_pixel[ipixel].start

        # Detect the case of a length 0 array
        if isinstance(pixel_arrays, np.ndarray) and not pixel_arrays.shape:
            return pixel_arrays
        pixel = tuple(pixel_arrays[ip] for ip in self._pixel_keep)
        if self.pixel_n_dim == 1 and self._wcs.pixel_n_dim > 1:
            pixel = pixel[0]
        return pixel

    @property
    def world_axis_object_components(self):
        return [self._wcs.world_axis_object_components[idx] for idx in self._world_keep]

    @property
    def world_axis_object_classes(self):
        keys_keep = [item[0] for item in self.world_axis_object_components]
        return dict([item for item in self._wcs.world_axis_object_classes.items() if item[0] in keys_keep])

    @property
    def array_shape(self):
        if self._wcs.array_shape:
            return np.broadcast_to(0, self._wcs.array_shape)[tuple(self._slices_array)].shape

    @property
    def pixel_shape(self):
        if self.array_shape:
            return tuple(self.array_shape[::-1])

    @property
    def pixel_bounds(self):
        if self._wcs.pixel_bounds is None:
            return

        bounds = []
        for idx in self._pixel_keep:
            if self._slices_pixel[idx].start is None:
                bounds.append(self._wcs.pixel_bounds[idx])
            else:
                imin, imax = self._wcs.pixel_bounds[idx]
                start = self._slices_pixel[idx].start
                bounds.append((imin - start, imax - start))

        return tuple(bounds)

    @property
    def axis_correlation_matrix(self):
        return self._wcs.axis_correlation_matrix[self._world_keep][:, self._pixel_keep]
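A minimal sketch of how this wrapper behaves (illustrative only; the CTYPE values are placeholders). Slicing out a pixel dimension with an integer index also drops any world dimension correlated only with that pixel dimension:

import numpy as np
from astropy.wcs import WCS
from astropy.wcs.wcsapi.wrappers.sliced_wcs import SlicedLowLevelWCS

wcs3d = WCS(naxis=3)
wcs3d.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'FREQ']
wcs3d.wcs.set()

# Array-order slice: fix the first array axis (== the last pixel axis,
# i.e. FREQ) at index 10 and keep the two sky axes intact.
sliced = SlicedLowLevelWCS(wcs3d, np.s_[10, :, :])

assert sliced.pixel_n_dim == 2
assert sliced.world_n_dim == 2
assert sliced.world_axis_physical_types == ['pos.eq.ra', 'pos.eq.dec']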
pllim/astropy
astropy/visualization/wcsaxes/tests/test_misc.py
astropy/wcs/wcsapi/wrappers/sliced_wcs.py
from email.utils import parseaddr

from furl import furl

from waterbutler.core import metadata


class BaseBitbucketMetadata(metadata.BaseMetadata):
    """Metadata properties common to Bitbucket files and folders.

    *commit*: The commit sha that this metadata snapshot applies to.  The
    commit may not actually involve changes to the entity.

    *branch*: A branch is a mutable pointer to a commit sha.  If a branch
    name was used to retrieve the metadata for this entity, this property
    will be set.  Otherwise, it is ``None``.

    *ref*: The ref (commit or branch name) that this entity belongs to.  For
    mutating actions, this is the ref after the action has been committed.
    """

    def __init__(self, raw, path_obj, owner=None, repo=None):
        super().__init__(raw)
        self._path_obj = path_obj
        self.owner = owner
        self.repo = repo

    @property
    def provider(self):
        return 'bitbucket'

    @property
    def path(self):
        return self.build_path()

    @property
    def name(self):
        return self._path_obj.name

    @property
    def commit_sha(self):
        return self._path_obj.commit_sha

    @property
    def branch_name(self):
        return self._path_obj.branch_name

    @property
    def extra(self):
        return {
            'commitSha': self.commit_sha,
            'branch': self.branch_name,  # may be None if revision id is a sha
        }

    def build_path(self):
        return super().build_path(self._path_obj.raw_path)

    def _json_api_links(self, resource):
        """Update JSON-API links to add commitSha or branch, if available"""
        links = super()._json_api_links(resource)

        ref = None
        if self.commit_sha is not None:
            ref = {'commitSha': self.commit_sha}
        elif self.branch_name is not None:
            ref = {'branch': self.branch_name}

        if ref is not None:
            for action, link in links.items():
                links[action] = furl(link).add(ref).url

        for action in ['delete', 'upload', 'new_folder']:
            if action in links:
                links[action] = None

        return links


class BitbucketFileMetadata(BaseBitbucketMetadata, metadata.BaseFileMetadata):

    @property
    def size(self):
        return self.raw['size']

    @property
    def modified(self):
        return self.raw['timestamp']

    @property
    def created_utc(self):
        return None

    @property
    def content_type(self):
        return None

    @property
    def etag(self):
        return '{}::{}'.format(self.path, self.commit_sha)  # FIXME: maybe last_commit_sha?

    @property
    def extra(self):
        return dict(super().extra, **{
            'webView': self.web_view,
            'lastCommitSha': self.last_commit_sha,
        })

    @property
    def last_commit_sha(self):
        return self.raw['revision']

    @property
    def web_view(self):
        return 'https://bitbucket.org/{}/{}/src/{}{}?fileviewer=file-view-default'.format(
            self.owner,
            self.repo,
            self.commit_sha,
            self.path,
        )


class BitbucketFolderMetadata(BaseBitbucketMetadata, metadata.BaseFolderMetadata):
    pass


class BitbucketRevisionMetadata(metadata.BaseFileRevisionMetadata):

    @property
    def version_identifier(self):
        return 'commitSha'

    @property
    def modified(self):
        return self.raw['timestamp']

    @property
    def modified_utc(self):
        return self.raw['utctimestamp']

    @property
    def version(self):
        return self.raw['raw_node']

    @property
    def extra(self):
        return {
            'user': {
                'name': parseaddr(self.raw['raw_author'])[0]  # real name only
            },
            'branch': self.raw['branch'],
        }
import pytest

from waterbutler.providers.bitbucket.path import BitbucketPath
from waterbutler.providers.bitbucket.metadata import BitbucketFileMetadata
from waterbutler.providers.bitbucket.metadata import BitbucketFolderMetadata
from waterbutler.providers.bitbucket.metadata import BitbucketRevisionMetadata

from .fixtures import owner, repo, file_metadata, folder_metadata, revision_metadata

COMMIT_SHA = '123abc456def'


class TestBitbucketMetadata:

    def test_build_file_metadata(self, file_metadata, owner, repo):
        name = 'aaa-01-2.txt'
        subdir = 'plaster'
        full_path = '/{}/{}'.format(subdir, name)
        branch = 'master'

        path = BitbucketPath(full_path, _ids=[
            (COMMIT_SHA, branch), (COMMIT_SHA, branch), (COMMIT_SHA, branch)
        ])

        try:
            metadata = BitbucketFileMetadata(file_metadata, path, owner=owner, repo=repo)
        except Exception as exc:
            pytest.fail(str(exc))

        assert metadata.name == name
        assert metadata.path == full_path
        assert metadata.kind == 'file'
        assert metadata.modified == '2016-10-14T00:37:55Z'
        assert metadata.modified_utc == '2016-10-14T00:37:55+00:00'
        assert metadata.created_utc is None
        assert metadata.content_type is None
        assert metadata.size == 13
        assert metadata.size_as_int == 13
        assert metadata.etag == '{}::{}'.format(full_path, COMMIT_SHA)
        assert metadata.provider == 'bitbucket'
        assert metadata.last_commit_sha == '90c8f7eef948'
        assert metadata.commit_sha == COMMIT_SHA
        assert metadata.branch_name == branch

        web_view = ('https://bitbucket.org/{}/{}/src/{}{}?'
                    'fileviewer=file-view-default'.format(owner, repo, COMMIT_SHA, full_path))
        assert metadata.web_view == web_view

        assert metadata.extra == {
            'commitSha': COMMIT_SHA,
            'branch': 'master',
            'webView': web_view,
            'lastCommitSha': '90c8f7eef948',
        }

        resource = 'mst3k'
        assert metadata._json_api_links(resource) == {
            'delete': None,
            'upload': None,
            'move': 'http://localhost:7777/v1/resources/{}/providers/bitbucket{}?commitSha={}'.format(resource, full_path, COMMIT_SHA),
            'download': 'http://localhost:7777/v1/resources/{}/providers/bitbucket{}?commitSha={}'.format(resource, full_path, COMMIT_SHA),
        }

    def test_build_folder_metadata(self, folder_metadata, owner, repo):
        branch = 'master'
        name = 'plaster'
        path = BitbucketPath('/{}/'.format(name), _ids=[(None, branch), (None, branch)])

        try:
            metadata = BitbucketFolderMetadata(folder_metadata, path, owner=owner, repo=repo)
        except Exception as exc:
            pytest.fail(str(exc))

        assert metadata.name == name
        assert metadata.path == '/{}/'.format(name)
        assert metadata.kind == 'folder'
        assert metadata.children is None

        assert metadata.extra == {
            'commitSha': None,
            'branch': branch,
        }

        assert metadata.provider == 'bitbucket'
        assert metadata.commit_sha is None
        assert metadata.branch_name == branch

        assert metadata._json_api_links('mst3k') == {
            'delete': None,
            'upload': None,
            'move': 'http://localhost:7777/v1/resources/mst3k/providers/bitbucket/{}/?branch={}'.format(name, branch),
            'new_folder': None,
        }

    def test_build_revision_metadata(self, revision_metadata):
        try:
            metadata = BitbucketRevisionMetadata(revision_metadata)
        except Exception as exc:
            pytest.fail(str(exc))

        assert metadata.modified == '2016-09-08 21:20:59'
        assert metadata.modified_utc == '2016-09-08T19:20:59+00:00'
        assert metadata.version_identifier == 'commitSha'
        assert metadata.version == '522a6be9f98ddf7938d7e9568a6375cd0f88e40e'
        assert metadata.extra == {
            'user': {
                'name': 'Fitz Elliott',
            },
            'branch': 'smallbranch-a',
        }
RCOSDP/waterbutler
tests/providers/bitbucket/test_metadata.py
waterbutler/providers/bitbucket/metadata.py
from __future__ import absolute_import, division, print_function import warnings import datashape from datashape import String, DataShape, Option, bool_ from odo.utils import copydoc from .expressions import schema_method_list, ElemWise from .arithmetic import Interp, Repeat, _mkbin, repeat, interp, _add, _radd from ..compatibility import basestring, _inttypes, builtins from ..deprecation import deprecated __all__ = ['Like', 'like', 'Pad', 'Replace', 'SliceReplace', # prevent 'len' to end up in global namespace #'len', 'upper', 'lower', 'cat', 'isalnum', 'isalpha', 'isdecimal', 'isdigit', 'islower', 'isnumeric', 'isspace', 'istitle', 'isupper', 'StrCat', 'find', 'StrFind', 'StrSlice', 'slice', 'slice_replace', 'replace', 'capitalize', 'strip', 'lstrip', 'rstrip', 'pad', 'UnaryStringFunction'] def _validate(var, name, type, typename): if not isinstance(var, type): raise TypeError('"%s" argument must be a %s'%(name, typename)) def _validate_optional(var, name, type, typename): if var is not None and not isinstance(var, type): raise TypeError('"%s" argument must be a %s'%(name, typename)) class Like(ElemWise): """ Filter expression by string comparison >>> from blaze import symbol, like, compute >>> t = symbol('t', 'var * {name: string, city: string}') >>> expr = t[t.name.like('Alice*')] >>> data = [('Alice Smith', 'New York'), ... ('Bob Jones', 'Chicago'), ... ('Alice Walker', 'LA')] >>> list(compute(expr, data)) [('Alice Smith', 'New York'), ('Alice Walker', 'LA')] """ _arguments = '_child', 'pattern' def _dshape(self): shape, schema = self._child.dshape.shape, self._child.schema schema = Option(bool_) if isinstance(schema.measure, Option) else bool_ return DataShape(*(shape + (schema,))) @copydoc(Like) def like(child, pattern): if not isinstance(pattern, basestring): raise TypeError('pattern argument must be a string') return Like(child, pattern) class UnaryStringFunction(ElemWise): """String function that only takes a single argument. """ _arguments = '_child', class len(UnaryStringFunction): schema = datashape.int64 class upper(UnaryStringFunction): @property def schema(self): return self._child.schema class lower(UnaryStringFunction): @property def schema(self): return self._child.schema class PredicateFunction(UnaryStringFunction): @property def schema(self): return bool_ if self._child.schema == datashape.string else Option(bool_) class isalnum(PredicateFunction): pass class isalpha(PredicateFunction): pass class isdecimal(PredicateFunction): pass class isdigit(PredicateFunction): pass class islower(PredicateFunction): pass class isnumeric(PredicateFunction): pass class isspace(PredicateFunction): pass class istitle(PredicateFunction): pass class isupper(PredicateFunction): pass class StrFind(ElemWise): """ Find literal substring in string column. 
""" _arguments = '_child', 'sub' schema = Option(datashape.int64) @copydoc(StrFind) def find(col, sub): if not isinstance(sub, basestring): raise TypeError("'sub' argument must be a String") return StrFind(col, sub) class Replace(ElemWise): _arguments = '_child', 'old', 'new', 'max' @property def schema(self): return self._child.schema def replace(col, old, new, max=None): _validate(old, 'old', basestring, 'string') _validate(new, 'new', basestring, 'string') _validate_optional(max, 'max', int, 'integer') return Replace(col, old, new, max) class Pad(ElemWise): _arguments = '_child', 'width', 'side', 'fillchar' @property def schema(self): return self._child.schema def pad(col, width, side=None, fillchar=None): _validate(width, 'width', int, 'integer') if side not in (None, 'left', 'right'): raise TypeError('"side" argument must be either "left" or "right"') _validate_optional(fillchar, 'fillchar', basestring, 'string') return Pad(col, width, side, fillchar) class capitalize(UnaryStringFunction): @property def schema(self): return self._child.schema class strip(UnaryStringFunction): @property def schema(self): return self._child.schema class lstrip(UnaryStringFunction): @property def schema(self): return self._child.schema class rstrip(UnaryStringFunction): @property def schema(self): return self._child.schema class StrSlice(ElemWise): _arguments = '_child', 'slice' @property def schema(self): return self._child.schema class SliceReplace(ElemWise): _arguments = '_child', 'start', 'stop', 'repl' @property def schema(self): return self._child.schema def slice_replace(col, start=None, stop=None, repl=None): _validate_optional(start, 'start', int, 'integer') _validate_optional(stop, 'stop', int, 'integer') _validate_optional(repl, 'repl', basestring, 'string') return SliceReplace(col, start, stop, repl) @copydoc(StrSlice) def slice(col, idx): if not isinstance(idx, (builtins.slice, _inttypes)): raise TypeError("idx argument must be a slice or integer, given {}".format(slc)) return StrSlice(col, (idx.start, idx.stop, idx.step) if isinstance(idx, builtins.slice) else idx) class StrCat(ElemWise): """ Concatenate two string columns together with optional 'sep' argument. >>> import pandas as pd >>> from blaze import symbol, compute, dshape >>> ds = dshape('3 * {name: ?string, comment: ?string, num: int32}') >>> s = symbol('s', dshape=ds) >>> data = [('al', 'good', 0), ('suri', 'not good', 1), ('jinka', 'ok', 2)] >>> df = pd.DataFrame(data, columns=['name', 'comment', 'num']) >>> compute(s.name.str.cat(s.comment, sep=' -- '), df) 0 al -- good 1 suri -- not good 2 jinka -- ok Name: name, dtype: object For rows with null entries, it returns null. This is consistent with default pandas behavior with kwarg: na_rep=None. 
>>> data = [(None, None, 0), ('suri', 'not good', 1), ('jinka', None, 2)] >>> df = pd.DataFrame(data, columns=['name', 'comment', 'num']) >>> compute(s.name.str.cat(s.comment, sep=' -- '), df) 0 NaN 1 suri -- not good 2 NaN Name: name, dtype: object """ _arguments = 'lhs', 'rhs', 'sep' _input_attributes = 'lhs', 'rhs' def _dshape(self): ''' since pandas supports concat for string columns, do the same for blaze ''' shape = self.lhs.dshape.shape if isinstance(self.lhs.schema.measure, Option): schema = self.lhs.schema elif isinstance(self.rhs.schema.measure, Option): schema = self.rhs.schema else: _, lhs_encoding = self.lhs.schema.measure.parameters _, rhs_encoding = self.rhs.schema.measure.parameters assert lhs_encoding == rhs_encoding # convert fixed length string to variable length string schema = DataShape(String(None, lhs_encoding)) return DataShape(*(shape + (schema,))) @copydoc(StrCat) def cat(lhs, rhs, sep=None): """ returns lhs + sep + rhs Raises: Invoking on a non string column raises a TypeError If kwarg 'sep' is not a string, raises a TypeError """ # pandas supports concat for string columns only, do the same for blaze if not isstring(rhs.dshape): raise TypeError("can only concat string columns") _validate_optional(sep, 'sep', basestring, 'string') return StrCat(lhs, rhs, sep=sep) def isstring(ds): measure = ds.measure return isinstance(getattr(measure, 'ty', measure), String) _mod, _rmod = _mkbin('mod', Interp) _mul, _rmul = _mkbin('mul', Repeat) class str_ns(object): def __init__(self, field): self.field = field def upper(self): return upper(self.field) def lower(self): return lower(self.field) def len(self): return len(self.field) def like(self, pattern): return like(self.field, pattern) def cat(self, other, sep=None): return cat(self.field, other, sep=sep) def find(self, sub): return find(self.field, sub) def isalnum(self): return isalnum(self.field) def isalpha(self): return isalpha(self.field) def isdecimal(self): return isdecimal(self.field) def isdigit(self): return isdigit(self.field) def islower(self): return islower(self.field) def isnumeric(self): return isnumeric(self.field) def isspace(self): return isspace(self.field) def istitle(self): return istitle(self.field) def isupper(self): return isupper(self.field) def replace(self, old, new, max=None): return replace(self.field, old, new, max) def capitalize(self): return capitalize(self.field) def pad(self, width, side=None, fillchar=None): return pad(self.field, width, side, fillchar) def strip(self): return strip(self.field) def lstrip(self): return lstrip(self.field) def rstrip(self): return rstrip(self.field) def __getitem__(self, idx): return slice(self.field, idx) def slice_replace(self, start=None, stop=None, repl=None): return slice_replace(self.field, start, stop, repl) class str(object): __name__ = 'str' def __get__(self, obj, type): return str_ns(obj) if obj is not None else self @deprecated('0.11', replacement='len()') def str_len(*args, **kwds): return len(*args, **kwds) @deprecated('0.11', replacement='upper()') def str_upper(*args, **kwds): return upper(*args, **kwds) @deprecated('0.11', replacement='lower()') def str_lower(*args, **kwds): return lower(*args, **kwds) @deprecated('0.11', replacement='cat(lhs, rhs, sep=None)') def str_cat(*args, **kwds): return cat(*args, **kwds) schema_method_list.extend([(isstring, set([_add, _radd, _mod, _rmod, _mul, _rmul, str(), repeat, interp, like, str_len, # deprecated str_upper, # deprecated str_lower, # deprecated str_cat]))]) # deprecated
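# A construction-only sketch (an illustration, not part of the module) of the
# `.str` accessor wired up above via `schema_method_list`; no compute backend
# is needed just to build the expression nodes:
from blaze import symbol

s = symbol('s', 'var * {name: string}')

padded = s.name.str.pad(6, side='right', fillchar='*')  # Pad node; schema stays string
sliced = s.name.str[0:2]                                # __getitem__ builds a StrSlice
found = s.name.str.find('al')                           # StrFind with Option(int64) schema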
import pytest import os from blaze import data, compute from blaze.utils import raises from odo import URL, CSV import pandas as pd import pandas.util.testing as tm from functools import partial try: from urllib2 import urlopen from urllib2 import HTTPError, URLError except ImportError: from urllib.request import urlopen from urllib.error import HTTPError, URLError pytestmark = pytest.mark.skipif(raises(URLError, partial(urlopen, "http://google.com")), reason='unable to connect to google.com') iris_url = ('https://raw.githubusercontent.com/' 'blaze/blaze/master/blaze/examples/data/iris.csv') @pytest.fixture def iris_local(): thisdir = os.path.abspath(os.path.dirname(__file__)) return data(os.path.join(thisdir, os.pardir, os.pardir, "examples", "data", "iris.csv")) def test_url_csv_data(iris_local): iris_remote = data(iris_url) assert isinstance(iris_remote.data, URL(CSV)) iris_remote_df = compute(iris_remote) assert isinstance(iris_remote_df, pd.DataFrame) iris_local_df = compute(iris_local) tm.assert_frame_equal(iris_remote_df, iris_local_df)
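# The module-level `pytestmark` above is a reusable idiom: probe a well-known
# host once at import time and skip the whole module when offline. A
# standalone sketch of the same idea (standard library plus pytest only; the
# probe host and timeout are illustrative choices):
import pytest

try:
    from urllib.request import urlopen
    from urllib.error import URLError
except ImportError:  # Python 2
    from urllib2 import urlopen, URLError


def _no_network(url='http://google.com', timeout=5):
    """Return True when the probe URL cannot be reached."""
    try:
        urlopen(url, timeout=timeout)
        return False
    except (URLError, OSError):
        return True

pytestmark = pytest.mark.skipif(_no_network(), reason='unable to connect to google.com')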
ContinuumIO/blaze
blaze/compute/tests/test_url_csv_compute.py
blaze/expr/strings.py
# -*- coding: utf-8 -*-
"""
Base settings file, common to all environments.
These settings can be overridden in local.py.
"""
import datetime
import os
import json
import hashlib
import logging
from datetime import timedelta
from collections import OrderedDict

import enum

os_env = os.environ


def parent_dir(path):
    '''Return the parent of a directory.'''
    return os.path.abspath(os.path.join(path, os.pardir))

HERE = os.path.dirname(os.path.abspath(__file__))
BASE_PATH = parent_dir(HERE)  # website/ directory
APP_PATH = parent_dir(BASE_PATH)
ADDON_PATH = os.path.join(APP_PATH, 'addons')
STATIC_FOLDER = os.path.join(BASE_PATH, 'static')
STATIC_URL_PATH = '/static'
ASSET_HASH_PATH = os.path.join(APP_PATH, 'webpack-assets.json')
ROOT = os.path.join(BASE_PATH, '..')
BCRYPT_LOG_ROUNDS = 12
LOG_LEVEL = logging.INFO
TEST_ENV = False

with open(os.path.join(APP_PATH, 'package.json'), 'r') as fobj:
    VERSION = json.load(fobj)['version']

# Expiration time for verification key
EXPIRATION_TIME_DICT = {
    'password': 24 * 60,    # 24 hours in minutes for forgot and reset password
    'confirm': 24 * 60,     # 24 hours in minutes for confirm account and email
    'claim': 30 * 24 * 60   # 30 days in minutes for claiming contributorship
}

CITATION_STYLES_PATH = os.path.join(BASE_PATH, 'static', 'vendor', 'bower_components', 'styles')

# Minimum seconds between forgot password email attempts
SEND_EMAIL_THROTTLE = 30

# Minimum seconds between attempts to change password
CHANGE_PASSWORD_THROTTLE = 30

# Number of incorrect password attempts allowed before throttling.
INCORRECT_PASSWORD_ATTEMPTS_ALLOWED = 3

# Seconds that must elapse before updating a user's date_last_login field
DATE_LAST_LOGIN_THROTTLE = 60
DATE_LAST_LOGIN_THROTTLE_DELTA = datetime.timedelta(seconds=DATE_LAST_LOGIN_THROTTLE)

# Seconds that must elapse before change password attempts are reset (currently 1 hour)
TIME_RESET_CHANGE_PASSWORD_ATTEMPTS = 3600

# Days before a pending embargo/retraction/registration automatically becomes active
RETRACTION_PENDING_TIME = datetime.timedelta(days=2)
EMBARGO_PENDING_TIME = datetime.timedelta(days=2)
EMBARGO_TERMINATION_PENDING_TIME = datetime.timedelta(days=2)
REGISTRATION_APPROVAL_TIME = datetime.timedelta(days=2)
# Date range for embargo periods
EMBARGO_END_DATE_MIN = datetime.timedelta(days=2)
EMBARGO_END_DATE_MAX = datetime.timedelta(days=1460)  # Four years

# Question titles to be removed for anonymized view-only links (VOLs)
ANONYMIZED_TITLES = ['Authors']

LOAD_BALANCER = False

# May set these to True in local.py for development
DEV_MODE = False
DEBUG_MODE = False
SECURE_MODE = not DEBUG_MODE  # Set secure cookie

PROTOCOL = 'https://' if SECURE_MODE else 'http://'
DOMAIN = PROTOCOL + 'localhost:5000/'
INTERNAL_DOMAIN = DOMAIN
API_DOMAIN = PROTOCOL + 'localhost:8000/'

PREPRINT_PROVIDER_DOMAINS = {
    'enabled': False,
    'prefix': PROTOCOL,
    'suffix': '/'
}
# External Ember App Local Development
USE_EXTERNAL_EMBER = False
PROXY_EMBER_APPS = False
# http://docs.python-requests.org/en/master/user/advanced/#timeouts
EXTERNAL_EMBER_SERVER_TIMEOUT = 3.05
EXTERNAL_EMBER_APPS = {}

LOG_PATH = os.path.join(APP_PATH, 'logs')
TEMPLATES_PATH = os.path.join(BASE_PATH, 'templates')

# User management & registration
CONFIRM_REGISTRATIONS_BY_EMAIL = True
ALLOW_LOGIN = True

SEARCH_ENGINE = 'elastic'  # Can be 'elastic', or None
ELASTIC_URI = '127.0.0.1:9200'
ELASTIC_TIMEOUT = 10
ELASTIC_INDEX = 'website'
ELASTIC_KWARGS = {
    # 'use_ssl': False,
    # 'verify_certs': True,
    # 'ca_certs': None,
    # 'client_cert': None,
    # 'client_key': None
}

# Sessions
COOKIE_NAME = 'osf'
# TODO:
Override OSF_COOKIE_DOMAIN in local.py in production OSF_COOKIE_DOMAIN = None # server-side verification timeout OSF_SESSION_TIMEOUT = 30 * 24 * 60 * 60 # 30 days in seconds # TODO: Override SECRET_KEY in local.py in production SECRET_KEY = 'CHANGEME' SESSION_COOKIE_SECURE = SECURE_MODE SESSION_COOKIE_SAMESITE = 'None' SESSION_COOKIE_HTTPONLY = True # local path to private key and cert for local development using https, overwrite in local.py OSF_SERVER_KEY = None OSF_SERVER_CERT = None # External services USE_CDN_FOR_CLIENT_LIBS = True USE_EMAIL = True FROM_EMAIL = 'openscienceframework-noreply@osf.io' # support email OSF_SUPPORT_EMAIL = 'support@osf.io' # contact email OSF_CONTACT_EMAIL = 'contact@osf.io' # prereg email PREREG_EMAIL = 'prereg@cos.io' # Default settings for fake email address generation FAKE_EMAIL_NAME = 'freddiemercury' FAKE_EMAIL_DOMAIN = 'cos.io' # SMTP Settings MAIL_SERVER = 'smtp.sendgrid.net' MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = '' # Set this in local.py # OR, if using Sendgrid's API # WARNING: If `SENDGRID_WHITELIST_MODE` is True, # `tasks.send_email` would only email recipients included in `SENDGRID_EMAIL_WHITELIST` SENDGRID_API_KEY = None SENDGRID_WHITELIST_MODE = False SENDGRID_EMAIL_WHITELIST = [] # Mailchimp MAILCHIMP_API_KEY = None MAILCHIMP_WEBHOOK_SECRET_KEY = 'CHANGEME' # OSF secret key to ensure webhook is secure ENABLE_EMAIL_SUBSCRIPTIONS = True MAILCHIMP_GENERAL_LIST = 'Open Science Framework General' #Triggered emails OSF_HELP_LIST = 'Open Science Framework Help' PREREG_AGE_LIMIT = timedelta(weeks=12) PREREG_WAIT_TIME = timedelta(weeks=2) WAIT_BETWEEN_MAILS = timedelta(days=7) NO_ADDON_WAIT_TIME = timedelta(weeks=8) NO_LOGIN_WAIT_TIME = timedelta(weeks=4) WELCOME_OSF4M_WAIT_TIME = timedelta(weeks=2) NO_LOGIN_OSF4M_WAIT_TIME = timedelta(weeks=6) NEW_PUBLIC_PROJECT_WAIT_TIME = timedelta(hours=24) WELCOME_OSF4M_WAIT_TIME_GRACE = timedelta(days=12) # TODO: Override in local.py MAILGUN_API_KEY = None # Use Celery for file rendering USE_CELERY = True # Trashed File Retention PURGE_DELTA = timedelta(days=30) # TODO: Override in local.py in production DB_HOST = 'localhost' DB_PORT = os_env.get('OSF_DB_PORT', 27017) # TODO: Configuration should not change between deploys - this should be dynamic. COOKIE_DOMAIN = '.openscienceframework.org' # Beaker # TODO: Combine Python and JavaScript config # If you change COMMENT_MAXLENGTH, make sure you create a corresponding migration. COMMENT_MAXLENGTH = 1000 # Profile image options PROFILE_IMAGE_LARGE = 70 PROFILE_IMAGE_MEDIUM = 40 # Currently (8/21/2017) only gravatar supported. 
PROFILE_IMAGE_PROVIDER = 'gravatar' # Conference options CONFERENCE_MIN_COUNT = 5 WIKI_WHITELIST = { 'tags': [ 'a', 'abbr', 'acronym', 'b', 'bdo', 'big', 'blockquote', 'br', 'center', 'cite', 'code', 'dd', 'del', 'dfn', 'div', 'dl', 'dt', 'em', 'embed', 'font', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'ins', 'kbd', 'li', 'object', 'ol', 'param', 'pre', 'p', 'q', 's', 'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup', 'table', 'tbody', 'td', 'th', 'thead', 'tr', 'tt', 'ul', 'u', 'var', 'wbr', ], 'attributes': [ 'align', 'alt', 'border', 'cite', 'class', 'dir', 'height', 'href', 'id', 'src', 'style', 'title', 'type', 'width', 'face', 'size', # font tags 'salign', 'align', 'wmode', 'target', ], # Styles currently used in Reproducibility Project wiki pages 'styles': [ 'top', 'left', 'width', 'height', 'position', 'background', 'font-size', 'text-align', 'z-index', 'list-style', ] } # Maps category identifier => Human-readable representation for use in # titles, menus, etc. # Use an OrderedDict so that menu items show in the correct order NODE_CATEGORY_MAP = OrderedDict([ ('analysis', 'Analysis'), ('communication', 'Communication'), ('data', 'Data'), ('hypothesis', 'Hypothesis'), ('instrumentation', 'Instrumentation'), ('methods and measures', 'Methods and Measures'), ('procedure', 'Procedure'), ('project', 'Project'), ('software', 'Software'), ('other', 'Other'), ('', 'Uncategorized') ]) # Add-ons # Load addons from addons.json with open(os.path.join(ROOT, 'addons.json')) as fp: addon_settings = json.load(fp) ADDONS_REQUESTED = addon_settings['addons'] ADDONS_ARCHIVABLE = addon_settings['addons_archivable'] ADDONS_COMMENTABLE = addon_settings['addons_commentable'] ADDONS_BASED_ON_IDS = addon_settings['addons_based_on_ids'] ADDONS_DEFAULT = addon_settings['addons_default'] ADDONS_OAUTH_NO_REDIRECT = addon_settings['addons_oauth_no_redirect'] SYSTEM_ADDED_ADDONS = { 'user': [], 'node': [], } KEEN = { 'public': { 'project_id': None, 'master_key': 'changeme', 'write_key': '', 'read_key': '', }, 'private': { 'project_id': '', 'write_key': '', 'read_key': '', }, } SENTRY_DSN = None SENTRY_DSN_JS = None MISSING_FILE_NAME = 'untitled' # Most Popular and New and Noteworthy Nodes POPULAR_LINKS_NODE = None # TODO Override in local.py in production. POPULAR_LINKS_REGISTRATIONS = None # TODO Override in local.py in production. NEW_AND_NOTEWORTHY_LINKS_NODE = None # TODO Override in local.py in production. MAX_POPULAR_PROJECTS = 10 NEW_AND_NOTEWORTHY_CONTRIBUTOR_BLACKLIST = [] # TODO Override in local.py in production. # FOR EMERGENCIES ONLY: Setting this to True will disable forks, registrations, # and uploads in order to save disk space. 
DISK_SAVING_MODE = False

# Seconds before another notification email can be sent to a contributor when added to a project
CONTRIBUTOR_ADDED_EMAIL_THROTTLE = 24 * 3600

# Seconds before another notification email can be sent to a member when added to an OSFGroup
GROUP_MEMBER_ADDED_EMAIL_THROTTLE = 24 * 3600

# Seconds before another notification email can be sent to group members when added to a project
GROUP_CONNECTED_EMAIL_THROTTLE = 24 * 3600

# Google Analytics
GOOGLE_ANALYTICS_ID = None
GOOGLE_SITE_VERIFICATION = None

DEFAULT_HMAC_SECRET = 'changeme'
DEFAULT_HMAC_ALGORITHM = hashlib.sha256
WATERBUTLER_URL = 'http://localhost:7777'
WATERBUTLER_INTERNAL_URL = WATERBUTLER_URL

####################
#   Identifiers   #
###################

DOI_URL_PREFIX = 'https://doi.org/'

# General Format for DOIs
DOI_FORMAT = '{prefix}/osf.io/{guid}'

# datacite
DATACITE_ENABLED = True
DATACITE_USERNAME = None
DATACITE_PASSWORD = None
DATACITE_URL = 'https://mds.datacite.org'
DATACITE_PREFIX = '10.70102'  # Datacite's test DOI prefix -- update in production

# crossref
CROSSREF_USERNAME = None
CROSSREF_PASSWORD = None
CROSSREF_URL = None  # Location to POST crossref data. In production, change this to the production CrossRef API endpoint
CROSSREF_DEPOSITOR_EMAIL = 'None'  # This email will receive confirmation/error messages from CrossRef on submission
ECSARXIV_CROSSREF_USERNAME = None
ECSARXIV_CROSSREF_PASSWORD = None

# if our DOIs cannot be confirmed after X amount of days email the admin
DAYS_CROSSREF_DOIS_MUST_BE_STUCK_BEFORE_EMAIL = 2

# Crossref has a second metadata api that uses JSON with different features
CROSSREF_JSON_API_URL = 'https://api.crossref.org/'

# Leave as `None` for production, test/staging/local envs must set
SHARE_PROVIDER_PREPEND = None

SHARE_ENABLED = True  # This should be False for most local development
SHARE_REGISTRATION_URL = ''
SHARE_URL = 'https://share.osf.io/'
SHARE_API_TOKEN = None  # Required to send project updates to SHARE

CAS_SERVER_URL = 'http://localhost:8080'
MFR_SERVER_URL = 'http://localhost:7778'

###### ARCHIVER ###########
ARCHIVE_PROVIDER = 'osfstorage'

MAX_ARCHIVE_SIZE = 5 * 1024 ** 3  # 5 GB; 1024 ** 3 == math.pow(1024, 3) == 1 GB

ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1)  # 24 hours
ENABLE_ARCHIVER = True

JWT_SECRET = 'changeme'
JWT_ALGORITHM = 'HS256'

##### CELERY #####

# Default RabbitMQ broker
RABBITMQ_USERNAME = os.environ.get('RABBITMQ_USERNAME', 'guest')
RABBITMQ_PASSWORD = os.environ.get('RABBITMQ_PASSWORD', 'guest')
RABBITMQ_HOST = os.environ.get('RABBITMQ_HOST', 'localhost')
RABBITMQ_PORT = os.environ.get('RABBITMQ_PORT', '5672')
RABBITMQ_VHOST = os.environ.get('RABBITMQ_VHOST', '/')

# Seconds, not an actual celery setting
CELERY_RETRY_BACKOFF_BASE = 5


class CeleryConfig:
    """
    Celery Configuration
    http://docs.celeryproject.org/en/latest/userguide/configuration.html
    """
    timezone = 'UTC'

    task_default_queue = 'celery'
    task_low_queue = 'low'
    task_med_queue = 'med'
    task_high_queue = 'high'

    low_pri_modules = {
        'framework.analytics.tasks',
        'framework.celery_tasks',
        'scripts.osfstorage.usage_audit',
        'scripts.stuck_registration_audit',
        'scripts.analytics.tasks',
        'scripts.populate_new_and_noteworthy_projects',
        'scripts.populate_popular_projects_and_registrations',
        'website.search.elastic_search',
        'scripts.generate_sitemap',
        'scripts.analytics.run_keen_summaries',
        'scripts.analytics.run_keen_snapshots',
        'scripts.analytics.run_keen_events',
        'scripts.clear_sessions',
        'osf.management.commands.check_crossref_dois',
        'osf.management.commands.find_spammy_files',
'osf.management.commands.migrate_pagecounter_data', 'osf.management.commands.migrate_deleted_date', 'osf.management.commands.addon_deleted_date', 'osf.management.commands.migrate_registration_responses', 'osf.management.commands.sync_collection_provider_indices', 'osf.management.commands.update_institution_project_counts' } med_pri_modules = { 'framework.email.tasks', 'scripts.send_queued_mails', 'scripts.triggered_mails', 'website.mailchimp_utils', 'website.notifications.tasks', 'website.collections.tasks', 'website.identifier.tasks', 'website.preprints.tasks', 'website.project.tasks', } high_pri_modules = { 'scripts.approve_embargo_terminations', 'scripts.approve_registrations', 'scripts.embargo_registrations', 'scripts.premigrate_created_modified', 'scripts.refresh_addon_tokens', 'scripts.retract_registrations', 'website.archiver.tasks', 'scripts.add_missing_identifiers_to_preprints' } try: from kombu import Queue, Exchange except ImportError: pass else: task_queues = ( Queue(task_low_queue, Exchange(task_low_queue), routing_key=task_low_queue, consumer_arguments={'x-priority': -1}), Queue(task_default_queue, Exchange(task_default_queue), routing_key=task_default_queue, consumer_arguments={'x-priority': 0}), Queue(task_med_queue, Exchange(task_med_queue), routing_key=task_med_queue, consumer_arguments={'x-priority': 1}), Queue(task_high_queue, Exchange(task_high_queue), routing_key=task_high_queue, consumer_arguments={'x-priority': 10}), ) task_default_exchange_type = 'direct' task_routes = ('framework.celery_tasks.routers.CeleryRouter', ) task_ignore_result = True task_store_errors_even_if_ignored = True broker_url = os.environ.get('BROKER_URL', 'amqp://{}:{}@{}:{}/{}'.format(RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_VHOST)) broker_use_ssl = False # Default RabbitMQ backend result_backend = 'django-db' # django-celery-results beat_scheduler = 'django_celery_beat.schedulers:DatabaseScheduler' # Modules to import when celery launches imports = ( 'framework.celery_tasks', 'framework.email.tasks', 'osf.external.tasks', 'osf.management.commands.data_storage_usage', 'osf.management.commands.registration_schema_metrics', 'website.mailchimp_utils', 'website.notifications.tasks', 'website.archiver.tasks', 'website.search.search', 'website.project.tasks', 'scripts.populate_new_and_noteworthy_projects', 'scripts.populate_popular_projects_and_registrations', 'scripts.refresh_addon_tokens', 'scripts.retract_registrations', 'scripts.embargo_registrations', 'scripts.approve_registrations', 'scripts.approve_embargo_terminations', 'scripts.triggered_mails', 'scripts.clear_sessions', 'scripts.send_queued_mails', 'scripts.analytics.run_keen_summaries', 'scripts.analytics.run_keen_snapshots', 'scripts.analytics.run_keen_events', 'scripts.generate_sitemap', 'scripts.premigrate_created_modified', 'scripts.add_missing_identifiers_to_preprints', 'osf.management.commands.deactivate_requested_accounts', 'osf.management.commands.check_crossref_dois', 'osf.management.commands.find_spammy_files', 'osf.management.commands.update_institution_project_counts', 'osf.management.commands.correct_registration_moderation_states', 'osf.management.commands.sync_collection_provider_indices', ) # Modules that need metrics and release requirements # imports += ( # 'scripts.osfstorage.usage_audit', # 'scripts.stuck_registration_audit', # 'scripts.analytics.tasks', # 'scripts.analytics.upload', # ) # celery.schedule will not be installed when running invoke requirements the first time. 
try: from celery.schedules import crontab except ImportError: pass else: # Setting up a scheduler, essentially replaces an independent cron job # Note: these times must be in UTC beat_schedule = { '5-minute-emails': { 'task': 'website.notifications.tasks.send_users_email', 'schedule': crontab(minute='*/5'), 'args': ('email_transactional',), }, 'daily-emails': { 'task': 'website.notifications.tasks.send_users_email', 'schedule': crontab(minute=0, hour=5), # Daily at 12 a.m. EST 'args': ('email_digest',), }, 'refresh_addons': { 'task': 'scripts.refresh_addon_tokens', 'schedule': crontab(minute=0, hour=7), # Daily 2:00 a.m 'kwargs': {'dry_run': False, 'addons': { 'box': 60, # https://docs.box.com/docs/oauth-20#section-6-using-the-access-and-refresh-tokens 'googledrive': 14, # https://developers.google.com/identity/protocols/OAuth2#expiration 'mendeley': 14 # http://dev.mendeley.com/reference/topics/authorization_overview.html }}, }, 'retract_registrations': { 'task': 'scripts.retract_registrations', 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, 'embargo_registrations': { 'task': 'scripts.embargo_registrations', 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, 'add_missing_identifiers_to_preprints': { 'task': 'scripts.add_missing_identifiers_to_preprints', 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, 'approve_registrations': { 'task': 'scripts.approve_registrations', 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, 'approve_embargo_terminations': { 'task': 'scripts.approve_embargo_terminations', 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, 'triggered_mails': { 'task': 'scripts.triggered_mails', 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, 'clear_sessions': { 'task': 'scripts.clear_sessions', 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, 'send_queued_mails': { 'task': 'scripts.send_queued_mails', 'schedule': crontab(minute=0, hour=17), # Daily 12 p.m. 'kwargs': {'dry_run': False}, }, 'new-and-noteworthy': { 'task': 'scripts.populate_new_and_noteworthy_projects', 'schedule': crontab(minute=0, hour=7, day_of_week=6), # Saturday 2:00 a.m. 'kwargs': {'dry_run': False} }, 'update_popular_nodes': { 'task': 'scripts.populate_popular_projects_and_registrations', 'schedule': crontab(minute=0, hour=7), # Daily 2:00 a.m. 'kwargs': {'dry_run': False} }, 'registration_schema_metrics': { 'task': 'management.commands.registration_schema_metrics', 'schedule': crontab(minute=45, hour=7, day_of_month=3), # Third day of month 2:45 a.m. 'kwargs': {'dry_run': False} }, 'run_keen_summaries': { 'task': 'scripts.analytics.run_keen_summaries', 'schedule': crontab(minute=0, hour=6), # Daily 1:00 a.m. 'kwargs': {'yesterday': True} }, # 'run_keen_snapshots': { # 'task': 'scripts.analytics.run_keen_snapshots', # 'schedule': crontab(minute=0, hour=8), # Daily 3:00 a.m. # }, 'run_keen_events': { 'task': 'scripts.analytics.run_keen_events', 'schedule': crontab(minute=0, hour=9), # Daily 4:00 a.m. 'kwargs': {'yesterday': True} }, # 'data_storage_usage': { # 'task': 'management.commands.data_storage_usage', # 'schedule': crontab(day_of_month=1, minute=30, hour=4), # Last of the month at 11:30 p.m. # }, # 'migrate_pagecounter_data': { # 'task': 'management.commands.migrate_pagecounter_data', # 'schedule': crontab(minute=0, hour=7), # Daily 2:00 a.m. 
            # },
            # 'migrate_registration_responses': {
            #     'task': 'management.commands.migrate_registration_responses',
            #     'schedule': crontab(minute=32, hour=7),  # Daily 2:32 a.m.
            # },
            # 'migrate_deleted_date': {
            #     'task': 'management.commands.migrate_deleted_date',
            #     'schedule': crontab(minute=0, hour=3),
            # },
            # 'addon_deleted_date': {
            #     'task': 'management.commands.addon_deleted_date',
            #     'schedule': crontab(minute=0, hour=3),  # Daily 11:00 p.m.
            # },
            'generate_sitemap': {
                'task': 'scripts.generate_sitemap',
                'schedule': crontab(minute=0, hour=5),  # Daily 12:00 a.m.
            },
            'deactivate_requested_accounts': {
                'task': 'management.commands.deactivate_requested_accounts',
                'schedule': crontab(minute=0, hour=5),  # Daily 12:00 a.m.
            },
            'check_crossref_doi': {
                'task': 'management.commands.check_crossref_dois',
                'schedule': crontab(minute=0, hour=4),  # Daily 11:00 p.m.
            },
            'update_institution_project_counts': {
                'task': 'management.commands.update_institution_project_counts',
                'schedule': crontab(minute=0, hour=9),  # Daily 05:00 a.m. EDT
            },
        }

        # Tasks that need metrics and release requirements
        # beat_schedule.update({
        #     'usage_audit': {
        #         'task': 'scripts.osfstorage.usage_audit',
        #         'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
        #         'kwargs': {'send_mail': True},
        #     },
        #     'stuck_registration_audit': {
        #         'task': 'scripts.stuck_registration_audit',
        #         'schedule': crontab(minute=0, hour=11),  # Daily 6 a.m
        #         'kwargs': {},
        #     },
        # })


WATERBUTLER_JWE_SALT = 'yusaltydough'
WATERBUTLER_JWE_SECRET = 'CirclesAre4Squares'
WATERBUTLER_JWT_SECRET = 'ILiekTrianglesALot'
WATERBUTLER_JWT_ALGORITHM = 'HS256'
WATERBUTLER_JWT_EXPIRATION = 15

SENSITIVE_DATA_SALT = 'yusaltydough'
SENSITIVE_DATA_SECRET = 'TrainglesAre5Squares'

DRAFT_REGISTRATION_APPROVAL_PERIOD = datetime.timedelta(days=10)
assert (DRAFT_REGISTRATION_APPROVAL_PERIOD > EMBARGO_END_DATE_MIN), 'The draft registration approval period should be more than the minimum embargo end date.'
# TODO: Remove references to this flag ENABLE_INSTITUTIONS = True ENABLE_STORAGE_USAGE_CACHE = True ENABLE_VARNISH = False ENABLE_ESI = False VARNISH_SERVERS = [] # This should be set in local.py or cache invalidation won't work ESI_MEDIA_TYPES = {'application/vnd.api+json', 'application/json'} # Used for gathering meta information about the current build GITHUB_API_TOKEN = None # switch for disabling things that shouldn't happen during # the modm to django migration RUNNING_MIGRATION = False # External Identity Provider EXTERNAL_IDENTITY_PROFILE = { 'OrcidProfile': 'ORCID', } # Source: https://github.com/maxd/fake_email_validator/blob/master/config/fake_domains.list BLACKLISTED_DOMAINS = [ '0-mail.com', '0815.ru', '0815.su', '0clickemail.com', '0wnd.net', '0wnd.org', '10mail.org', '10minut.com.pl', '10minutemail.cf', '10minutemail.co.uk', '10minutemail.co.za', '10minutemail.com', '10minutemail.de', '10minutemail.eu', '10minutemail.ga', '10minutemail.gq', '10minutemail.info', '10minutemail.ml', '10minutemail.net', '10minutemail.org', '10minutemail.ru', '10minutemail.us', '10minutesmail.co.uk', '10minutesmail.com', '10minutesmail.eu', '10minutesmail.net', '10minutesmail.org', '10minutesmail.ru', '10minutesmail.us', '123-m.com', '15qm-mail.red', '15qm.com', '1chuan.com', '1mail.ml', '1pad.de', '1usemail.com', '1zhuan.com', '20mail.in', '20mail.it', '20minutemail.com', '2prong.com', '30minutemail.com', '30minutesmail.com', '33mail.com', '3d-painting.com', '3mail.ga', '4mail.cf', '4mail.ga', '4warding.com', '4warding.net', '4warding.org', '5mail.cf', '5mail.ga', '60minutemail.com', '675hosting.com', '675hosting.net', '675hosting.org', '6ip.us', '6mail.cf', '6mail.ga', '6mail.ml', '6paq.com', '6url.com', '75hosting.com', '75hosting.net', '75hosting.org', '7mail.ga', '7mail.ml', '7mail7.com', '7tags.com', '8mail.cf', '8mail.ga', '8mail.ml', '99experts.com', '9mail.cf', '9ox.net', 'a-bc.net', 'a45.in', 'abcmail.email', 'abusemail.de', 'abyssmail.com', 'acentri.com', 'advantimo.com', 'afrobacon.com', 'agedmail.com', 'ajaxapp.net', 'alivance.com', 'ama-trade.de', 'amail.com', 'amail4.me', 'amilegit.com', 'amiri.net', 'amiriindustries.com', 'anappthat.com', 'ano-mail.net', 'anobox.ru', 'anonbox.net', 'anonmails.de', 'anonymail.dk', 'anonymbox.com', 'antichef.com', 'antichef.net', 'antireg.ru', 'antispam.de', 'antispammail.de', 'appixie.com', 'armyspy.com', 'artman-conception.com', 'asdasd.ru', 'azmeil.tk', 'baxomale.ht.cx', 'beddly.com', 'beefmilk.com', 'beerolympics.se', 'bestemailaddress.net', 'bigprofessor.so', 'bigstring.com', 'binkmail.com', 'bio-muesli.net', 'bladesmail.net', 'bloatbox.com', 'bobmail.info', 'bodhi.lawlita.com', 'bofthew.com', 'bootybay.de', 'bossmail.de', 'boun.cr', 'bouncr.com', 'boxformail.in', 'boximail.com', 'boxtemp.com.br', 'breakthru.com', 'brefmail.com', 'brennendesreich.de', 'broadbandninja.com', 'bsnow.net', 'bspamfree.org', 'buffemail.com', 'bugmenot.com', 'bumpymail.com', 'bund.us', 'bundes-li.ga', 'burnthespam.info', 'burstmail.info', 'buymoreplays.com', 'buyusedlibrarybooks.org', 'byom.de', 'c2.hu', 'cachedot.net', 'card.zp.ua', 'casualdx.com', 'cbair.com', 'cdnqa.com', 'cek.pm', 'cellurl.com', 'cem.net', 'centermail.com', 'centermail.net', 'chammy.info', 'cheatmail.de', 'chewiemail.com', 'childsavetrust.org', 'chogmail.com', 'choicemail1.com', 'chong-mail.com', 'chong-mail.net', 'chong-mail.org', 'clixser.com', 'clrmail.com', 'cmail.net', 'cmail.org', 'coldemail.info', 'consumerriot.com', 'cool.fr.nf', 'correo.blogos.net', 'cosmorph.com', 'courriel.fr.nf', 
'courrieltemporaire.com', 'crapmail.org', 'crazymailing.com', 'cubiclink.com', 'curryworld.de', 'cust.in', 'cuvox.de', 'd3p.dk', 'dacoolest.com', 'daintly.com', 'dandikmail.com', 'dayrep.com', 'dbunker.com', 'dcemail.com', 'deadaddress.com', 'deadfake.cf', 'deadfake.ga', 'deadfake.ml', 'deadfake.tk', 'deadspam.com', 'deagot.com', 'dealja.com', 'delikkt.de', 'despam.it', 'despammed.com', 'devnullmail.com', 'dfgh.net', 'digitalsanctuary.com', 'dingbone.com', 'dingfone.com', 'discard.cf', 'discard.email', 'discard.ga', 'discard.gq', 'discard.ml', 'discard.tk', 'discardmail.com', 'discardmail.de', 'dispomail.eu', 'disposable-email.ml', 'disposable.cf', 'disposable.ga', 'disposable.ml', 'disposableaddress.com', 'disposableemailaddresses.com', 'disposableinbox.com', 'dispose.it', 'disposeamail.com', 'disposemail.com', 'dispostable.com', 'divermail.com', 'dodgeit.com', 'dodgemail.de', 'dodgit.com', 'dodgit.org', 'dodsi.com', 'doiea.com', 'domozmail.com', 'donemail.ru', 'dontmail.net', 'dontreg.com', 'dontsendmespam.de', 'dotmsg.com', 'drdrb.com', 'drdrb.net', 'droplar.com', 'dropmail.me', 'duam.net', 'dudmail.com', 'dump-email.info', 'dumpandjunk.com', 'dumpmail.de', 'dumpyemail.com', 'duskmail.com', 'e-mail.com', 'e-mail.org', 'e4ward.com', 'easytrashmail.com', 'ee1.pl', 'ee2.pl', 'eelmail.com', 'einmalmail.de', 'einrot.com', 'einrot.de', 'eintagsmail.de', 'email-fake.cf', 'email-fake.com', 'email-fake.ga', 'email-fake.gq', 'email-fake.ml', 'email-fake.tk', 'email60.com', 'email64.com', 'emailage.cf', 'emailage.ga', 'emailage.gq', 'emailage.ml', 'emailage.tk', 'emaildienst.de', 'emailgo.de', 'emailias.com', 'emailigo.de', 'emailinfive.com', 'emaillime.com', 'emailmiser.com', 'emailproxsy.com', 'emails.ga', 'emailsensei.com', 'emailspam.cf', 'emailspam.ga', 'emailspam.gq', 'emailspam.ml', 'emailspam.tk', 'emailtemporanea.com', 'emailtemporanea.net', 'emailtemporar.ro', 'emailtemporario.com.br', 'emailthe.net', 'emailtmp.com', 'emailto.de', 'emailwarden.com', 'emailx.at.hm', 'emailxfer.com', 'emailz.cf', 'emailz.ga', 'emailz.gq', 'emailz.ml', 'emeil.in', 'emeil.ir', 'emeraldwebmail.com', 'emil.com', 'emkei.cf', 'emkei.ga', 'emkei.gq', 'emkei.ml', 'emkei.tk', 'emz.net', 'enterto.com', 'ephemail.net', 'ero-tube.org', 'etranquil.com', 'etranquil.net', 'etranquil.org', 'evopo.com', 'example.com', 'explodemail.com', 'express.net.ua', 'eyepaste.com', 'facebook-email.cf', 'facebook-email.ga', 'facebook-email.ml', 'facebookmail.gq', 'facebookmail.ml', 'fake-box.com', 'fake-mail.cf', 'fake-mail.ga', 'fake-mail.ml', 'fakeinbox.cf', 'fakeinbox.com', 'fakeinbox.ga', 'fakeinbox.ml', 'fakeinbox.tk', 'fakeinformation.com', 'fakemail.fr', 'fakemailgenerator.com', 'fakemailz.com', 'fammix.com', 'fansworldwide.de', 'fantasymail.de', 'fastacura.com', 'fastchevy.com', 'fastchrysler.com', 'fastkawasaki.com', 'fastmazda.com', 'fastmitsubishi.com', 'fastnissan.com', 'fastsubaru.com', 'fastsuzuki.com', 'fasttoyota.com', 'fastyamaha.com', 'fatflap.com', 'fdfdsfds.com', 'fightallspam.com', 'fiifke.de', 'filzmail.com', 'fivemail.de', 'fixmail.tk', 'fizmail.com', 'fleckens.hu', 'flurre.com', 'flurred.com', 'flurred.ru', 'flyspam.com', 'footard.com', 'forgetmail.com', 'forward.cat', 'fr33mail.info', 'frapmail.com', 'free-email.cf', 'free-email.ga', 'freemails.cf', 'freemails.ga', 'freemails.ml', 'freundin.ru', 'friendlymail.co.uk', 'front14.org', 'fuckingduh.com', 'fudgerub.com', 'fux0ringduh.com', 'fyii.de', 'garliclife.com', 'gehensiemirnichtaufdensack.de', 'gelitik.in', 'germanmails.biz', 'get-mail.cf', 'get-mail.ga', 
'get-mail.ml', 'get-mail.tk', 'get1mail.com', 'get2mail.fr', 'getairmail.cf', 'getairmail.com', 'getairmail.ga', 'getairmail.gq', 'getairmail.ml', 'getairmail.tk', 'getmails.eu', 'getonemail.com', 'getonemail.net', 'gfcom.com', 'ghosttexter.de', 'giantmail.de', 'girlsundertheinfluence.com', 'gishpuppy.com', 'gmial.com', 'goemailgo.com', 'gorillaswithdirtyarmpits.com', 'gotmail.com', 'gotmail.net', 'gotmail.org', 'gowikibooks.com', 'gowikicampus.com', 'gowikicars.com', 'gowikifilms.com', 'gowikigames.com', 'gowikimusic.com', 'gowikinetwork.com', 'gowikitravel.com', 'gowikitv.com', 'grandmamail.com', 'grandmasmail.com', 'great-host.in', 'greensloth.com', 'grr.la', 'gsrv.co.uk', 'guerillamail.biz', 'guerillamail.com', 'guerillamail.de', 'guerillamail.net', 'guerillamail.org', 'guerillamailblock.com', 'guerrillamail.biz', 'guerrillamail.com', 'guerrillamail.de', 'guerrillamail.info', 'guerrillamail.net', 'guerrillamail.org', 'guerrillamailblock.com', 'gustr.com', 'h8s.org', 'hacccc.com', 'haltospam.com', 'haqed.com', 'harakirimail.com', 'hartbot.de', 'hat-geld.de', 'hatespam.org', 'headstrong.de', 'hellodream.mobi', 'herp.in', 'hidemail.de', 'hideme.be', 'hidzz.com', 'hiru-dea.com', 'hmamail.com', 'hochsitze.com', 'hopemail.biz', 'hot-mail.cf', 'hot-mail.ga', 'hot-mail.gq', 'hot-mail.ml', 'hot-mail.tk', 'hotpop.com', 'hulapla.de', 'hushmail.com', 'ieatspam.eu', 'ieatspam.info', 'ieh-mail.de', 'ihateyoualot.info', 'iheartspam.org', 'ikbenspamvrij.nl', 'imails.info', 'imgof.com', 'imgv.de', 'imstations.com', 'inbax.tk', 'inbox.si', 'inboxalias.com', 'inboxclean.com', 'inboxclean.org', 'inboxproxy.com', 'incognitomail.com', 'incognitomail.net', 'incognitomail.org', 'ineec.net', 'infocom.zp.ua', 'inoutmail.de', 'inoutmail.eu', 'inoutmail.info', 'inoutmail.net', 'insorg-mail.info', 'instant-mail.de', 'instantemailaddress.com', 'instantlyemail.com', 'ip6.li', 'ipoo.org', 'irish2me.com', 'iwi.net', 'jetable.com', 'jetable.fr.nf', 'jetable.net', 'jetable.org', 'jnxjn.com', 'jourrapide.com', 'junk1e.com', 'junkmail.com', 'junkmail.ga', 'junkmail.gq', 'jupimail.com', 'kasmail.com', 'kaspop.com', 'keepmymail.com', 'killmail.com', 'killmail.net', 'kimsdisk.com', 'kingsq.ga', 'kiois.com', 'kir.ch.tc', 'klassmaster.com', 'klassmaster.net', 'klzlk.com', 'kook.ml', 'koszmail.pl', 'kulturbetrieb.info', 'kurzepost.de', 'l33r.eu', 'labetteraverouge.at', 'lackmail.net', 'lags.us', 'landmail.co', 'lastmail.co', 'lawlita.com', 'lazyinbox.com', 'legitmail.club', 'letthemeatspam.com', 'lhsdv.com', 'libox.fr', 'lifebyfood.com', 'link2mail.net', 'litedrop.com', 'loadby.us', 'login-email.cf', 'login-email.ga', 'login-email.ml', 'login-email.tk', 'lol.ovpn.to', 'lolfreak.net', 'lookugly.com', 'lopl.co.cc', 'lortemail.dk', 'lovemeleaveme.com', 'lr78.com', 'lroid.com', 'lukop.dk', 'm21.cc', 'm4ilweb.info', 'maboard.com', 'mail-filter.com', 'mail-temporaire.fr', 'mail.by', 'mail.mezimages.net', 'mail.zp.ua', 'mail114.net', 'mail1a.de', 'mail21.cc', 'mail2rss.org', 'mail333.com', 'mail4trash.com', 'mailbidon.com', 'mailbiz.biz', 'mailblocks.com', 'mailblog.biz', 'mailbucket.org', 'mailcat.biz', 'mailcatch.com', 'mailde.de', 'mailde.info', 'maildrop.cc', 'maildrop.cf', 'maildrop.ga', 'maildrop.gq', 'maildrop.ml', 'maildu.de', 'maildx.com', 'maileater.com', 'mailed.ro', 'maileimer.de', 'mailexpire.com', 'mailfa.tk', 'mailforspam.com', 'mailfree.ga', 'mailfree.gq', 'mailfree.ml', 'mailfreeonline.com', 'mailfs.com', 'mailguard.me', 'mailhazard.com', 'mailhazard.us', 'mailhz.me', 'mailimate.com', 'mailin8r.com', 'mailinater.com', 
'mailinator.com', 'mailinator.gq', 'mailinator.net', 'mailinator.org', 'mailinator.us', 'mailinator2.com', 'mailinator2.net', 'mailincubator.com', 'mailismagic.com', 'mailjunk.cf', 'mailjunk.ga', 'mailjunk.gq', 'mailjunk.ml', 'mailjunk.tk', 'mailmate.com', 'mailme.gq', 'mailme.ir', 'mailme.lv', 'mailme24.com', 'mailmetrash.com', 'mailmoat.com', 'mailms.com', 'mailnator.com', 'mailnesia.com', 'mailnull.com', 'mailorg.org', 'mailpick.biz', 'mailproxsy.com', 'mailquack.com', 'mailrock.biz', 'mailscrap.com', 'mailshell.com', 'mailsiphon.com', 'mailslapping.com', 'mailslite.com', 'mailspeed.ru', 'mailtemp.info', 'mailtome.de', 'mailtothis.com', 'mailtrash.net', 'mailtv.net', 'mailtv.tv', 'mailzilla.com', 'mailzilla.org', 'mailzilla.orgmbx.cc', 'makemetheking.com', 'mallinator.com', 'manifestgenerator.com', 'manybrain.com', 'mbx.cc', 'mciek.com', 'mega.zik.dj', 'meinspamschutz.de', 'meltmail.com', 'messagebeamer.de', 'mezimages.net', 'mfsa.ru', 'mierdamail.com', 'migmail.pl', 'migumail.com', 'mindless.com', 'ministry-of-silly-walks.de', 'mintemail.com', 'misterpinball.de', 'mjukglass.nu', 'moakt.com', 'mobi.web.id', 'mobileninja.co.uk', 'moburl.com', 'mohmal.com', 'moncourrier.fr.nf', 'monemail.fr.nf', 'monmail.fr.nf', 'monumentmail.com', 'msa.minsmail.com', 'mt2009.com', 'mt2014.com', 'mt2015.com', 'mx0.wwwnew.eu', 'my10minutemail.com', 'myalias.pw', 'mycard.net.ua', 'mycleaninbox.net', 'myemailboxy.com', 'mymail-in.net', 'mymailoasis.com', 'mynetstore.de', 'mypacks.net', 'mypartyclip.de', 'myphantomemail.com', 'myrambler.ru', 'mysamp.de', 'myspaceinc.com', 'myspaceinc.net', 'myspaceinc.org', 'myspacepimpedup.com', 'myspamless.com', 'mytemp.email', 'mytempemail.com', 'mytempmail.com', 'mytrashmail.com', 'nabuma.com', 'neomailbox.com', 'nepwk.com', 'nervmich.net', 'nervtmich.net', 'netmails.com', 'netmails.net', 'netzidiot.de', 'neverbox.com', 'nice-4u.com', 'nincsmail.com', 'nincsmail.hu', 'nmail.cf', 'nnh.com', 'no-spam.ws', 'noblepioneer.com', 'nobulk.com', 'noclickemail.com', 'nogmailspam.info', 'nomail.pw', 'nomail.xl.cx', 'nomail2me.com', 'nomorespamemails.com', 'nonspam.eu', 'nonspammer.de', 'noref.in', 'nospam.ze.tc', 'nospam4.us', 'nospamfor.us', 'nospammail.net', 'nospamthanks.info', 'notmailinator.com', 'notsharingmy.info', 'nowhere.org', 'nowmymail.com', 'nurfuerspam.de', 'nwldx.com', 'objectmail.com', 'obobbo.com', 'odaymail.com', 'odnorazovoe.ru', 'one-time.email', 'oneoffemail.com', 'oneoffmail.com', 'onewaymail.com', 'onlatedotcom.info', 'online.ms', 'oopi.org', 'opayq.com', 'opentrash.com', 'ordinaryamerican.net', 'otherinbox.com', 'ourklips.com', 'outlawspam.com', 'ovpn.to', 'owlpic.com', 'pancakemail.com', 'paplease.com', 'pepbot.com', 'pfui.ru', 'pimpedupmyspace.com', 'pjjkp.com', 'plexolan.de', 'poczta.onet.pl', 'politikerclub.de', 'poofy.org', 'pookmail.com', 'pop3.xyz', 'postalmail.biz', 'privacy.net', 'privatdemail.net', 'privy-mail.com', 'privymail.de', 'proxymail.eu', 'prtnx.com', 'prtz.eu', 'pubmail.io', 'punkass.com', 'putthisinyourspamdatabase.com', 'pwrby.com', 'q314.net', 'qisdo.com', 'qisoa.com', 'qoika.com', 'qq.com', 'quickinbox.com', 'quickmail.nl', 'rambler.ru', 'rainmail.biz', 'rcpt.at', 're-gister.com', 'reallymymail.com', 'realtyalerts.ca', 'recode.me', 'reconmail.com', 'recursor.net', 'recyclemail.dk', 'regbypass.com', 'regbypass.comsafe-mail.net', 'rejectmail.com', 'reliable-mail.com', 'remail.cf', 'remail.ga', 'renraku.in', 'rhyta.com', 'rklips.com', 'rmqkr.net', 'royal.net', 'rppkn.com', 'rtrtr.com', 's0ny.net', 'safe-mail.net', 'safersignup.de', 
'safetymail.info', 'safetypost.de', 'sandelf.de', 'sayawaka-dea.info', 'saynotospams.com', 'scatmail.com', 'schafmail.de', 'schrott-email.de', 'secretemail.de', 'secure-mail.biz', 'secure-mail.cc', 'selfdestructingmail.com', 'selfdestructingmail.org', 'sendspamhere.com', 'senseless-entertainment.com', 'services391.com', 'sharedmailbox.org', 'sharklasers.com', 'shieldedmail.com', 'shieldemail.com', 'shiftmail.com', 'shitmail.me', 'shitmail.org', 'shitware.nl', 'shmeriously.com', 'shortmail.net', 'showslow.de', 'sibmail.com', 'sinnlos-mail.de', 'siteposter.net', 'skeefmail.com', 'slapsfromlastnight.com', 'slaskpost.se', 'slipry.net', 'slopsbox.com', 'slowslow.de', 'slushmail.com', 'smashmail.de', 'smellfear.com', 'smellrear.com', 'smoug.net', 'snakemail.com', 'sneakemail.com', 'sneakmail.de', 'snkmail.com', 'sofimail.com', 'sofort-mail.de', 'softpls.asia', 'sogetthis.com', 'soisz.com', 'solvemail.info', 'soodonims.com', 'spam.la', 'spam.su', 'spam4.me', 'spamail.de', 'spamarrest.com', 'spamavert.com', 'spambob.com', 'spambob.net', 'spambob.org', 'spambog.com', 'spambog.de', 'spambog.net', 'spambog.ru', 'spambooger.com', 'spambox.info', 'spambox.irishspringrealty.com', 'spambox.us', 'spambpg.com', 'spamcannon.com', 'spamcannon.net', 'spamcero.com', 'spamcon.org', 'spamcorptastic.com', 'spamcowboy.com', 'spamcowboy.net', 'spamcowboy.org', 'spamday.com', 'spamex.com', 'spamfighter.cf', 'spamfighter.ga', 'spamfighter.gq', 'spamfighter.ml', 'spamfighter.tk', 'spamfree.eu', 'spamfree24.com', 'spamfree24.de', 'spamfree24.eu', 'spamfree24.info', 'spamfree24.net', 'spamfree24.org', 'spamgoes.in', 'spamgourmet.com', 'spamgourmet.net', 'spamgourmet.org', 'spamherelots.com', 'spamhereplease.com', 'spamhole.com', 'spamify.com', 'spaminator.de', 'spamkill.info', 'spaml.com', 'spaml.de', 'spammotel.com', 'spamobox.com', 'spamoff.de', 'spamsalad.in', 'spamslicer.com', 'spamsphere.com', 'spamspot.com', 'spamstack.net', 'spamthis.co.uk', 'spamthisplease.com', 'spamtrail.com', 'spamtroll.net', 'speed.1s.fr', 'spikio.com', 'spoofmail.de', 'spybox.de', 'squizzy.de', 'ssoia.com', 'startkeys.com', 'stexsy.com', 'stinkefinger.net', 'stop-my-spam.cf', 'stop-my-spam.com', 'stop-my-spam.ga', 'stop-my-spam.ml', 'stop-my-spam.tk', 'streetwisemail.com', 'stuffmail.de', 'super-auswahl.de', 'supergreatmail.com', 'supermailer.jp', 'superrito.com', 'superstachel.de', 'suremail.info', 'sute.jp', 'svk.jp', 'sweetxxx.de', 'tafmail.com', 'tagyourself.com', 'talkinator.com', 'tapchicuoihoi.com', 'teewars.org', 'teleworm.com', 'teleworm.us', 'temp-mail.com', 'temp-mail.net', 'temp-mail.org', 'temp-mail.ru', 'temp15qm.com', 'tempail.com', 'tempalias.com', 'tempe-mail.com', 'tempemail.biz', 'tempemail.co.za', 'tempemail.com', 'tempemail.net', 'tempemail.org', 'tempinbox.co.uk', 'tempinbox.com', 'tempmail.de', 'tempmail.eu', 'tempmail.it', 'tempmail2.com', 'tempmaildemo.com', 'tempmailer.com', 'tempmailer.de', 'tempomail.fr', 'temporarily.de', 'temporarioemail.com.br', 'temporaryemail.net', 'temporaryemail.us', 'temporaryforwarding.com', 'temporaryinbox.com', 'temporarymailaddress.com', 'tempsky.com', 'tempthe.net', 'tempymail.com', 'test.com', 'thanksnospam.info', 'thankyou2010.com', 'thc.st', 'thecloudindex.com', 'thisisnotmyrealemail.com', 'thismail.net', 'thismail.ru', 'throam.com', 'throwam.com', 'throwawayemailaddress.com', 'throwawaymail.com', 'tilien.com', 'tittbit.in', 'tizi.com', 'tmail.ws', 'tmailinator.com', 'tmpeml.info', 'toiea.com', 'tokenmail.de', 'toomail.biz', 'topranklist.de', 'tormail.net', 'tormail.org', 
'tradermail.info', 'trash-amil.com', 'trash-mail.at', 'trash-mail.cf', 'trash-mail.com', 'trash-mail.de', 'trash-mail.ga', 'trash-mail.gq', 'trash-mail.ml', 'trash-mail.tk', 'trash-me.com', 'trash2009.com', 'trash2010.com', 'trash2011.com', 'trashdevil.com', 'trashdevil.de', 'trashemail.de', 'trashmail.at', 'trashmail.com', 'trashmail.de', 'trashmail.me', 'trashmail.net', 'trashmail.org', 'trashmail.ws', 'trashmailer.com', 'trashymail.com', 'trashymail.net', 'trayna.com', 'trbvm.com', 'trialmail.de', 'trickmail.net', 'trillianpro.com', 'tryalert.com', 'turual.com', 'twinmail.de', 'twoweirdtricks.com', 'tyldd.com', 'ubismail.net', 'uggsrock.com', 'umail.net', 'unlimit.com', 'unmail.ru', 'upliftnow.com', 'uplipht.com', 'uroid.com', 'us.af', 'valemail.net', 'venompen.com', 'vermutlich.net', 'veryrealemail.com', 'vidchart.com', 'viditag.com', 'viewcastmedia.com', 'viewcastmedia.net', 'viewcastmedia.org', 'viralplays.com', 'vmail.me', 'voidbay.com', 'vomoto.com', 'vpn.st', 'vsimcard.com', 'vubby.com', 'w3internet.co.uk', 'walala.org', 'walkmail.net', 'watchever.biz', 'webemail.me', 'webm4il.info', 'webuser.in', 'wee.my', 'weg-werf-email.de', 'wegwerf-email-addressen.de', 'wegwerf-email.at', 'wegwerf-emails.de', 'wegwerfadresse.de', 'wegwerfemail.com', 'wegwerfemail.de', 'wegwerfmail.de', 'wegwerfmail.info', 'wegwerfmail.net', 'wegwerfmail.org', 'wem.com', 'wetrainbayarea.com', 'wetrainbayarea.org', 'wh4f.org', 'whatiaas.com', 'whatpaas.com', 'whatsaas.com', 'whopy.com', 'whyspam.me', 'wickmail.net', 'wilemail.com', 'willhackforfood.biz', 'willselfdestruct.com', 'winemaven.info', 'wmail.cf', 'writeme.com', 'wronghead.com', 'wuzup.net', 'wuzupmail.net', 'wwwnew.eu', 'wzukltd.com', 'xagloo.com', 'xemaps.com', 'xents.com', 'xmaily.com', 'xoxy.net', 'xww.ro', 'xyzfree.net', 'yapped.net', 'yep.it', 'yogamaven.com', 'yomail.info', 'yopmail.com', 'yopmail.fr', 'yopmail.gq', 'yopmail.net', 'yopmail.org', 'yoru-dea.com', 'you-spam.com', 'youmail.ga', 'yourdomain.com', 'ypmail.webarnak.fr.eu.org', 'yuurok.com', 'yyhmail.com', 'z1p.biz', 'za.com', 'zebins.com', 'zebins.eu', 'zehnminuten.de', 'zehnminutenmail.de', 'zetmail.com', 'zippymail.info', 'zoaxe.com', 'zoemail.com', 'zoemail.net', 'zoemail.org', 'zomg.info', 'zxcv.com', 'zxcvbnm.com', 'zzz.com', ] # reCAPTCHA API # NOTE: Using the recaptcha.net domain h/t https://github.com/google/recaptcha/issues/87#issuecomment-368252094 RECAPTCHA_SITE_KEY = None RECAPTCHA_SECRET_KEY = None RECAPTCHA_VERIFY_URL = 'https://recaptcha.net/recaptcha/api/siteverify' # akismet spam check AKISMET_APIKEY = None AKISMET_ENABLED = False # OOPSpam options OOPSPAM_APIKEY = None OOPSPAM_SPAM_LEVEL = 3 # The minimum level (out of 6) that is flagged as spam. OOPSPAM_CHECK_IP = True # Whether OOPSpam checks IP addresses. 
When testing locally, turn this off # spam options SPAM_CHECK_ENABLED = False SPAM_CHECK_PUBLIC_ONLY = True SPAM_ACCOUNT_SUSPENSION_ENABLED = False SPAM_ACCOUNT_SUSPENSION_THRESHOLD = timedelta(hours=24) SPAM_FLAGGED_MAKE_NODE_PRIVATE = False SPAM_FLAGGED_REMOVE_FROM_SEARCH = False SHARE_API_TOKEN = None # refresh campaign every 5 minutes CAMPAIGN_REFRESH_THRESHOLD = 5 * 60 # 5 minutes in seconds AWS_ACCESS_KEY_ID = None AWS_SECRET_ACCESS_KEY = None # sitemap default settings SITEMAP_TO_S3 = False SITEMAP_AWS_BUCKET = None SITEMAP_URL_MAX = 25000 SITEMAP_INDEX_MAX = 50000 SITEMAP_STATIC_URLS = [ OrderedDict([('loc', ''), ('changefreq', 'yearly'), ('priority', '0.5')]), OrderedDict([('loc', 'preprints'), ('changefreq', 'yearly'), ('priority', '0.5')]), OrderedDict([('loc', 'prereg'), ('changefreq', 'yearly'), ('priority', '0.5')]), OrderedDict([('loc', 'meetings'), ('changefreq', 'yearly'), ('priority', '0.5')]), OrderedDict([('loc', 'registries'), ('changefreq', 'yearly'), ('priority', '0.5')]), OrderedDict([('loc', 'reviews'), ('changefreq', 'yearly'), ('priority', '0.5')]), OrderedDict([('loc', 'explore/activity'), ('changefreq', 'weekly'), ('priority', '0.5')]), OrderedDict([('loc', 'support'), ('changefreq', 'yearly'), ('priority', '0.5')]), OrderedDict([('loc', 'faq'), ('changefreq', 'yearly'), ('priority', '0.5')]), ] SITEMAP_USER_CONFIG = OrderedDict([('loc', ''), ('changefreq', 'yearly'), ('priority', '0.5')]) SITEMAP_NODE_CONFIG = OrderedDict([('loc', ''), ('lastmod', ''), ('changefreq', 'monthly'), ('priority', '0.5')]) SITEMAP_PREPRINT_CONFIG = OrderedDict([('loc', ''), ('lastmod', ''), ('changefreq', 'yearly'), ('priority', '0.5')]) SITEMAP_PREPRINT_FILE_CONFIG = OrderedDict([('loc', ''), ('lastmod', ''), ('changefreq', 'yearly'), ('priority', '0.5')]) # For preventing indexing of QA nodes by Elastic and SHARE DO_NOT_INDEX_LIST = { 'tags': ['qatest', 'qa test'], 'titles': ['Bulk stress 201', 'Bulk stress 202', 'OSF API Registration test'], } CUSTOM_CITATIONS = { 'bluebook-law-review': 'bluebook', 'bluebook2': 'bluebook', 'bluebook-inline': 'bluebook' } #Email templates logo OSF_LOGO = 'osf_logo' OSF_PREPRINTS_LOGO = 'osf_preprints' OSF_MEETINGS_LOGO = 'osf_meetings' OSF_PREREG_LOGO = 'osf_prereg' OSF_REGISTRIES_LOGO = 'osf_registries' OSF_LOGO_LIST = [OSF_LOGO, OSF_PREPRINTS_LOGO, OSF_MEETINGS_LOGO, OSF_PREREG_LOGO, OSF_REGISTRIES_LOGO] FOOTER_LINKS = { 'terms': 'https://github.com/CenterForOpenScience/centerforopenscience.org/blob/master/TERMS_OF_USE.md', 'privacyPolicy': 'https://github.com/CenterForOpenScience/centerforopenscience.org/blob/master/PRIVACY_POLICY.md', 'cookies': 'https://github.com/CenterForOpenScience/centerforopenscience.org/blob/master/PRIVACY_POLICY.md#f-cookies', 'cos': 'https://cos.io', 'statusPage': 'https://status.cos.io/', 'apiDocs': 'https://developer.osf.io/', 'topGuidelines': 'http://cos.io/top/', 'rpp': 'https://osf.io/ezcuj/wiki/home/', 'rpcb': 'https://osf.io/e81xl/wiki/home/', 'twitter': 'http://twitter.com/OSFramework', 'facebook': 'https://www.facebook.com/CenterForOpenScience/', 'googleGroup': 'https://groups.google.com/forum/#!forum/openscienceframework', 'github': 'https://www.github.com/centerforopenscience', } CHRONOS_USE_FAKE_FILE = False CHRONOS_FAKE_FILE_URL = '' CHRONOS_USERNAME = os_env.get('CHRONOS_USERNAME', '') CHRONOS_PASSWORD = os_env.get('CHRONOS_PASSWORD', '') CHRONOS_API_KEY = os_env.get('CHRONOS_API_KEY', '') CHRONOS_HOST = os_env.get('CHRONOS_HOST', 'https://sandbox.api.chronos-oa.com') VERIFY_CHRONOS_SSL_CERT = not 
DEV_MODE

# Maximum minutes we allow ChronosSubmission status to be stale (only update when user is requesting it)
CHRONOS_SUBMISSION_UPDATE_TIME = timedelta(minutes=5)

DS_METRICS_OSF_TOKEN = None
DS_METRICS_BASE_FOLDER = None
REG_METRICS_OSF_TOKEN = None
REG_METRICS_BASE_FOLDER = None

STORAGE_WARNING_THRESHOLD = .9  # fraction of the storage limit used before users get a warning message
STORAGE_LIMIT_PUBLIC = 50
STORAGE_LIMIT_PRIVATE = 5

GBs = 10 ** 9

# Needs to be here so the enum can be used in the admin template
def forDjango(cls):
    cls.do_not_call_in_templates = True
    return cls

@forDjango
@enum.unique
class StorageLimits(enum.IntEnum):
    """ Values here are in GBs """
    NOT_CALCULATED = 0
    DEFAULT = 1
    APPROACHING_PRIVATE = 2
    OVER_PRIVATE = 3
    APPROACHING_PUBLIC = 4
    OVER_PUBLIC = 5

    @classmethod
    def from_node_usage(cls, usage_bytes, private_limit=None, public_limit=None):
        """ Return the status indicating whether a node's usage is at or over
        one of the configured storage thresholds. """
        public_limit = public_limit or STORAGE_LIMIT_PUBLIC
        private_limit = private_limit or STORAGE_LIMIT_PRIVATE
        if usage_bytes is None:
            return cls.NOT_CALCULATED
        if usage_bytes >= float(public_limit) * GBs:
            return cls.OVER_PUBLIC
        elif usage_bytes >= float(public_limit) * STORAGE_WARNING_THRESHOLD * GBs:
            return cls.APPROACHING_PUBLIC
        elif usage_bytes >= float(private_limit) * GBs:
            return cls.OVER_PRIVATE
        elif usage_bytes >= float(private_limit) * STORAGE_WARNING_THRESHOLD * GBs:
            return cls.APPROACHING_PRIVATE
        else:
            return cls.DEFAULT


STORAGE_USAGE_CACHE_TIMEOUT = 3600 * 24  # seconds in an hour times 24 hours (one day)
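# A quick illustration (not part of the settings) of how the thresholds above
# combine in StorageLimits.from_node_usage, using the default 50 GB public /
# 5 GB private limits and the 90% warning threshold:
if __name__ == '__main__':
    assert StorageLimits.from_node_usage(None) is StorageLimits.NOT_CALCULATED
    assert StorageLimits.from_node_usage(60 * GBs) is StorageLimits.OVER_PUBLIC            # >= 50 GB
    assert StorageLimits.from_node_usage(46 * GBs) is StorageLimits.APPROACHING_PUBLIC     # >= 45 GB (90% of 50)
    assert StorageLimits.from_node_usage(6 * GBs) is StorageLimits.OVER_PRIVATE            # >= 5 GB
    assert StorageLimits.from_node_usage(4.6 * GBs) is StorageLimits.APPROACHING_PRIVATE   # >= 4.5 GB (90% of 5)
    assert StorageLimits.from_node_usage(1 * GBs) is StorageLimits.DEFAULT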
from __future__ import unicode_literals

import itsdangerous
import mock
import pytest
import pytz

from django.utils import timezone
from addons.base.utils import get_mfr_url
from addons.github.models import GithubFileNode
from addons.osfstorage import settings as osfstorage_settings
from addons.osfstorage.listeners import checkin_files_task
from api.base.settings.defaults import API_BASE
from api_tests import utils as api_utils
from framework.auth.core import Auth
from osf.models import NodeLog, Session, QuickFilesNode
from osf.utils.permissions import WRITE, READ
from osf.utils.workflows import DefaultStates
from osf_tests.factories import (
    AuthUserFactory,
    CommentFactory,
    ProjectFactory,
    UserFactory,
    PreprintFactory,
)
from website import settings as website_settings


# stolen from^W^Winspired by DRF
# rest_framework.fields.DateTimeField.to_representation
def _dt_to_iso8601(value):
    iso8601 = value.isoformat()
    if iso8601.endswith('+00:00'):
        iso8601 = iso8601[:-6] + 'Z'  # microsecond precision
    return iso8601


@pytest.fixture()
def user():
    return AuthUserFactory()


@pytest.mark.django_db
@pytest.mark.enable_quickfiles_creation
class TestFileView:

    @pytest.fixture()
    def node(self, user):
        return ProjectFactory(creator=user, comment_level='public')

    @pytest.fixture()
    def quickfiles_node(self, user):
        return QuickFilesNode.objects.get(creator=user)

    @pytest.fixture()
    def file(self, user, node):
        return api_utils.create_test_file(node, user, create_guid=False)

    @pytest.fixture()
    def file_url(self, file):
        return '/{}files/{}/'.format(API_BASE, file._id)

    def test_must_have_auth_and_be_contributor(self, app, file_url):
        # test_must_have_auth(self, app, file_url):
        res = app.get(file_url, expect_errors=True)
        assert res.status_code == 401

        # test_must_be_contributor(self, app, file_url):
        non_contributor = AuthUserFactory()
        res = app.get(file_url, auth=non_contributor.auth, expect_errors=True)
        assert res.status_code == 403

    def test_deleted_file_return_410(self, app, node, user):
        deleted_file = api_utils.create_test_file(node, user, create_guid=True)
        url_with_guid = '/{}files/{}/'.format(
            API_BASE, deleted_file.get_guid()._id
        )
        url_with_id = '/{}files/{}/'.format(API_BASE, deleted_file._id)

        res = app.get(url_with_guid, auth=user.auth)
        assert res.status_code == 200

        res = app.get(url_with_id, auth=user.auth)
        assert res.status_code == 200

        deleted_file.delete(user=user, save=True)

        res = app.get(url_with_guid, auth=user.auth, expect_errors=True)
        assert res.status_code == 410

        res = app.get(url_with_id, auth=user.auth, expect_errors=True)
        assert res.status_code == 410

    def test_disabled_users_quickfiles_file_detail_gets_410(self, app, quickfiles_node, user):
        file_node = api_utils.create_test_file(quickfiles_node, user, create_guid=True)
        url_with_guid = '/{}files/{}/'.format(
            API_BASE, file_node.get_guid()._id
        )
        url_with_id = '/{}files/{}/'.format(API_BASE, file_node._id)

        res = app.get(url_with_id)
        assert res.status_code == 200

        res = app.get(url_with_guid, auth=user.auth)
        assert res.status_code == 200

        user.is_disabled = True
        user.save()

        res = app.get(url_with_id, expect_errors=True)
        assert res.json['errors'][0]['detail'] == 'This user has been deactivated and their' \
                                                  ' quickfiles are no longer available.'
        assert res.status_code == 410

        res = app.get(url_with_guid, expect_errors=True)
        assert res.json['errors'][0]['detail'] == 'This user has been deactivated and their' \
                                                  ' quickfiles are no longer available.'
        assert res.status_code == 410

    def test_file_guid_guid_status(self, app, user, file, file_url):
        # test_unvisited_file_has_no_guid
        res = app.get(file_url, auth=user.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['guid'] is None

        # test_visited_file_has_guid
        guid = file.get_guid(create=True)
        res = app.get(file_url, auth=user.auth)
        assert res.status_code == 200
        assert guid is not None
        assert res.json['data']['attributes']['guid'] == guid._id

    def test_file_with_wrong_guid(self, app, user):
        url = '/{}files/{}/'.format(API_BASE, user._id)
        res = app.get(url, auth=user.auth, expect_errors=True)
        assert res.status_code == 404

    @mock.patch('api.base.throttling.CreateGuidThrottle.allow_request')
    def test_file_guid_not_created_with_basic_auth(
            self, mock_allow, app, user, file_url):
        res = app.get('{}?create_guid=1'.format(file_url), auth=user.auth)
        guid = res.json['data']['attributes'].get('guid', None)
        assert res.status_code == 200
        assert mock_allow.call_count == 1
        assert guid is None

    @mock.patch('api.base.throttling.CreateGuidThrottle.allow_request')
    def test_file_guid_created_with_cookie(
            self, mock_allow, app, user, file_url, file):
        session = Session(data={'auth_user_id': user._id})
        session.save()
        cookie = itsdangerous.Signer(
            website_settings.SECRET_KEY
        ).sign(session._id)
        app.set_cookie(website_settings.COOKIE_NAME, cookie.decode())

        res = app.get('{}?create_guid=1'.format(file_url), auth=user.auth)
        app.reset()  # clear cookie

        assert res.status_code == 200

        guid = res.json['data']['attributes'].get('guid', None)
        assert guid is not None
        assert guid == file.get_guid()._id
        assert mock_allow.call_count == 1

    def test_get_file(self, app, user, file_url, file):
        res = app.get(file_url, auth=user.auth)
        file.versions.first().reload()
        assert res.status_code == 200
        assert set(res.json.keys()) == {'meta', 'data'}
        attributes = res.json['data']['attributes']
        assert attributes['path'] == file.path
        assert attributes['kind'] == file.kind
        assert attributes['name'] == file.name
        assert attributes['materialized_path'] == file.materialized_path
        assert attributes['last_touched'] is None
        assert attributes['provider'] == file.provider
        assert attributes['size'] == file.versions.first().size
        assert attributes['current_version'] == len(file.history)
        assert attributes['date_modified'] == _dt_to_iso8601(
            file.versions.first().created.replace(tzinfo=pytz.utc)
        )
        assert attributes['date_created'] == _dt_to_iso8601(
            file.versions.last().created.replace(tzinfo=pytz.utc)
        )
        assert attributes['extra']['hashes']['md5'] is None
        assert attributes['extra']['hashes']['sha256'] is None
        assert attributes['tags'] == []
        # make sure download link has a trailing slash
        # so that downloads don't 301
        assert res.json['data']['links']['download'].endswith('/')

    def test_file_has_rel_link_to_owning_project(
            self, app, user, file_url, node):
        res = app.get(file_url, auth=user.auth)
        assert res.status_code == 200
        assert 'target' in res.json['data']['relationships'].keys()
        expected_url = node.api_v2_url
        actual_url = res.json['data']['relationships']['target']['links']['related']['href']
        assert expected_url in actual_url

    def test_file_has_comments_link(self, app, user, file, file_url):
        file.get_guid(create=True)
        res = app.get(file_url, auth=user.auth)
        assert res.status_code == 200
        assert 'comments' in res.json['data']['relationships'].keys()
        url = res.json['data']['relationships']['comments']['links']['related']['href']
        assert app.get(url, auth=user.auth).status_code == 200
        assert res.json['data']['type'] == 'files'

    def test_file_has_correct_unread_comments_count(
            self, app, user, file, node):
        contributor = AuthUserFactory()
        node.add_contributor(contributor, auth=Auth(user), save=True)
        CommentFactory(
            node=node,
            target=file.get_guid(create=True),
            user=contributor,
            page='files'
        )
        res = app.get(
            '/{}files/{}/?related_counts=True'.format(API_BASE, file._id),
            auth=user.auth
        )
        assert res.status_code == 200
        unread_comments = res.json['data']['relationships']['comments']['links']['related']['meta']['unread']
        assert unread_comments == 1

    def test_only_project_contrib_can_comment_on_closed_project(
            self, app, user, node, file_url):
        node.comment_level = 'private'
        node.is_public = True
        node.save()

        res = app.get(file_url, auth=user.auth)
        can_comment = res.json['data']['attributes']['current_user_can_comment']
        assert res.status_code == 200
        assert can_comment is True

        non_contributor = AuthUserFactory()
        res = app.get(file_url, auth=non_contributor.auth)
        can_comment = res.json['data']['attributes']['current_user_can_comment']
        assert res.status_code == 200
        assert can_comment is False

    def test_logged_or_not_user_comment_status_on_open_project(
            self, app, node, file_url):
        node.is_public = True
        node.save()

        # test_any_loggedin_user_can_comment_on_open_project(self, app, node,
        # file_url):
        non_contributor = AuthUserFactory()
        res = app.get(file_url, auth=non_contributor.auth)
        can_comment = res.json['data']['attributes']['current_user_can_comment']
        assert res.status_code == 200
        assert can_comment is True

        # test_non_logged_in_user_cant_comment(self, app, file_url, node):
        res = app.get(file_url)
        can_comment = res.json['data']['attributes']['current_user_can_comment']
        assert res.status_code == 200
        assert can_comment is False

    def test_checkout(self, app, user, file, file_url, node):
        assert file.checkout is None
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': user._id}}},
            auth=user.auth)
        file.reload()
        file.save()
        node.reload()
        assert res.status_code == 200
        assert file.checkout == user

        res = app.get(file_url, auth=user.auth)
        assert node.logs.count() == 2
        assert node.logs.latest().action == NodeLog.CHECKED_OUT
        assert node.logs.latest().user == user
        assert user._id == res.json['data']['relationships']['checkout']['links']['related']['meta']['id']
        assert '/{}users/{}/'.format(
            API_BASE, user._id
        ) in res.json['data']['relationships']['checkout']['links']['related']['href']

        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': None}}},
            auth=user.auth)
        file.reload()
        assert file.checkout is None
        assert res.status_code == 200

    def test_checkout_file_error(self, app, user, file_url, file):
        # test_checkout_file_no_type
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'attributes': {'checkout': user._id}}},
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 400

        # test_checkout_file_no_id
        res = app.put_json_api(
            file_url,
            {'data': {'type': 'files', 'attributes': {'checkout': user._id}}},
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 400

        # test_checkout_file_incorrect_type
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'Wrong type.', 'attributes': {'checkout': user._id}}},
            auth=user.auth, expect_errors=True)
        assert res.status_code == 409

        # test_checkout_file_incorrect_id
        res = app.put_json_api(
            file_url,
            {'data': {'id': '12345', 'type': 'files', 'attributes': {'checkout': user._id}}},
            auth=user.auth, expect_errors=True)
        assert res.status_code == 409

        # test_checkout_file_no_attributes
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files'}},
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 400

    def test_must_set_self(self, app, user, file, file_url):
        user_unauthorized = UserFactory()
        assert file.checkout is None
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': user_unauthorized._id}}},
            auth=user.auth, expect_errors=True,
        )
        file.reload()
        assert res.status_code == 400
        assert file.checkout is None

    def test_must_be_self(self, app, file, file_url):
        user = AuthUserFactory()
        file.checkout = user
        file.save()
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': user._id}}},
            auth=user.auth, expect_errors=True,
        )
        file.reload()
        assert res.status_code == 403
        assert file.checkout == user

    def test_admin_can_checkin(self, app, user, node, file, file_url):
        user_unauthorized = UserFactory()
        node.add_contributor(user_unauthorized)
        file.checkout = user_unauthorized
        file.save()
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': None}}},
            auth=user.auth, expect_errors=True,
        )
        file.reload()
        node.reload()
        assert res.status_code == 200
        assert file.checkout is None
        assert node.logs.latest().action == NodeLog.CHECKED_IN
        assert node.logs.latest().user == user

    def test_admin_can_checkout(self, app, user, file_url, file, node):
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': user._id}}},
            auth=user.auth, expect_errors=True,
        )
        file.reload()
        node.reload()
        assert res.status_code == 200
        assert file.checkout == user
        assert node.logs.latest().action == NodeLog.CHECKED_OUT
        assert node.logs.latest().user == user

    def test_cannot_checkin_when_already_checked_in(
            self, app, user, node, file, file_url):
        count = node.logs.count()
        assert not file.is_checked_out
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': None}}},
            auth=user.auth, expect_errors=True,
        )
        file.reload()
        node.reload()
        assert res.status_code == 200
        assert node.logs.count() == count
        assert file.checkout is None

    def test_cannot_checkout_when_checked_out(
            self, app, user, node, file, file_url):
        user_unauthorized = UserFactory()
        node.add_contributor(user_unauthorized)
        file.checkout = user_unauthorized
        file.save()
        count = node.logs.count()
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': user._id}}},
            auth=user.auth, expect_errors=True,
        )
        file.reload()
        node.reload()
        assert res.status_code == 200
        assert file.checkout == user_unauthorized
        assert node.logs.count() == count

    def test_noncontrib_and_read_contrib_cannot_checkout(
            self, app, file, node, file_url):
        # test_noncontrib_cannot_checkout
        non_contrib = AuthUserFactory()
        assert file.checkout is None
        assert not node.has_permission(non_contrib, READ)
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': non_contrib._id}}},
            auth=non_contrib.auth, expect_errors=True,
        )
        file.reload()
        node.reload()
        assert res.status_code == 403
        assert file.checkout is None
        assert node.logs.latest().action != NodeLog.CHECKED_OUT

        # test_read_contrib_cannot_checkout
        read_contrib = AuthUserFactory()
        node.add_contributor(read_contrib, permissions=READ)
        node.save()
        assert not node.can_edit(user=read_contrib)
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': None}}},
            auth=read_contrib.auth, expect_errors=True)
        file.reload()
        assert res.status_code == 403
        assert file.checkout is None
        assert node.logs.latest().action != NodeLog.CHECKED_OUT

    def test_write_contrib_can_checkin(self, app, node, file, file_url):
        write_contrib = AuthUserFactory()
        node.add_contributor(write_contrib, permissions=WRITE)
        node.save()
        assert node.can_edit(user=write_contrib)
        file.checkout = write_contrib
        file.save()
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': None}}},
            auth=write_contrib.auth,
        )
        file.reload()
        assert res.status_code == 200
        assert file.checkout is None

    @mock.patch('addons.osfstorage.listeners.enqueue_postcommit_task')
    def test_removed_contrib_files_checked_in(self, mock_enqueue, app, node, file):
        write_contrib = AuthUserFactory()
        node.add_contributor(write_contrib, permissions=WRITE)
        node.save()
        assert node.can_edit(user=write_contrib)
        file.checkout = write_contrib
        file.save()
        assert file.is_checked_out
        node.remove_contributor(write_contrib, auth=Auth(write_contrib))
        mock_enqueue.assert_called_with(checkin_files_task, (node._id, write_contrib._id,), {}, celery=True)

    def test_must_be_osfstorage(self, app, user, file, file_url):
        file.recast(GithubFileNode._typedmodels_type)
        file.save()
        res = app.put_json_api(
            file_url,
            {'data': {'id': file._id, 'type': 'files', 'attributes': {'checkout': user._id}}},
            auth=user.auth, expect_errors=True,
        )
        assert res.status_code == 403

    def test_get_file_guids_misc(self, app, user, file, node):
        # test_get_file_resolves_guids
        guid = file.get_guid(create=True)
        url = '/{}files/{}/'.format(API_BASE, guid._id)
        res = app.get(url, auth=user.auth)
        assert res.status_code == 200
        assert set(res.json.keys()) == {'meta', 'data'}
        assert res.json['data']['attributes']['path'] == file.path

        # test_get_file_invalid_guid_gives_404
        url = '/{}files/{}/'.format(API_BASE, 'asdasasd')
        res = app.get(url, auth=user.auth, expect_errors=True)
        assert res.status_code == 404

        # test_get_file_non_file_guid_gives_404
        url = '/{}files/{}/'.format(API_BASE, node._id)
        res = app.get(url, auth=user.auth, expect_errors=True)
        assert res.status_code == 404

    def test_current_version_is_equal_to_length_of_history(
            self, app, user, file_url, file):
        res = app.get(file_url, auth=user.auth)
        assert res.json['data']['attributes']['current_version'] == 1
        for version in range(2, 4):
            file.create_version(user, {
                'object': '06d80e' + str(version),
                'service': 'cloud',
                osfstorage_settings.WATERBUTLER_RESOURCE: 'osf',
            }, {'size': 1337, 'contentType': 'img/png'}).save()
            res = app.get(file_url, auth=user.auth)
            assert res.json['data']['attributes']['current_version'] == version

    # Regression test for OSF-7758
    def test_folder_files_relationships_contains_guid_not_id(
            self, app, user, node):
        folder = node.get_addon('osfstorage').get_root(
        ).append_folder('I\'d be a teacher!!')
        folder.save()
        folder_url = '/{}files/{}/'.format(API_BASE, folder._id)
        res = app.get(folder_url, auth=user.auth)
        split_href = res.json['data']['relationships']['files']['links']['related']['href'].split('/')
        assert node._id in split_href
        assert node.id not in split_href

    def test_embed_user_on_quickfiles_detail(self, app, user):
        quickfiles = QuickFilesNode.objects.get(creator=user)
        osfstorage = quickfiles.get_addon('osfstorage')
        root = osfstorage.get_root()
        test_file = root.append_file('speedyfile.txt')

        url = '/{}files/{}/?embed=user'.format(API_BASE, test_file._id)
        res = app.get(url, auth=user.auth)
        assert res.json['data'].get('embeds', None)
        assert res.json['data']['embeds'].get('user')
        assert res.json['data']['embeds']['user']['data']['id'] == user._id


@pytest.mark.django_db
class TestFileVersionView:

    @pytest.fixture()
    def node(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def osfstorage(self, node):
        return node.get_addon('osfstorage')

    @pytest.fixture()
    def root_node(self, osfstorage):
        return osfstorage.get_root()

    @pytest.fixture()
    def file(self, root_node, user):
        file = root_node.append_file('test_file')
        file.create_version(user, {
            'object': '06d80e',
            'service': 'cloud',
            osfstorage_settings.WATERBUTLER_RESOURCE: 'osf',
        }, {
            'size': 1337,
            'contentType': 'img/png'
        }).save()
        return file

    def test_listing(self, app, user, file):
        file.create_version(user, {
            'object': '0683m38e',
            'service': 'cloud',
            osfstorage_settings.WATERBUTLER_RESOURCE: 'osf',
        }, {
            'size': 1347,
            'contentType': 'img/png'
        }).save()
        res = app.get(
            '/{}files/{}/versions/'.format(API_BASE, file._id),
            auth=user.auth,
        )
        assert res.status_code == 200
        assert len(res.json['data']) == 2
        assert res.json['data'][0]['id'] == '2'
        assert res.json['data'][0]['attributes']['name'] == file.name
        assert res.json['data'][1]['id'] == '1'
        assert res.json['data'][1]['attributes']['name'] == file.name

    def test_load_and_property(self, app, user, file):
        # test_by_id
        res = app.get(
            '/{}files/{}/versions/1/'.format(API_BASE, file._id),
            auth=user.auth,
        )
        assert res.status_code == 200
        assert res.json['data']['id'] == '1'

        mfr_url = get_mfr_url(file, 'osfstorage')
        render_link = res.json['data']['links']['render']
        download_link = res.json['data']['links']['download']
        assert mfr_url in render_link
        assert download_link in render_link
        assert 'revision=1' in render_link

        guid = file.get_guid(create=True)._id
        res = app.get(
            '/{}files/{}/versions/1/'.format(API_BASE, file._id),
            auth=user.auth,
        )
        render_link = res.json['data']['links']['render']
        download_link = res.json['data']['links']['download']
        assert mfr_url in render_link
        assert download_link in render_link
        assert guid in render_link
        assert 'revision=1' in render_link

        # test_read_only
        assert app.put(
            '/{}files/{}/versions/1/'.format(API_BASE, file._id),
            expect_errors=True, auth=user.auth,
        ).status_code == 405
        assert app.post(
            '/{}files/{}/versions/1/'.format(API_BASE, file._id),
            expect_errors=True, auth=user.auth,
        ).status_code == 405
        assert app.delete(
            '/{}files/{}/versions/1/'.format(API_BASE, file._id),
            expect_errors=True, auth=user.auth,
        ).status_code == 405


@pytest.mark.django_db
class TestFileTagging:

    @pytest.fixture()
    def node(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def file_one(self, user, node):
        return api_utils.create_test_file(
            node, user, filename='file_one')

    @pytest.fixture()
    def payload(self, file_one):
        payload = {
            'data': {
                'type': 'files',
                'id': file_one._id,
                'attributes': {
                    'checkout': None,
                    'tags': ['goofy']
                }
            }
        }
        return payload

    @pytest.fixture()
    def url(self, file_one):
        return '/{}files/{}/'.format(API_BASE, file_one._id)

    def test_tags_add_and_update_properly(self, app, user, url, payload):
        # test_tags_add_properly
        res = app.put_json_api(url, payload, auth=user.auth)
        assert res.status_code == 200
        # Ensure adding tag data is correct from the PUT response
        assert len(res.json['data']['attributes']['tags']) == 1
        assert res.json['data']['attributes']['tags'][0] == 'goofy'

        # test_tags_update_properly
        # Ensure removing and adding tag data is correct from the PUT response
        payload['data']['attributes']['tags'] = ['goofier']
        res = app.put_json_api(url, payload, auth=user.auth)
        assert res.status_code == 200
        assert len(res.json['data']['attributes']['tags']) == 1
        assert res.json['data']['attributes']['tags'][0] == 'goofier'

    def test_tags_add_and_remove_properly(self, app, user, url, payload):
        app.put_json_api(url, payload, auth=user.auth)
        payload['data']['attributes']['tags'] = []
        res = app.put_json_api(url, payload, auth=user.auth)
        assert res.status_code == 200
        assert len(res.json['data']['attributes']['tags']) == 0

    def test_put_wo_tags_doesnt_remove_tags(self, app, user, url, payload):
        app.put_json_api(url, payload, auth=user.auth)
        payload['data']['attributes'] = {'checkout': None}
        res = app.put_json_api(url, payload, auth=user.auth)
        assert res.status_code == 200
        # Ensure tags are unchanged in the PUT response
        assert len(res.json['data']['attributes']['tags']) == 1
        assert res.json['data']['attributes']['tags'][0] == 'goofy'

    def test_add_and_remove_tag_adds_log(self, app, user, url, payload, node):
        # test_add_tag_adds_log
        count = node.logs.count()
        app.put_json_api(url, payload, auth=user.auth)
        assert node.logs.count() == count + 1
        assert NodeLog.FILE_TAG_ADDED == node.logs.latest().action

        # test_remove_tag_adds_log
        payload['data']['attributes']['tags'] = []
        count = node.logs.count()
        app.put_json_api(url, payload, auth=user.auth)
        assert node.logs.count() == count + 1
        assert NodeLog.FILE_TAG_REMOVED == node.logs.latest().action


@pytest.mark.django_db
class TestPreprintFileView:

    @pytest.fixture()
    def preprint(self, user):
        return PreprintFactory(creator=user)

    @pytest.fixture()
    def primary_file(self, preprint):
        return preprint.primary_file

    @pytest.fixture()
    def file_url(self, primary_file):
        return '/{}files/{}/'.format(API_BASE, primary_file._id)

    @pytest.fixture()
    def other_user(self):
        return AuthUserFactory()

    def test_published_preprint_file(self, app, file_url, preprint, user, other_user):
        # Unauthenticated
        res = app.get(file_url, expect_errors=True)
        assert res.status_code == 200

        # Non contrib
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 200

        # Write contrib
        preprint.add_contributor(other_user, WRITE, save=True)
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 200

        # Admin contrib
        res = app.get(file_url, auth=user.auth, expect_errors=True)
        assert res.status_code == 200

    def test_unpublished_preprint_file(self, app, file_url, preprint, user, other_user):
        preprint.is_published = False
        preprint.save()

        # Unauthenticated
        res = app.get(file_url, expect_errors=True)
        assert res.status_code == 401

        # Non contrib
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 403

        # Write contrib
        preprint.add_contributor(other_user, WRITE, save=True)
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 200

        # Admin contrib
        res = app.get(file_url, auth=user.auth, expect_errors=True)
        assert res.status_code == 200

    def test_private_preprint_file(self, app, file_url, preprint, user, other_user):
        preprint.is_public = False
        preprint.save()

        # Unauthenticated
        res = app.get(file_url, expect_errors=True)
        assert res.status_code == 401

        # Non contrib
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 403

        # Write contrib
        preprint.add_contributor(other_user, WRITE, save=True)
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 200

        # Admin contrib
        res = app.get(file_url, auth=user.auth, expect_errors=True)
        assert res.status_code == 200

    def test_deleted_preprint_file(self, app, file_url, preprint, user, other_user):
        preprint.deleted = timezone.now()
        preprint.save()

        # Unauthenticated
        res = app.get(file_url, expect_errors=True)
        assert res.status_code == 410

        # Non contrib
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 410

        # Write contrib
        preprint.add_contributor(other_user, WRITE, save=True)
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 410

        # Admin contrib
        res = app.get(file_url, auth=user.auth, expect_errors=True)
        assert res.status_code == 410

    def test_abandoned_preprint_file(self, app, file_url, preprint, user, other_user):
        preprint.machine_state = DefaultStates.INITIAL.value
        preprint.save()

        # Unauthenticated
        res = app.get(file_url, expect_errors=True)
        assert res.status_code == 401

        # Non contrib
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 403

        # Write contrib
        preprint.add_contributor(other_user, WRITE, save=True)
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 403

        # Admin contrib
        res = app.get(file_url, auth=user.auth, expect_errors=True)
        assert res.status_code == 200

    def test_withdrawn_preprint_files(self, app, file_url, preprint, user, other_user):
        preprint.date_withdrawn = timezone.now()
        preprint.save()

        # Unauthenticated
        res = app.get(file_url, expect_errors=True)
        assert res.status_code == 401

        # Noncontrib
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 403

        # Write contributor
        preprint.add_contributor(other_user, WRITE, save=True)
        res = app.get(file_url, auth=other_user.auth, expect_errors=True)
        assert res.status_code == 403

        # Admin contrib
        res = app.get(file_url, auth=user.auth, expect_errors=True)
        assert res.status_code == 403
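# A quick check of the `_dt_to_iso8601` helper defined at the top of this
# module (a sketch run in a shell, not part of the test suite):
#
#     >>> import datetime, pytz
#     >>> _dt_to_iso8601(datetime.datetime(2019, 1, 2, 3, 4, 5, 123456, tzinfo=pytz.utc))
#     '2019-01-02T03:04:05.123456Z'
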
# Source repo: felliott/osf.io
# Test file:   api_tests/files/views/test_file_detail.py
# Code file:   website/settings/defaults.py
""" Helper functions that convert strftime formats into more readable representations. """ from rest_framework import ISO_8601 def datetime_formats(formats): format = ', '.join(formats).replace( ISO_8601, 'YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z]' ) return humanize_strptime(format) def date_formats(formats): format = ', '.join(formats).replace(ISO_8601, 'YYYY[-MM[-DD]]') return humanize_strptime(format) def time_formats(formats): format = ', '.join(formats).replace(ISO_8601, 'hh:mm[:ss[.uuuuuu]]') return humanize_strptime(format) def humanize_strptime(format_string): # Note that we're missing some of the locale specific mappings that # don't really make sense. mapping = { "%Y": "YYYY", "%y": "YY", "%m": "MM", "%b": "[Jan-Dec]", "%B": "[January-December]", "%d": "DD", "%H": "hh", "%I": "hh", # Requires '%p' to differentiate from '%H'. "%M": "mm", "%S": "ss", "%f": "uuuuuu", "%a": "[Mon-Sun]", "%A": "[Monday-Sunday]", "%p": "[AM|PM]", "%z": "[+HHMM|-HHMM]" } for key, val in mapping.items(): format_string = format_string.replace(key, val) return format_string
# coding: utf-8
from __future__ import unicode_literals

import base64

import pytest
from django.conf.urls import include, url
from django.contrib.auth.models import User
from django.db import models
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.utils import six

from rest_framework import (
    HTTP_HEADER_ENCODING, exceptions, permissions, renderers, status
)
from rest_framework.authentication import (
    BaseAuthentication, BasicAuthentication, SessionAuthentication,
    TokenAuthentication
)
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.views import obtain_auth_token
from rest_framework.compat import is_authenticated
from rest_framework.response import Response
from rest_framework.test import APIClient, APIRequestFactory
from rest_framework.views import APIView

factory = APIRequestFactory()


class CustomToken(models.Model):
    key = models.CharField(max_length=40, primary_key=True)
    user = models.OneToOneField(User, on_delete=models.CASCADE)


class CustomTokenAuthentication(TokenAuthentication):
    model = CustomToken


class CustomKeywordTokenAuthentication(TokenAuthentication):
    keyword = 'Bearer'


class MockView(APIView):
    permission_classes = (permissions.IsAuthenticated,)

    def get(self, request):
        return HttpResponse({'a': 1, 'b': 2, 'c': 3})

    def post(self, request):
        return HttpResponse({'a': 1, 'b': 2, 'c': 3})

    def put(self, request):
        return HttpResponse({'a': 1, 'b': 2, 'c': 3})


urlpatterns = [
    url(
        r'^session/$',
        MockView.as_view(authentication_classes=[SessionAuthentication])
    ),
    url(
        r'^basic/$',
        MockView.as_view(authentication_classes=[BasicAuthentication])
    ),
    url(
        r'^token/$',
        MockView.as_view(authentication_classes=[TokenAuthentication])
    ),
    url(
        r'^customtoken/$',
        MockView.as_view(authentication_classes=[CustomTokenAuthentication])
    ),
    url(
        r'^customkeywordtoken/$',
        MockView.as_view(
            authentication_classes=[CustomKeywordTokenAuthentication]
        )
    ),
    url(r'^auth-token/$', obtain_auth_token),
    url(r'^auth/', include('rest_framework.urls', namespace='rest_framework')),
]


@override_settings(ROOT_URLCONF='tests.test_authentication')
class BasicAuthTests(TestCase):
    """Basic authentication"""

    def setUp(self):
        self.csrf_client = APIClient(enforce_csrf_checks=True)
        self.username = 'john'
        self.email = 'lennon@thebeatles.com'
        self.password = 'password'
        self.user = User.objects.create_user(
            self.username, self.email, self.password
        )

    def test_post_form_passing_basic_auth(self):
        """Ensure POSTing form over basic auth with correct credentials passes and does not require CSRF"""
        credentials = ('%s:%s' % (self.username, self.password))
        base64_credentials = base64.b64encode(
            credentials.encode(HTTP_HEADER_ENCODING)
        ).decode(HTTP_HEADER_ENCODING)
        auth = 'Basic %s' % base64_credentials
        response = self.csrf_client.post(
            '/basic/',
            {'example': 'example'},
            HTTP_AUTHORIZATION=auth
        )
        assert response.status_code == status.HTTP_200_OK

    def test_post_json_passing_basic_auth(self):
        """Ensure POSTing json over basic auth with correct credentials passes and does not require CSRF"""
        credentials = ('%s:%s' % (self.username, self.password))
        base64_credentials = base64.b64encode(
            credentials.encode(HTTP_HEADER_ENCODING)
        ).decode(HTTP_HEADER_ENCODING)
        auth = 'Basic %s' % base64_credentials
        response = self.csrf_client.post(
            '/basic/',
            {'example': 'example'},
            format='json',
            HTTP_AUTHORIZATION=auth
        )
        assert response.status_code == status.HTTP_200_OK

    def test_regression_handle_bad_base64_basic_auth_header(self):
        """Ensure POSTing JSON over basic auth with an incorrectly padded Base64 string is handled correctly"""
        # regression test for issue in 'rest_framework.authentication.BasicAuthentication.authenticate'
        # https://github.com/encode/django-rest-framework/issues/4089
        auth = 'Basic =a='
        response = self.csrf_client.post(
            '/basic/',
            {'example': 'example'},
            format='json',
            HTTP_AUTHORIZATION=auth
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_post_form_failing_basic_auth(self):
        """Ensure POSTing form over basic auth without correct credentials fails"""
        response = self.csrf_client.post('/basic/', {'example': 'example'})
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_post_json_failing_basic_auth(self):
        """Ensure POSTing json over basic auth without correct credentials fails"""
        response = self.csrf_client.post(
            '/basic/',
            {'example': 'example'},
            format='json'
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED
        assert response['WWW-Authenticate'] == 'Basic realm="api"'

    def test_fail_post_if_credentials_are_missing(self):
        response = self.csrf_client.post(
            '/basic/', {'example': 'example'}, HTTP_AUTHORIZATION='Basic ')
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_fail_post_if_credentials_contain_spaces(self):
        response = self.csrf_client.post(
            '/basic/', {'example': 'example'},
            HTTP_AUTHORIZATION='Basic foo bar'
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED


@override_settings(ROOT_URLCONF='tests.test_authentication')
class SessionAuthTests(TestCase):
    """User session authentication"""

    def setUp(self):
        self.csrf_client = APIClient(enforce_csrf_checks=True)
        self.non_csrf_client = APIClient(enforce_csrf_checks=False)
        self.username = 'john'
        self.email = 'lennon@thebeatles.com'
        self.password = 'password'
        self.user = User.objects.create_user(
            self.username, self.email, self.password
        )

    def tearDown(self):
        self.csrf_client.logout()

    def test_login_view_renders_on_get(self):
        """
        Ensure the login template renders for a basic GET.

        cf. [#1810](https://github.com/encode/django-rest-framework/pull/1810)
        """
        response = self.csrf_client.get('/auth/login/')
        content = response.content.decode('utf8')
        assert '<label for="id_username">Username:</label>' in content

    def test_post_form_session_auth_failing_csrf(self):
        """
        Ensure POSTing form over session authentication without CSRF token fails.
        """
        self.csrf_client.login(username=self.username, password=self.password)
        response = self.csrf_client.post('/session/', {'example': 'example'})
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_post_form_session_auth_passing(self):
        """
        Ensure POSTing form over session authentication with a logged in
        user and CSRF token passes.
        """
        self.non_csrf_client.login(
            username=self.username, password=self.password
        )
        response = self.non_csrf_client.post(
            '/session/', {'example': 'example'}
        )
        assert response.status_code == status.HTTP_200_OK

    def test_put_form_session_auth_passing(self):
        """
        Ensure PUTting form over session authentication with a logged in
        user and CSRF token passes.
        """
        self.non_csrf_client.login(
            username=self.username, password=self.password
        )
        response = self.non_csrf_client.put(
            '/session/', {'example': 'example'}
        )
        assert response.status_code == status.HTTP_200_OK

    def test_post_form_session_auth_failing(self):
        """
        Ensure POSTing form over session authentication without a logged in user fails.
        """
        response = self.csrf_client.post('/session/', {'example': 'example'})
        assert response.status_code == status.HTTP_403_FORBIDDEN


class BaseTokenAuthTests(object):
    """Token authentication"""
    model = None
    path = None
    header_prefix = 'Token '

    def setUp(self):
        self.csrf_client = APIClient(enforce_csrf_checks=True)
        self.username = 'john'
        self.email = 'lennon@thebeatles.com'
        self.password = 'password'
        self.user = User.objects.create_user(
            self.username, self.email, self.password
        )

        self.key = 'abcd1234'
        self.token = self.model.objects.create(key=self.key, user=self.user)

    def test_post_form_passing_token_auth(self):
        """
        Ensure POSTing form over token auth with correct
        credentials passes and does not require CSRF
        """
        auth = self.header_prefix + self.key
        response = self.csrf_client.post(
            self.path, {'example': 'example'}, HTTP_AUTHORIZATION=auth
        )
        assert response.status_code == status.HTTP_200_OK

    def test_fail_authentication_if_user_is_not_active(self):
        user = User.objects.create_user('foo', 'bar', 'baz')
        user.is_active = False
        user.save()
        self.model.objects.create(key='foobar_token', user=user)
        response = self.csrf_client.post(
            self.path, {'example': 'example'},
            HTTP_AUTHORIZATION=self.header_prefix + 'foobar_token'
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_fail_post_form_passing_nonexistent_token_auth(self):
        # use a nonexistent token key
        auth = self.header_prefix + 'wxyz6789'
        response = self.csrf_client.post(
            self.path, {'example': 'example'}, HTTP_AUTHORIZATION=auth
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_fail_post_if_token_is_missing(self):
        response = self.csrf_client.post(
            self.path, {'example': 'example'},
            HTTP_AUTHORIZATION=self.header_prefix)
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_fail_post_if_token_contains_spaces(self):
        response = self.csrf_client.post(
            self.path, {'example': 'example'},
            HTTP_AUTHORIZATION=self.header_prefix + 'foo bar'
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_fail_post_form_passing_invalid_token_auth(self):
        # add an 'invalid' unicode character
        auth = self.header_prefix + self.key + "¸"
        response = self.csrf_client.post(
            self.path, {'example': 'example'}, HTTP_AUTHORIZATION=auth
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_post_json_passing_token_auth(self):
        """
        Ensure POSTing json over token auth with correct
        credentials passes and does not require CSRF
        """
        auth = self.header_prefix + self.key
        response = self.csrf_client.post(
            self.path, {'example': 'example'},
            format='json', HTTP_AUTHORIZATION=auth
        )
        assert response.status_code == status.HTTP_200_OK

    def test_post_json_makes_one_db_query(self):
        """
        Ensure that authenticating a user using a token performs only one DB query
        """
        auth = self.header_prefix + self.key

        def func_to_test():
            return self.csrf_client.post(
                self.path, {'example': 'example'},
                format='json', HTTP_AUTHORIZATION=auth
            )

        self.assertNumQueries(1, func_to_test)

    def test_post_form_failing_token_auth(self):
        """
        Ensure POSTing form over token auth without correct credentials fails
        """
        response = self.csrf_client.post(self.path, {'example': 'example'})
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_post_json_failing_token_auth(self):
        """
        Ensure POSTing json over token auth without correct credentials fails
        """
        response = self.csrf_client.post(
            self.path, {'example': 'example'}, format='json'
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED


@override_settings(ROOT_URLCONF='tests.test_authentication')
class TokenAuthTests(BaseTokenAuthTests, TestCase):
    model = Token
    path = '/token/'

    def test_token_has_auto_assigned_key_if_none_provided(self):
        """Ensure creating a token with no key will auto-assign a key"""
        self.token.delete()
        token = self.model.objects.create(user=self.user)
        assert bool(token.key)

    def test_generate_key_returns_string(self):
        """Ensure generate_key returns a string"""
        token = self.model()
        key = token.generate_key()
        assert isinstance(key, six.string_types)

    def test_token_login_json(self):
        """Ensure token login view using JSON POST works."""
        client = APIClient(enforce_csrf_checks=True)
        response = client.post(
            '/auth-token/',
            {'username': self.username, 'password': self.password},
            format='json'
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.data['token'] == self.key

    def test_token_login_json_bad_creds(self):
        """
        Ensure token login view using JSON POST fails if
        bad credentials are used
        """
        client = APIClient(enforce_csrf_checks=True)
        response = client.post(
            '/auth-token/',
            {'username': self.username, 'password': "badpass"},
            format='json'
        )
        assert response.status_code == 400

    def test_token_login_json_missing_fields(self):
        """Ensure token login view using JSON POST fails if missing fields."""
        client = APIClient(enforce_csrf_checks=True)
        response = client.post('/auth-token/',
                               {'username': self.username}, format='json')
        assert response.status_code == 400

    def test_token_login_form(self):
        """Ensure token login view using form POST works."""
        client = APIClient(enforce_csrf_checks=True)
        response = client.post(
            '/auth-token/',
            {'username': self.username, 'password': self.password}
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.data['token'] == self.key


@override_settings(ROOT_URLCONF='tests.test_authentication')
class CustomTokenAuthTests(BaseTokenAuthTests, TestCase):
    model = CustomToken
    path = '/customtoken/'


@override_settings(ROOT_URLCONF='tests.test_authentication')
class CustomKeywordTokenAuthTests(BaseTokenAuthTests, TestCase):
    model = Token
    path = '/customkeywordtoken/'
    header_prefix = 'Bearer '


class IncorrectCredentialsTests(TestCase):
    def test_incorrect_credentials(self):
        """
        If a request contains bad authentication credentials, then
        authentication should run and error, even if no permissions
        are set on the view.
        """
        class IncorrectCredentialsAuth(BaseAuthentication):
            def authenticate(self, request):
                raise exceptions.AuthenticationFailed('Bad credentials')

        request = factory.get('/')
        view = MockView.as_view(
            authentication_classes=(IncorrectCredentialsAuth,),
            permission_classes=()
        )
        response = view(request)
        assert response.status_code == status.HTTP_403_FORBIDDEN
        assert response.data == {'detail': 'Bad credentials'}


class FailingAuthAccessedInRenderer(TestCase):
    def setUp(self):
        class AuthAccessingRenderer(renderers.BaseRenderer):
            media_type = 'text/plain'
            format = 'txt'

            def render(self, data, media_type=None, renderer_context=None):
                request = renderer_context['request']
                if is_authenticated(request.user):
                    return b'authenticated'
                return b'not authenticated'

        class FailingAuth(BaseAuthentication):
            def authenticate(self, request):
                raise exceptions.AuthenticationFailed('authentication failed')

        class ExampleView(APIView):
            authentication_classes = (FailingAuth,)
            renderer_classes = (AuthAccessingRenderer,)

            def get(self, request):
                return Response({'foo': 'bar'})

        self.view = ExampleView.as_view()

    def test_failing_auth_accessed_in_renderer(self):
        """
        When authentication fails the renderer should still be able to access
        `request.user` without raising an exception. Particularly relevant
        to HTML responses that might reasonably access `request.user`.
        """
        request = factory.get('/')
        response = self.view(request)
        content = response.render().content
        assert content == b'not authenticated'


class NoAuthenticationClassesTests(TestCase):
    def test_permission_message_with_no_authentication_classes(self):
        """
        An unauthenticated request made against a view that has no
        `authentication_classes` but does have `permission_classes`
        should return 403 with the exception's message.
        """
        class DummyPermission(permissions.BasePermission):
            message = 'Dummy permission message'

            def has_permission(self, request, view):
                return False

        request = factory.get('/')
        view = MockView.as_view(
            authentication_classes=(),
            permission_classes=(DummyPermission,),
        )
        response = view(request)
        assert response.status_code == status.HTTP_403_FORBIDDEN
        assert response.data == {'detail': 'Dummy permission message'}


class BasicAuthenticationUnitTests(TestCase):

    def test_base_authentication_abstract_method(self):
        with pytest.raises(NotImplementedError):
            BaseAuthentication().authenticate({})

    def test_basic_authentication_raises_error_if_user_not_found(self):
        auth = BasicAuthentication()
        with pytest.raises(exceptions.AuthenticationFailed):
            auth.authenticate_credentials('invalid id', 'invalid password')

    def test_basic_authentication_raises_error_if_user_not_active(self):
        from rest_framework import authentication

        class MockUser(object):
            is_active = False

        old_authenticate = authentication.authenticate
        authentication.authenticate = lambda **kwargs: MockUser()
        auth = authentication.BasicAuthentication()
        with pytest.raises(exceptions.AuthenticationFailed) as error:
            auth.authenticate_credentials('foo', 'bar')
        assert 'User inactive or deleted.' in str(error)
        authentication.authenticate = old_authenticate
# Source repo: edx/django-rest-framework
# Test file:   tests/test_authentication.py
# Code file:   rest_framework/utils/humanize_datetime.py
from .decorators import endpoint
from ..endpoints.health import *

__all__ = ['HealthInterface']


class HealthInterface(object):

    @endpoint(GETServices, initialize_required=False)
    def list_services(self):
        """List all the services

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (services=( :class:`~async_v20.definitions.health_types.Service`, ...))
        """
        pass

    @endpoint(GETService, initialize_required=False)
    def get_service(self, service_id: ServiceID):
        """Get a single service

        Args:

            service_id: :class:`~async_v20.endpoints.annotations.ServiceID`
                Name of the service to get

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (Service= :class:`~async_v20.definitions.health_types.Service`)
        """
        pass

    @endpoint(GETServiceLists, initialize_required=False)
    def list_service_lists(self):
        """List all service lists

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (lists=( :class:`~async_v20.definitions.health_types.ServiceList`, ...))
        """
        pass

    @endpoint(GETServiceList, initialize_required=False)
    def get_service_list(self, service_list_id: ServiceListID):
        """Get a single service list

        Args:

            service_list_id: :class:`~async_v20.endpoints.annotations.ServiceListID`
                The service list to get.

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (lists= :class:`~async_v20.definitions.health_types.ServiceList`)
        """
        pass

    @endpoint(GETEvents, initialize_required=False)
    def list_events(self, service_id: ServiceID):
        """List all events for a service

        Args:

            service_id: :class:`~async_v20.endpoints.annotations.ServiceID`
                The service to get events for.

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (lists=( :class:`~async_v20.definitions.health_types.Event`, ...))
        """
        pass

    @endpoint(GETCurrentEvent, initialize_required=False)
    def get_current_event(self, service_id: ServiceID):
        """Get the current event for a service

        Args:

            service_id: :class:`~async_v20.endpoints.annotations.ServiceID`
                The service to get the current event for

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (Event= :class:`~async_v20.definitions.health_types.Event`)
        """
        pass

    @endpoint(GETEvent, initialize_required=False)
    def get_event(self, service_id: ServiceID, event_sid: EventSid):
        """Get an individual event

        Args:

            service_id: :class:`~async_v20.endpoints.annotations.ServiceID`
                The service to get the event for
            event_sid: :class:`~async_v20.endpoints.annotations.EventSid`
                The event to get from the specified service

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (Event= :class:`~async_v20.definitions.health_types.Event`)
        """
        pass

    @endpoint(GETStatuses, initialize_required=False)
    def list_statuses(self):
        """List all statuses

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (statuses=( :class:`~async_v20.definitions.health_types.Event`, ...))
        """
        pass

    @endpoint(GETStatus, initialize_required=False)
    def get_status(self, status_id: StatusID):
        """Get an individual status

        Args:

            status_id: :class:`~async_v20.endpoints.annotations.StatusID`
                The status to get

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (Status= :class:`~async_v20.definitions.health_types.Status`)
        """
        pass

    @endpoint(GETImages, initialize_required=False)
    def list_images(self):
        """List all status images

        Returns:

            status [200]
                :class:`~async_v20.interface.response.Response`
                (images=( :class:`~async_v20.definitions.health_types.Image`, ...))
        """
        pass
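# A minimal usage sketch (assumptions: HealthInterface is mixed into
# async_v20's OandaClient and an event loop is available):
#
#     import asyncio
#     from async_v20 import OandaClient
#
#     async def main():
#         async with OandaClient() as client:
#             rsp = await client.list_services()
#             print(rsp.services)  # -> (Service, ...)
#
#     asyncio.get_event_loop().run_until_complete(main())
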
import inspect
import json
import re

import pandas as pd
import pytest

from async_v20 import endpoints
from async_v20.client import OandaClient
from async_v20.definitions.types import Account
from async_v20.definitions.types import DateTime
from async_v20.definitions.types import OrderRequest
from async_v20.definitions.types import StopLossOrderRequest
from async_v20.definitions.types import ArrayInstrument
from async_v20.definitions.types import MarketOrderRequest
from async_v20.endpoints import POSTOrders
from async_v20.endpoints.annotations import Bool
from async_v20.endpoints.annotations import Authorization
from async_v20.endpoints.annotations import SinceTransactionID
from async_v20.endpoints.annotations import LastTransactionID
from async_v20.exceptions import FailedToCreatePath, InvalidOrderRequest
from async_v20.interface.helpers import _create_request_params
from async_v20.interface.helpers import _format_order_request
from async_v20.interface.helpers import construct_arguments
from async_v20.interface.helpers import create_body
from async_v20.interface.helpers import create_request_kwargs
from async_v20.interface.helpers import create_url
from async_v20.interface.helpers import too_many_passed_transactions
from .helpers import order_dict
from ..data.json_data import GETAccountID_response, example_instruments
from ..fixtures.client import client
from ..fixtures.server import server
from ..test_definitions.helpers import get_valid_primitive_data

client_attrs = [getattr(OandaClient, attr) for attr in dir(OandaClient)]
client_methods = list(filter(lambda x: hasattr(x, 'endpoint'), client_attrs))

import logging

logger = logging.getLogger('async_v20')
logger.disabled = True

client = client
server = server


def test_order_dict():
    first = {'a': 1, 'b': 2, 'c': {'d': 3, 'e': 4, 'f': {'e': 5, 'g': 6}}}
    second = {'c': {'f': {'g': 6, 'e': 5}, 'e': 4, 'd': 3}, 'b': 2, 'a': 1}
    assert order_dict(first) == order_dict(second)


@pytest.fixture
def stop_loss_order():
    order = StopLossOrderRequest(instrument='AUD_USD', trade_id=1234, price=0.8)
    yield order
    del order


client_signatures = [inspect.signature(method) for method in client_methods]


def kwargs(sig):
    args = {name: get_valid_primitive_data(param.annotation)
            for name, param in sig.parameters.items() if name != 'self'}
    return args


annotation_lookup_arguments = [(sig, kwargs(sig)) for sig in client_signatures]


@pytest.mark.asyncio
@pytest.mark.parametrize('signature, arguments', annotation_lookup_arguments)
async def test_construct_arguments(client, server, signature, arguments):
    """Ensure that the annotation lookup dictionary is built correctly"""
    await client.initialize()
    result = construct_arguments(client, signature, **arguments)
    for annotation, instance in result.items():
        if isinstance(instance, bool):
            assert issubclass(annotation, Bool)
        elif isinstance(instance, pd.Timestamp):
            assert issubclass(annotation, DateTime)
        else:
            assert type(instance) == annotation


locations = ['header', 'path', 'query']
test_arguments_arguments = [(getattr(endpoints, cls), location)
                            for location in locations
                            for cls in endpoints.__all__]


@pytest.mark.parametrize('method, signature, kwargs',
                         zip(client_methods, *zip(*annotation_lookup_arguments)))
@pytest.mark.asyncio
async def test_create_request_params(client, method, signature, kwargs):
    """Test that every argument supplied to an endpoint goes into the HTTP request"""
    endpoint = method.endpoint
    arguments = construct_arguments(client, signature, **kwargs)
    total_params = []
    for location in locations:
        result = _create_request_params(client, endpoint, arguments, location)
        total_params.extend(result)

    # These parameters are set by default in the client.
    # They will appear in total_params even though they were not passed,
    # therefore we will remove them.
    for default_param in ['Authorization', 'LastTransactionID',
                          'Accept-Datetime-Format', 'accountID']:
        try:
            total_params.remove(default_param)
        except ValueError:
            continue

    assert len(total_params) == len(arguments) - len(list(endpoint.request_schema))


@pytest.mark.parametrize('endpoint', [getattr(endpoints, cls) for cls in endpoints.__all__])
def test_create_url(client, endpoint):
    template = endpoint.path
    arguments = [value for value in template if not isinstance(value, str)]
    values = list(map(lambda x: str(x), range(len(arguments))))
    arguments = dict(zip(arguments, values))
    url = create_url(client, endpoint, arguments)
    path = url.path
    for value in values:
        assert value in path
        path = path[path.index(value):]


@pytest.mark.parametrize('endpoint', [getattr(endpoints, cls) for cls in endpoints.__all__])
def test_create_url_raises_error_when_missing_arguments(client, endpoint):
    if len(endpoint.path) > 3:
        # URL templates with len > 3 will require additional arguments to be passed
        with pytest.raises(FailedToCreatePath):
            url = create_url(client, endpoint, {})


@pytest.mark.asyncio
@pytest.mark.parametrize('method, signature, kwargs',
                         zip(client_methods, *zip(*annotation_lookup_arguments)))
async def test_create_request_kwargs(client, server, method, signature, kwargs):
    await client.initialize()
    client.format_order_requests = True
    args = construct_arguments(client, signature, **kwargs)
    if OrderRequest in args:
        args.update({OrderRequest: OrderRequest(instrument='AUD_USD', units=1)})
    request_kwargs = create_request_kwargs(client, method.endpoint, args)

    # Make sure args are not empty
    assert request_kwargs.get('method', 1)
    assert request_kwargs.get('url', 1)
    assert request_kwargs.get('headers', 1)
    assert request_kwargs.get('params', 1)
    assert request_kwargs.get('json', 1)

    assert [request_kwargs['method']] in [['POST'], ['GET'], ['PUT'], ['PATCH'], ['DELETE']]

    auth_in_header = 'Authorization' in request_kwargs.get('headers', '')
    if Authorization in method.endpoint.parameters:
        assert auth_in_header
    else:
        assert not auth_in_header


@pytest.mark.asyncio
async def test_request_body_is_constructed_correctly(client, server, stop_loss_order):
    await client.initialize()
    result = create_body(client, POSTOrders.request_schema,
                         {OrderRequest: stop_loss_order, 'test': Account(),
                          'arg': 'random_string'})
    correct = {'order': {'instrument': 'AUD_USD', 'tradeID': '1234', 'price': '0.8',
                         'type': 'STOP_LOSS', 'timeInForce': 'GTC',
                         'triggerCondition': 'DEFAULT'}}
    assert result == correct


@pytest.mark.asyncio
async def test_request_body_does_not_format_order_request_with_no_instrument_parameter(
        client, server, stop_loss_order):
    await client.initialize()
    client.format_order_requests = True
    create_body(client, POSTOrders.request_schema,
                {OrderRequest: stop_loss_order, 'test': Account(),
                 'arg': 'random_string'})


@pytest.mark.asyncio
async def test_request_body_raises_error_when_cannot_format_order_request(client, server):
    await client.initialize()
    client.format_order_requests = True
    with pytest.raises(InvalidOrderRequest):
        create_body(client, POSTOrders.request_schema,
                    {OrderRequest: MarketOrderRequest(instrument='NOT AN INSTRUMENT', units=1)})


@pytest.mark.asyncio
async def test_request_body_formats_order_request_when_an_order_request_is_passed(client, server):
    await client.initialize()
    client.format_order_requests = True
    with pytest.raises(InvalidOrderRequest):
        create_body(client, POSTOrders.request_schema,
                    {OrderRequest: MarketOrderRequest(instrument='NOT AN INSTRUMENT', units=1)})


@pytest.mark.asyncio
async def test_request_body_does_not_raise_error_when_an_invalid_order_request_is_passed(client, server):
    await client.initialize()
    client.format_order_requests = True
    body = create_body(client, POSTOrders.request_schema,
                       {OrderRequest: OrderRequest(instrument='AUD_USD', units=0)})
    assert body['order']['units'] == '1.0'


@pytest.mark.asyncio
async def test_objects_can_be_converted_between_Model_object_and_json():
    account = Account(**GETAccountID_response['account'])
    response_json_account = GETAccountID_response['account']
    account_to_json = account.dict(json=True, datetime_format='RFC3339')

    response_json_account = order_dict(response_json_account)
    account_to_json = order_dict(account_to_json)
    assert response_json_account == account_to_json


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_updates_units(instrument):
    order_request = OrderRequest(instrument='AUD_JPY', units=0.123456)
    result = _format_order_request(order_request, instrument, clip=True)
    assert result.units >= instrument.minimum_trade_size


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_raises_error_when_units_less_than_minimum(instrument):
    order_request = OrderRequest(instrument='XPT_USD', units=0.123456)
    with pytest.raises(InvalidOrderRequest):
        _format_order_request(order_request, instrument)


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_applies_correct_precision_to_units(instrument):
    order_request = OrderRequest(instrument=instrument.name, units=50.1234567891234)
    result = _format_order_request(order_request, instrument)
    if instrument.trade_units_precision == 0:
        assert re.findall(r'(?<=\.)\d+', str(result.units))[0] == '0'
    else:
        assert len(re.findall(r'(?<=\.)\d+', str(result.units))[0]) == instrument.trade_units_precision

    order_request = OrderRequest(instrument=instrument.name, units=0.1234567891234)
    result = _format_order_request(order_request, instrument, clip=True)
    if instrument.trade_units_precision == 0:
        assert re.findall(r'(?<=\.)\d+', str(result.units))[0] == '0'
    else:
        assert len(re.findall(r'(?<=\.)\d+', str(result.units))[0]) == instrument.trade_units_precision


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_applies_correct_precision_to_price_price_bound_distance(instrument):
    order_request = OrderRequest(instrument='AUD_USD', price=50.1234567891234,
                                 price_bound=1234.123456789, distance=20.123456789)
    result = _format_order_request(order_request, instrument)
    for attr in (result.price, result.price_bound, result.distance):
        if instrument.display_precision == 0:
            assert re.findall(r'(?<=\.)\d+', str(attr))[0] == '0'
        else:
            assert len(re.findall(r'(?<=\.)\d+', str(attr))[0]) == instrument.display_precision


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_applies_correct_precision_to_take_profit_on_fill_stop_loss_on_fill(instrument):
    order_request = OrderRequest(instrument=instrument.name,
                                 take_profit_on_fill=50.123456789,
                                 stop_loss_on_fill=50.123456789)
    result = _format_order_request(order_request, instrument)
    for attr in (result.stop_loss_on_fill.price, result.take_profit_on_fill):
        if instrument.display_precision == 0:
            assert re.findall(r'(?<=\.)\d+', str(attr))[0] == '0'
        else:
            assert len(re.findall(r'(?<=\.)\d+', str(attr))[0]) == instrument.display_precision


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_applies_correct_precision_to_trailing_stop_loss_on_fill(instrument):
    order_request = OrderRequest(
        instrument=instrument.name,
        trailing_stop_loss_on_fill=instrument.minimum_trailing_stop_distance + 0.123456789
    )
    result = _format_order_request(order_request, instrument)
    attr = result.trailing_stop_loss_on_fill.distance
    if instrument.display_precision == 0:
        assert re.findall(r'(?<=\.)\d+', str(attr))[0] == '0'
    else:
        assert len(re.findall(r'(?<=\.)\d+', str(attr))[0]) == instrument.display_precision


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_limits_trailing_stop_loss_on_fill_to_valid_range(instrument):
    order_request = OrderRequest(
        instrument=instrument.name,
        trailing_stop_loss_on_fill=0
    )
    if instrument.minimum_trailing_stop_distance > 0:
        with pytest.raises(InvalidOrderRequest):
            _format_order_request(order_request, instrument)
    result = _format_order_request(order_request, instrument, clip=True)
    assert result.trailing_stop_loss_on_fill.distance == instrument.minimum_trailing_stop_distance

    order_request = OrderRequest(
        instrument=instrument.name,
        trailing_stop_loss_on_fill=instrument.maximum_trailing_stop_distance + 10
    )
    with pytest.raises(InvalidOrderRequest):
        _format_order_request(order_request, instrument)
    result = _format_order_request(order_request, instrument, clip=True)
    assert result.trailing_stop_loss_on_fill.distance == instrument.maximum_trailing_stop_distance


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_limits_units_to_valid_range(instrument):
    order_request = OrderRequest(
        instrument=instrument.name,
        units=0
    )
    if instrument.minimum_trade_size > 0:
        with pytest.raises(InvalidOrderRequest):
            _format_order_request(order_request, instrument)
    result = _format_order_request(order_request, instrument, clip=True)
    assert result.units == instrument.minimum_trade_size

    order_request = OrderRequest(
        instrument=instrument.name,
        units=instrument.maximum_order_units + 10
    )
    with pytest.raises(InvalidOrderRequest):
        _format_order_request(order_request, instrument)
    result = _format_order_request(order_request, instrument, clip=True)
    assert result.units == instrument.maximum_order_units


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_format_order_requests_accepts_negative_values_for_units(instrument):
    order_request = OrderRequest(
        instrument=instrument.name,
        units=-instrument.minimum_trade_size
    )
    result = _format_order_request(order_request, instrument, clip=False)
    assert result.units == -instrument.minimum_trade_size
    result = _format_order_request(order_request, instrument, clip=True)
    assert result.units == -instrument.minimum_trade_size


@pytest.mark.parametrize('instrument', ArrayInstrument(*json.loads(example_instruments)))
def test_ins_context_does_not_add_parameters_to_order_requests(instrument):
    order_request = OrderRequest(
        instrument=instrument.name,
        units=instrument.minimum_trade_size
    )
    result = _format_order_request(order_request, instrument, clip=True)
    assert not hasattr(result, 'price_bound')
    assert not hasattr(result, 'trailing_stop_loss_on_fill')
    assert not hasattr(result, 'stop_loss_on_fill')
    assert not hasattr(result, 'take_profit_on_fill')


def test_too_many_passed_transactions(client):
    client.default_parameters[SinceTransactionID] = 0
    client.default_parameters[LastTransactionID] = 0
    assert not too_many_passed_transactions(client)

    client.default_parameters[SinceTransactionID] = 0
    client.default_parameters[LastTransactionID] = 901
    assert too_many_passed_transactions(client)

    client.default_parameters.pop(SinceTransactionID)
    assert not too_many_passed_transactions(client)
jamespeterschinner/async_v20
tests/test_interface/test_helpers.py
async_v20/interface/health.py
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type

from bisect import bisect_left

from ._base import Trie as ABCTrie


class Trie(ABCTrie):
    def __init__(self, data):
        if not all(isinstance(x, text_type) for x in data.keys()):
            raise TypeError("All keys must be strings")

        self._data = data
        self._keys = sorted(data.keys())
        self._cachestr = ""
        self._cachepoints = (0, len(data))

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        if prefix is None or prefix == "" or not self._keys:
            return set(self._keys)

        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            start = i = bisect_left(self._keys, prefix, lo, hi)
        else:
            start = i = bisect_left(self._keys, prefix)

        keys = set()
        if start == len(self._keys):
            return keys

        while self._keys[i].startswith(prefix):
            keys.add(self._keys[i])
            i += 1

        self._cachestr = prefix
        self._cachepoints = (start, i)

        return keys

    def has_keys_with_prefix(self, prefix):
        if prefix in self._data:
            return True

        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            i = bisect_left(self._keys, prefix, lo, hi)
        else:
            i = bisect_left(self._keys, prefix)

        if i == len(self._keys):
            return False

        return self._keys[i].startswith(prefix)
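A minimal usage sketch for the binary-search Trie above; the names are exactly those defined in the class, exercised directly rather than through html5lib:

trie = Trie({"apple": 1, "apply": 2, "banana": 3})
assert trie.has_keys_with_prefix("app")
assert trie.keys("app") == {"apple", "apply"}
# The (prefix, range) cache kept in _cachestr/_cachepoints narrows the next
# bisect: querying the longer prefix "appl" searches only the cached slice.
assert trie.keys("appl") == {"apple", "apply"}
assert trie["banana"] == 3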
import sys
import datetime
import os
from contextlib import contextmanager

import freezegun
import pytest
import pretend
from pip._vendor import lockfile

from pip.utils import outdated


@pytest.mark.parametrize(
    ['stored_time', 'newver', 'check', 'warn'],
    [
        ('1970-01-01T10:00:00Z', '2.0', True, True),
        ('1970-01-01T10:00:00Z', '1.0', True, False),
        ('1970-01-06T10:00:00Z', '1.0', False, False),
        ('1970-01-06T10:00:00Z', '2.0', False, True),
    ]
)
def test_pip_version_check(monkeypatch, stored_time, newver, check, warn):
    monkeypatch.setattr(outdated, 'get_installed_version', lambda name: '1.0')

    resp = pretend.stub(
        raise_for_status=pretend.call_recorder(lambda: None),
        json=pretend.call_recorder(lambda: {"releases": {newver: {}}}),
    )
    session = pretend.stub(
        get=pretend.call_recorder(lambda u, headers=None: resp),
    )

    fake_state = pretend.stub(
        state={"last_check": stored_time, 'pypi_version': '1.0'},
        save=pretend.call_recorder(lambda v, t: None),
    )
    monkeypatch.setattr(
        outdated, 'load_selfcheck_statefile', lambda: fake_state
    )

    monkeypatch.setattr(outdated.logger, 'warning',
                        pretend.call_recorder(lambda s: None))
    monkeypatch.setattr(outdated.logger, 'debug',
                        pretend.call_recorder(lambda s, exc_info=None: None))

    with freezegun.freeze_time(
            "1970-01-09 10:00:00",
            ignore=[
                "six.moves",
                "pip._vendor.six.moves",
                "pip._vendor.requests.packages.urllib3.packages.six.moves",
            ]):
        outdated.pip_version_check(session)

    assert not outdated.logger.debug.calls

    if check:
        assert session.get.calls == [pretend.call(
            "https://pypi.python.org/pypi/pip/json",
            headers={"Accept": "application/json"}
        )]
        assert fake_state.save.calls == [
            pretend.call(newver, datetime.datetime(1970, 1, 9, 10, 00, 00)),
        ]
        if warn:
            assert len(outdated.logger.warning.calls) == 1
        else:
            assert len(outdated.logger.warning.calls) == 0
    else:
        assert session.get.calls == []
        assert fake_state.save.calls == []


def test_virtualenv_state(monkeypatch):
    CONTENT = '{"last_check": "1970-01-02T11:00:00Z", "pypi_version": "1.0"}'
    fake_file = pretend.stub(
        read=pretend.call_recorder(lambda: CONTENT),
        write=pretend.call_recorder(lambda s: None),
    )

    @pretend.call_recorder
    @contextmanager
    def fake_open(filename, mode='r'):
        yield fake_file

    monkeypatch.setattr(outdated, 'open', fake_open, raising=False)

    monkeypatch.setattr(outdated, 'running_under_virtualenv',
                        pretend.call_recorder(lambda: True))

    monkeypatch.setattr(sys, 'prefix', 'virtually_env')

    state = outdated.load_selfcheck_statefile()
    state.save('2.0', datetime.datetime.utcnow())

    assert len(outdated.running_under_virtualenv.calls) == 1

    expected_path = os.path.join('virtually_env', 'pip-selfcheck.json')
    assert fake_open.calls == [
        pretend.call(expected_path),
        pretend.call(expected_path, 'w'),
    ]

    # json.dumps will call this a number of times
    assert len(fake_file.write.calls)


def test_global_state(monkeypatch):
    CONTENT = '''{"pip_prefix": {"last_check": "1970-01-02T11:00:00Z",
                                 "pypi_version": "1.0"}}'''
    fake_file = pretend.stub(
        read=pretend.call_recorder(lambda: CONTENT),
        write=pretend.call_recorder(lambda s: None),
    )

    @pretend.call_recorder
    @contextmanager
    def fake_open(filename, mode='r'):
        yield fake_file

    monkeypatch.setattr(outdated, 'open', fake_open, raising=False)

    @pretend.call_recorder
    @contextmanager
    def fake_lock(filename):
        yield

    monkeypatch.setattr(outdated, "check_path_owner", lambda p: True)
    monkeypatch.setattr(lockfile, 'LockFile', fake_lock)
    monkeypatch.setattr(os.path, "exists", lambda p: True)

    monkeypatch.setattr(outdated, 'running_under_virtualenv',
                        pretend.call_recorder(lambda: False))
    monkeypatch.setattr(outdated, 'USER_CACHE_DIR', 'cache_dir')
    monkeypatch.setattr(sys, 'prefix', 'pip_prefix')

    state = outdated.load_selfcheck_statefile()
    state.save('2.0', datetime.datetime.utcnow())

    assert len(outdated.running_under_virtualenv.calls) == 1

    expected_path = os.path.join('cache_dir', 'selfcheck.json')
    assert fake_lock.calls == [pretend.call(expected_path)]
    assert fake_open.calls == [
        pretend.call(expected_path),
        pretend.call(expected_path),
        pretend.call(expected_path, 'w'),
    ]

    # json.dumps will call this a number of times
    assert len(fake_file.write.calls)
jasonkying/pip
tests/unit/test_unit_outdated.py
pip/_vendor/html5lib/trie/py.py
try:
    import ast
    from pip._vendor._markerlib.markers import (
        default_environment, compile, interpret)
except ImportError:
    if 'ast' in globals():
        raise

    def default_environment():
        return {}

    def compile(marker):
        def marker_fn(environment=None, override=None):
            # 'empty markers are True' heuristic won't install extra deps.
            return not marker.strip()
        marker_fn.__doc__ = marker
        return marker_fn

    def interpret(marker, environment=None, override=None):
        return compile(marker)()
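A hedged illustration (not part of pip itself) of how the fallback definitions above behave when `_markerlib.markers` cannot be imported: any non-empty marker evaluates to False, so marker-guarded extras are simply skipped, while an empty marker stays True:

# Assumes the ImportError branch was taken:
fn = compile("python_version >= '2.6'")
assert fn.__doc__ == "python_version >= '2.6'"
assert fn() is False             # non-empty marker: treated as unsatisfied
assert interpret("") is True     # empty marker: always satisfied
assert default_environment() == {}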
jasonkying/pip
tests/unit/test_unit_outdated.py
pip/_vendor/_markerlib/__init__.py
from __future__ import absolute_import, division, unicode_literals

from datrie import Trie as DATrie
from pip._vendor.six import text_type

from ._base import Trie as ABCTrie


class Trie(ABCTrie):
    def __init__(self, data):
        chars = set()
        for key in data.keys():
            if not isinstance(key, text_type):
                raise TypeError("All keys must be strings")
            for char in key:
                chars.add(char)

        self._data = DATrie("".join(chars))
        for key, value in data.items():
            self._data[key] = value

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        raise NotImplementedError()

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        return self._data.keys(prefix)

    def has_keys_with_prefix(self, prefix):
        return self._data.has_keys_with_prefix(prefix)

    def longest_prefix(self, prefix):
        return self._data.longest_prefix(prefix)

    def longest_prefix_item(self, prefix):
        return self._data.longest_prefix_item(prefix)
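A short sketch of what this backend adds over the pure-Python fallback. It assumes the optional `datrie` C extension is installed and that its keys() returns a list (hence the sorted() wrapper):

trie = Trie({"ab": 1, "abc": 2, "b": 3})
assert sorted(trie.keys("ab")) == ["ab", "abc"]
assert trie.has_keys_with_prefix("a")
# Longest-prefix queries exist only on this datrie-backed variant:
assert trie.longest_prefix("abcd") == "abc"
assert trie.longest_prefix_item("abcd") == ("abc", 2)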
jasonkying/pip
tests/unit/test_unit_outdated.py
pip/_vendor/html5lib/trie/datrie.py
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.

import numpy as np

from hyperspy import model
from hyperspy.exceptions import NavigationSizeError


def AIC(model):
    """Calculates the Akaike information criterion

    AIC = 2 k - 2 ln(L)

    where L is the maximum likelihood function value and k is the number
    of free parameters.
    """
    # maybe should not have any Offset components?
    # more than a single pixel, needs iterating, don't do that for now
    if model.axes_manager.navigation_size:
        raise NavigationSizeError(model.axes_manager.navigation_size, 0)
    model._set_p0()  # correctly set the parameters (numbers / values)
    lnL = model._poisson_likelihood_function(
        model.p0,
        model.axis.axis[model.channel_switches])
    k = len(model.p0) + 1  # +1 for the variance
    return 2 * k - 2 * lnL


def AICc(model):
    _aic = AIC(model)
    n = model.axes_manager.signal_size
    k = len(model.p0) + 1
    return _aic + (2. * k * (k + 1)) / (n - k - 1)


def BIC(model):
    """Calculates the Bayesian information criterion

    BIC = -2 * ln(L) + k * ln(n)

    where L is the maximum likelihood function, k is the number of free
    parameters, and n is the number of data points (observations) /
    sample size.
    """
    # maybe should not have any Offset components?
    # more than a single pixel, needs iterating, don't do that for now
    if model.axes_manager.navigation_size:
        raise NavigationSizeError(model.axes_manager.navigation_size, 0)
    model._set_p0()  # correctly set the parameters (numbers / values)
    lnL = model._poisson_likelihood_function(
        model.p0,
        model.axis.axis[model.channel_switches])
    n = model.axes_manager.signal_size
    k = len(model.p0) + 1
    return k * np.log(n) - 2. * lnL
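A worked numeric check of the small-sample correction applied in AICc above, using assumed values for k, n and the base AIC rather than a fitted HyperSpy model:

k, n, aic = 4, 100, 250.0  # assumed example values, not from a real fit
aicc = aic + (2.0 * k * (k + 1)) / (n - k - 1)
assert abs(aicc - (250.0 + 40.0 / 95.0)) < 1e-12  # 2k(k+1) = 40, n-k-1 = 95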
import json
import os

import numpy as np
import pytest

from hyperspy import signals
from hyperspy.io import load
from hyperspy.misc.test_utils import assert_deep_almost_equal

test_files = ['30x30_instructively_packed_16bit_compressed.bcf',
              '16x16_12bit_packed_8bit.bcf',
              'P45_the_default_job.bcf',
              'test_TEM.bcf',
              'Hitachi_TM3030Plus.bcf',
              'over16bit.bcf',
              'bcf_v2_50x50px.bcf',
              'bcf-edx-ebsd.bcf']
np_file = ['30x30_16bit.npy', '30x30_16bit_ds.npy']
spx_files = ['extracted_from_bcf.spx', 'bruker_nano.spx']

my_path = os.path.dirname(__file__)


def test_load_16bit():
    # test bcf from the hyperspy load function level
    # some functions may not be covered
    # it can't use the cython parsing implementation, as it is not compiled
    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    print('testing bcf instructively packed 16bit...')
    s = load(filename)
    bse, hype = s
    # Bruker saves all images in true 16bit:
    assert bse.data.dtype == np.uint16
    assert bse.data.shape == (30, 30)
    np_filename = os.path.join(my_path, 'bruker_data', np_file[0])
    np.testing.assert_array_equal(hype.data[:, :, 222:224],
                                  np.load(np_filename))
    assert hype.data.shape == (30, 30, 2048)


def test_load_16bit_reduced():
    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    print('testing downsampled 16bit bcf...')
    s = load(filename, downsample=4, cutoff_at_kV=10)
    bse, hype = s
    # sem images are never downsampled
    assert bse.data.shape == (30, 30)
    np_filename = os.path.join(my_path, 'bruker_data', np_file[1])
    np.testing.assert_array_equal(hype.data[:, :, 222:224],
                                  np.load(np_filename))
    assert hype.data.shape == (8, 8, 1047)
    # Bruker saves all images in true 16bit:
    assert bse.data.dtype == np.uint16
    # hypermaps should always return unsigned integers:
    assert str(hype.data.dtype)[0] == 'u'


def test_load_8bit():
    for bcffile in test_files[1:3]:
        filename = os.path.join(my_path, 'bruker_data', bcffile)
        print('testing simple 8bit bcf...')
        s = load(filename)
        bse, hype = s[0], s[-1]
        # Bruker saves all images in true 16bit:
        assert bse.data.dtype == np.uint16
        # hypermaps should always return unsigned integers:
        assert str(hype.data.dtype)[0] == 'u'


def test_hyperspy_wrap():
    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    print('testing bcf wrap to hyperspy signal...')

    from hyperspy.exceptions import VisibleDeprecationWarning
    with pytest.warns(VisibleDeprecationWarning):
        hype = load(filename, select_type='spectrum')
    hype = load(filename, select_type='spectrum_image')
    np.testing.assert_allclose(
        hype.axes_manager[0].scale, 1.66740910949362, atol=1E-12)
    np.testing.assert_allclose(
        hype.axes_manager[1].scale, 1.66740910949362, atol=1E-12)
    assert hype.axes_manager[1].units == 'µm'
    np.testing.assert_allclose(hype.axes_manager[2].scale, 0.009999)
    np.testing.assert_allclose(hype.axes_manager[2].offset, -0.47225277)
    assert hype.axes_manager[2].units == 'keV'
    assert hype.axes_manager[2].is_binned == True

    md_ref = {
        'Acquisition_instrument': {
            'SEM': {
                'beam_energy': 20,
                'magnification': 1819.22595,
                'Detector': {
                    'EDS': {
                        'elevation_angle': 35.0,
                        'detector_type': 'XFlash 6|10',
                        'azimuth_angle': 90.0,
                        'real_time': 70.07298,
                        'energy_resolution_MnKa': 130.0}},
                'Stage': {
                    'tilt_alpha': 0.0,
                    'rotation': 326.10089,
                    'x': 66940.81,
                    'y': 54233.16,
                    'z': 39194.77}}},
        'General': {
            'original_filename':
                '30x30_instructively_packed_16bit_compressed.bcf',
            'title': 'EDX',
            'date': '2018-10-04',
            'time': '13:02:07'},
        'Sample': {
            'name': 'chevkinite',
            'elements': ['Al', 'C', 'Ca', 'Ce', 'Fe', 'Gd', 'K', 'Mg', 'Na',
                         'Nd', 'O', 'P', 'Si', 'Sm', 'Th', 'Ti'],
            'xray_lines': ['Al_Ka', 'C_Ka', 'Ca_Ka', 'Ce_La', 'Fe_Ka',
                           'Gd_La', 'K_Ka', 'Mg_Ka', 'Na_Ka', 'Nd_La',
                           'O_Ka', 'P_Ka', 'Si_Ka', 'Sm_La', 'Th_Ma',
                           'Ti_Ka']},
        'Signal': {
            'quantity': 'X-rays (Counts)',
            'signal_type': 'EDS_SEM'},
        '_HyperSpy': {
            'Folding': {'original_axes_manager': None,
                        'original_shape': None,
                        'signal_unfolded': False,
                        'unfolded': False}}}

    filename_omd = os.path.join(my_path, 'bruker_data',
                                '30x30_original_metadata.json')
    with open(filename_omd) as fn:
        # original_metadata:
        omd_ref = json.load(fn)
    assert_deep_almost_equal(hype.metadata.as_dictionary(), md_ref)
    assert_deep_almost_equal(hype.original_metadata.as_dictionary(), omd_ref)
    assert hype.metadata.General.date == "2018-10-04"
    assert hype.metadata.General.time == "13:02:07"
    assert hype.metadata.Signal.quantity == "X-rays (Counts)"


def test_hyperspy_wrap_downsampled():
    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    print('testing bcf wrap to hyperspy signal...')
    hype = load(filename, select_type='spectrum_image', downsample=5)
    np.testing.assert_allclose(
        hype.axes_manager[0].scale, 8.337045547468101, atol=1E-12)
    np.testing.assert_allclose(
        hype.axes_manager[1].scale, 8.337045547468101, atol=1E-12)
    assert hype.axes_manager[1].units == 'µm'


def test_get_mode():
    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    s = load(filename, select_type='spectrum_image', instrument='SEM')
    assert s.metadata.Signal.signal_type == "EDS_SEM"
    assert isinstance(s, signals.EDSSEMSpectrum)

    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    s = load(filename, select_type='spectrum_image', instrument='TEM')
    assert s.metadata.Signal.signal_type == "EDS_TEM"
    assert isinstance(s, signals.EDSTEMSpectrum)

    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    s = load(filename, select_type='spectrum_image')
    assert s.metadata.Signal.signal_type == "EDS_SEM"
    assert isinstance(s, signals.EDSSEMSpectrum)

    filename = os.path.join(my_path, 'bruker_data', test_files[3])
    s = load(filename, select_type='spectrum_image')
    assert s.metadata.Signal.signal_type == "EDS_TEM"
    assert isinstance(s, signals.EDSTEMSpectrum)


def test_wrong_file():
    filename = os.path.join(my_path, 'bruker_data', 'Nope.bcf')
    with pytest.raises(TypeError):
        load(filename)


def test_fast_bcf():
    thingy = pytest.importorskip("hyperspy.io_plugins.unbcf_fast")
    from hyperspy.io_plugins import bruker

    for bcffile in test_files:
        filename = os.path.join(my_path, 'bruker_data', bcffile)
        thingy = bruker.BCF_reader(filename)
        for j in range(2, 5, 1):
            print('downsampling:', j)
            bruker.fast_unbcf = True   # manually enabling fast parsing
            hmap1 = thingy.parse_hypermap(downsample=j)  # using cython
            bruker.fast_unbcf = False  # manually disabling fast parsing
            hmap2 = thingy.parse_hypermap(downsample=j)  # py implementation
            np.testing.assert_array_equal(hmap1, hmap2)


def test_decimal_regex():
    from hyperspy.io_plugins.bruker import fix_dec_patterns
    dummy_xml_positive = [b'<dummy_tag>85,658</dummy_tag>',
                          b'<dummy_tag>85,658E-8</dummy_tag>',
                          b'<dummy_tag>-85,658E-8</dummy_tag>',
                          b'<dum_tag>-85.658</dum_tag>',  # negative check
                          b'<dum_tag>85.658E-8</dum_tag>']  # negative check
    dummy_xml_negative = [b'<dum_tag>12,25,23,45,56,12,45</dum_tag>',
                          b'<dum_tag>12e1,23,-24E-5</dum_tag>']
    for i in dummy_xml_positive:
        assert b'85.658' in fix_dec_patterns.sub(b'\\1.\\2', i)
    for j in dummy_xml_negative:
        assert b'.' not in fix_dec_patterns.sub(b'\\1.\\2', j)


def test_all_spx_loads():
    for spxfile in spx_files:
        filename = os.path.join(my_path, 'bruker_data', spxfile)
        s = load(filename)
        assert s.data.dtype == np.uint64
        assert s.metadata.Signal.signal_type == 'EDS_SEM'


def test_stand_alone_spx():
    filename = os.path.join(my_path, 'bruker_data', 'bruker_nano.spx')
    s = load(filename)
    assert s.metadata.Sample.elements == ['Fe', 'S', 'Cu']
    assert s.metadata.Acquisition_instrument.SEM.Detector.EDS.live_time == 7.385


def test_bruker_XRF():
    # See https://github.com/hyperspy/hyperspy/issues/2689
    # Bruker M6 Jetstream SPX
    filename = os.path.join(my_path, 'bruker_data',
                            'bruker_m6_jetstream_file_example.spx')
    s = load(filename)
    assert s.metadata.Acquisition_instrument.TEM.Detector.EDS.live_time == 28.046
    assert s.metadata.Acquisition_instrument.TEM.beam_energy == 50
ericpre/hyperspy
hyperspy/tests/io/test_bruker.py
hyperspy/utils/model_selection.py
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.

import math
import numbers

import numpy as np
import dask.array as da

from functools import reduce


def symmetrize(a):
    return a + a.swapaxes(0, 1) - np.diag(a.diagonal())


def antisymmetrize(a):
    return a - a.swapaxes(0, 1) + np.diag(a.diagonal())


def closest_nice_number(number):
    oom = 10 ** math.floor(math.log10(number))
    return oom * (number // oom)


def get_linear_interpolation(p1, p2, x):
    """Given two points in 2D, returns y for a given x for y = ax + b

    Parameters
    ----------
    p1, p2 : (x, y)
    x : float

    Returns
    -------
    y : float
    """
    x1, y1 = p1
    x2, y2 = p2
    a = (y2 - y1) / (x2 - x1)
    b = (x2 * y1 - x1 * y2) / (x2 - x1)
    y = a * x + b
    return y


def order_of_magnitude(number):
    """Order of magnitude of the given number

    Parameters
    ----------
    number : float

    Returns
    -------
    float
    """
    return math.floor(math.log10(number))


def isfloat(number):
    """Check if a number or array is of float type.

    This is necessary because e.g. isinstance(np.float32(2), float) is False.
    """
    if hasattr(number, "dtype"):
        return np.issubdtype(number, np.floating)
    else:
        return isinstance(number, float)


def anyfloatin(things):
    """Check if an iterable contains any non-integer."""
    for n in things:
        if isfloat(n) and not n.is_integer():
            return True
    return False


def outer_nd(*vec):
    """Calculates the outer product of n vectors

    Parameters
    ----------
    vec : vector

    Returns
    -------
    out : ndarray
    """
    return reduce(np.multiply.outer, vec)


def hann_window_nth_order(m, order):
    """Calculates a 1D Hann window of nth order

    Parameters
    ----------
    m : int
        Number of points in the window (typically the length of a signal).
    order : int
        Filter order.

    Returns
    -------
    window : array
    """
    if not isinstance(m, int) or m <= 0:
        raise ValueError('Parameter m has to be a positive integer greater than 0.')
    if not isinstance(order, int) or order <= 0:
        raise ValueError('Filter order has to be a positive integer greater than 0.')
    sin_arg = np.pi * (m - 1.) / m
    cos_arg = 2. * np.pi / (m - 1.) * (np.arange(m))
    return m / (order * 2 * np.pi) * sum(
        [(-1) ** i / i * np.sin(i * sin_arg) * (np.cos(i * cos_arg) - 1)
         for i in range(1, order + 1)])


def optimal_fft_size(target, real=False):
    """Wrapper around the scipy function next_fast_len() for calculating
    optimal FFT padding.

    scipy.fft was only added in 1.4.0, so we fall back to scipy.fftpack
    if it is not available. The main difference is that next_fast_len()
    does not take a second argument in the older implementation.

    Parameters
    ----------
    target : int
        Length to start searching from. Must be a positive integer.
    real : bool, optional
        True if the FFT involves real input or output, only available
        for scipy > 1.4.0.

    Returns
    -------
    int
        Optimal FFT size.
    """
    try:
        from scipy.fft import next_fast_len
        support_real = True
    except ImportError:  # pragma: no cover
        from scipy.fftpack import next_fast_len
        support_real = False

    if support_real:
        return next_fast_len(target, real)
    else:  # pragma: no cover
        return next_fast_len(target)


def check_random_state(seed, lazy=False):
    """Turn a random seed into a np.random.RandomState instance.

    Parameters
    ----------
    seed : None or int or np.random.RandomState or dask.array.random.RandomState
        If None:
            Return the RandomState singleton used by
            np.random or dask.array.random.
        If int:
            Return a new RandomState instance seeded with ``seed``.
        If np.random.RandomState:
            Return it.
        If dask.array.random.RandomState:
            Return it.
    lazy : bool, default False
        If True, and seed is ``None`` or ``int``, return
        a dask.array.random.RandomState instance instead.
    """
    # Derived from `sklearn.utils.check_random_state`.
    # Copyright (c) 2007-2020 The scikit-learn developers.
    # All rights reserved.
    if seed is None or seed is np.random:
        return da.random._state if lazy else np.random.mtrand._rand
    if isinstance(seed, numbers.Integral):
        return da.random.RandomState(seed) if lazy else np.random.RandomState(seed)
    if isinstance(seed, (da.random.RandomState, np.random.RandomState)):
        return seed
    raise ValueError(f"{seed} cannot be used to seed a RandomState instance")
ericpre/hyperspy
hyperspy/tests/io/test_bruker.py
hyperspy/misc/math_tools.py
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.

"""SAMFire modules

The :mod:`~hyperspy.api.samfire` module contains the following submodules:

fit_tests
    Tests to check fit convergence when running SAMFire

global_strategies
    Available global strategies to use in SAMFire

local_strategies
    Available local strategies to use in SAMFire

SamfirePool
    The parallel pool, customized to run SAMFire.

"""

from hyperspy.samfire_utils import (fit_tests, global_strategies,
                                    local_strategies)
ericpre/hyperspy
hyperspy/tests/io/test_bruker.py
hyperspy/utils/samfire.py
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.

try:
    # Set traits toolkit to work in a headless system.
    # Capture the error when the toolkit has already been set, which
    # typically occurs when building the doc locally.
    from traits.etsconfig.api import ETSConfig
    ETSConfig.toolkit = "null"
except ValueError:
    # in case ETSConfig.toolkit was already set previously.
    pass

# pytest-mpl 0.7 already imports pyplot, so setting the matplotlib backend to
# 'agg' as early as we can is useless for testing.
import matplotlib.pyplot as plt

import pytest
import numpy as np
import matplotlib

import hyperspy.api as hs

matplotlib.rcParams['figure.max_open_warning'] = 25
matplotlib.rcParams['interactive'] = False
hs.preferences.Plot.saturated_pixels = 0.0
hs.preferences.Plot.cmap_navigator = 'viridis'
hs.preferences.Plot.cmap_signal = 'viridis'
hs.preferences.Plot.pick_tolerance = 5.0

# Set parallel to False by default, so only
# those tests with parallel=True are run in parallel
hs.preferences.General.parallel = False


@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
    doctest_namespace['np'] = np
    doctest_namespace['plt'] = plt
    doctest_namespace['hs'] = hs


@pytest.fixture
def pdb_cmdopt(request):
    return request.config.getoption("--pdb")


def setup_module(mod, pdb_cmdopt):
    if pdb_cmdopt:
        import dask
        dask.set_options(get=dask.local.get_sync)


from matplotlib.testing.conftest import mpl_test_settings


try:
    import pytest_mpl
except ImportError:
    # Register dummy marker to allow running the test suite without pytest-mpl
    def pytest_configure(config):
        config.addinivalue_line(
            "markers",
            "mpl_image_compare: dummy marker registration to allow running "
            "without the pytest-mpl plugin."
        )
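A hypothetical doctest showing what the autouse add_np fixture above enables: np, plt and hs are injected into every doctest namespace, so collected doctests can use them without imports:

def doubled(x):
    """Double the input.

    >>> doubled(np.arange(3)).tolist()  # `np` is provided by the fixture
    [0, 2, 4]
    """
    return 2 * x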
import json import os import numpy as np import pytest from hyperspy import signals from hyperspy.io import load from hyperspy.misc.test_utils import assert_deep_almost_equal test_files = ['30x30_instructively_packed_16bit_compressed.bcf', '16x16_12bit_packed_8bit.bcf', 'P45_the_default_job.bcf', 'test_TEM.bcf', 'Hitachi_TM3030Plus.bcf', 'over16bit.bcf', 'bcf_v2_50x50px.bcf', 'bcf-edx-ebsd.bcf'] np_file = ['30x30_16bit.npy', '30x30_16bit_ds.npy'] spx_files = ['extracted_from_bcf.spx', 'bruker_nano.spx'] my_path = os.path.dirname(__file__) def test_load_16bit(): # test bcf from hyperspy load function level # some of functions can be not covered # it cant use cython parsing implementation, as it is not compiled filename = os.path.join(my_path, 'bruker_data', test_files[0]) print('testing bcf instructively packed 16bit...') s = load(filename) bse, hype = s # Bruker saves all images in true 16bit: assert bse.data.dtype == np.uint16 assert bse.data.shape == (30, 30) np_filename = os.path.join(my_path, 'bruker_data', np_file[0]) np.testing.assert_array_equal(hype.data[:, :, 222:224], np.load(np_filename)) assert hype.data.shape == (30, 30, 2048) def test_load_16bit_reduced(): filename = os.path.join(my_path, 'bruker_data', test_files[0]) print('testing downsampled 16bit bcf...') s = load(filename, downsample=4, cutoff_at_kV=10) bse, hype = s # sem images are never downsampled assert bse.data.shape == (30, 30) np_filename = os.path.join(my_path, 'bruker_data', np_file[1]) np.testing.assert_array_equal(hype.data[:, :, 222:224], np.load(np_filename)) assert hype.data.shape == (8, 8, 1047) # Bruker saves all images in true 16bit: assert bse.data.dtype == np.uint16 # hypermaps should always return unsigned integers: assert str(hype.data.dtype)[0] == 'u' def test_load_8bit(): for bcffile in test_files[1:3]: filename = os.path.join(my_path, 'bruker_data', bcffile) print('testing simple 8bit bcf...') s = load(filename) bse, hype = s[0], s[-1] # Bruker saves all images in true 16bit: assert bse.data.dtype == np.uint16 # hypermaps should always return unsigned integers: assert str(hype.data.dtype)[0] == 'u' def test_hyperspy_wrap(): filename = os.path.join(my_path, 'bruker_data', test_files[0]) print('testing bcf wrap to hyperspy signal...') from hyperspy.exceptions import VisibleDeprecationWarning with pytest.warns(VisibleDeprecationWarning): hype = load(filename, select_type='spectrum') hype = load(filename, select_type='spectrum_image') np.testing.assert_allclose( hype.axes_manager[0].scale, 1.66740910949362, atol=1E-12) np.testing.assert_allclose( hype.axes_manager[1].scale, 1.66740910949362, atol=1E-12) assert hype.axes_manager[1].units == 'µm' np.testing.assert_allclose(hype.axes_manager[2].scale, 0.009999) np.testing.assert_allclose(hype.axes_manager[2].offset, -0.47225277) assert hype.axes_manager[2].units == 'keV' assert hype.axes_manager[2].is_binned == True md_ref = { 'Acquisition_instrument': { 'SEM': { 'beam_energy': 20, 'magnification': 1819.22595, 'Detector': { 'EDS': { 'elevation_angle': 35.0, 'detector_type': 'XFlash 6|10', 'azimuth_angle': 90.0, 'real_time': 70.07298, 'energy_resolution_MnKa': 130.0}}, 'Stage': { 'tilt_alpha': 0.0, 'rotation': 326.10089, 'x': 66940.81, 'y': 54233.16, 'z': 39194.77}}}, 'General': { 'original_filename': '30x30_instructively_packed_16bit_compressed.bcf', 'title': 'EDX', 'date': '2018-10-04', 'time': '13:02:07'}, 'Sample': { 'name': 'chevkinite', 'elements': ['Al', 'C', 'Ca', 'Ce', 'Fe', 'Gd', 'K', 'Mg', 'Na', 'Nd', 'O', 'P', 'Si', 'Sm', 'Th', 'Ti'], 
            'xray_lines': ['Al_Ka', 'C_Ka', 'Ca_Ka', 'Ce_La', 'Fe_Ka',
                           'Gd_La', 'K_Ka', 'Mg_Ka', 'Na_Ka', 'Nd_La', 'O_Ka',
                           'P_Ka', 'Si_Ka', 'Sm_La', 'Th_Ma', 'Ti_Ka']},
        'Signal': {
            'quantity': 'X-rays (Counts)',
            'signal_type': 'EDS_SEM'},
        '_HyperSpy': {
            'Folding': {'original_axes_manager': None,
                        'original_shape': None,
                        'signal_unfolded': False,
                        'unfolded': False}}}

    filename_omd = os.path.join(my_path, 'bruker_data',
                                '30x30_original_metadata.json')
    with open(filename_omd) as fn:
        # original_metadata:
        omd_ref = json.load(fn)
    assert_deep_almost_equal(hype.metadata.as_dictionary(), md_ref)
    assert_deep_almost_equal(hype.original_metadata.as_dictionary(), omd_ref)
    assert hype.metadata.General.date == "2018-10-04"
    assert hype.metadata.General.time == "13:02:07"
    assert hype.metadata.Signal.quantity == "X-rays (Counts)"


def test_hyperspy_wrap_downsampled():
    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    print('testing downsampled bcf wrap to hyperspy signal...')
    hype = load(filename, select_type='spectrum_image', downsample=5)
    np.testing.assert_allclose(
        hype.axes_manager[0].scale, 8.337045547468101, atol=1E-12)
    np.testing.assert_allclose(
        hype.axes_manager[1].scale, 8.337045547468101, atol=1E-12)
    assert hype.axes_manager[1].units == 'µm'


def test_get_mode():
    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    s = load(filename, select_type='spectrum_image', instrument='SEM')
    assert s.metadata.Signal.signal_type == "EDS_SEM"
    assert isinstance(s, signals.EDSSEMSpectrum)

    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    s = load(filename, select_type='spectrum_image', instrument='TEM')
    assert s.metadata.Signal.signal_type == "EDS_TEM"
    assert isinstance(s, signals.EDSTEMSpectrum)

    filename = os.path.join(my_path, 'bruker_data', test_files[0])
    s = load(filename, select_type='spectrum_image')
    assert s.metadata.Signal.signal_type == "EDS_SEM"
    assert isinstance(s, signals.EDSSEMSpectrum)

    filename = os.path.join(my_path, 'bruker_data', test_files[3])
    s = load(filename, select_type='spectrum_image')
    assert s.metadata.Signal.signal_type == "EDS_TEM"
    assert isinstance(s, signals.EDSTEMSpectrum)


def test_wrong_file():
    filename = os.path.join(my_path, 'bruker_data', 'Nope.bcf')
    with pytest.raises(TypeError):
        load(filename)


def test_fast_bcf():
    pytest.importorskip("hyperspy.io_plugins.unbcf_fast")
    from hyperspy.io_plugins import bruker

    for bcffile in test_files:
        filename = os.path.join(my_path, 'bruker_data', bcffile)
        thingy = bruker.BCF_reader(filename)
        for j in range(2, 5, 1):
            print('downsampling:', j)
            bruker.fast_unbcf = True   # manually enabling fast parsing
            hmap1 = thingy.parse_hypermap(downsample=j)  # using cython
            bruker.fast_unbcf = False  # manually disabling fast parsing
            hmap2 = thingy.parse_hypermap(downsample=j)  # py implementation
            np.testing.assert_array_equal(hmap1, hmap2)


def test_decimal_regex():
    from hyperspy.io_plugins.bruker import fix_dec_patterns
    dummy_xml_positive = [b'<dummy_tag>85,658</dummy_tag>',
                          b'<dummy_tag>85,658E-8</dummy_tag>',
                          b'<dummy_tag>-85,658E-8</dummy_tag>',
                          # entries already using a dot must stay unchanged:
                          b'<dum_tag>-85.658</dum_tag>',
                          b'<dum_tag>85.658E-8</dum_tag>']
    dummy_xml_negative = [b'<dum_tag>12,25,23,45,56,12,45</dum_tag>',
                          b'<dum_tag>12e1,23,-24E-5</dum_tag>']
    for i in dummy_xml_positive:
        assert b'85.658' in fix_dec_patterns.sub(b'\\1.\\2', i)
    for j in dummy_xml_negative:
        assert b'.' not in fix_dec_patterns.sub(b'\\1.\\2', j)


def test_all_spx_loads():
    for spxfile in spx_files:
        filename = os.path.join(my_path, 'bruker_data', spxfile)
        s = load(filename)
        assert s.data.dtype == np.uint64
        assert s.metadata.Signal.signal_type == 'EDS_SEM'


def test_stand_alone_spx():
    filename = os.path.join(my_path, 'bruker_data', 'bruker_nano.spx')
    s = load(filename)
    assert s.metadata.Sample.elements == ['Fe', 'S', 'Cu']
    assert s.metadata.Acquisition_instrument.SEM.Detector.EDS.live_time == 7.385


def test_bruker_XRF():
    # See https://github.com/hyperspy/hyperspy/issues/2689
    # Bruker M6 Jetstream SPX
    filename = os.path.join(my_path, 'bruker_data',
                            'bruker_m6_jetstream_file_example.spx')
    s = load(filename)
    assert s.metadata.Acquisition_instrument.TEM.Detector.EDS.live_time == 28.046
    assert s.metadata.Acquisition_instrument.TEM.beam_energy == 50
ericpre/hyperspy
hyperspy/tests/io/test_bruker.py
hyperspy/conftest.py
from os.path import join
from collections import namedtuple

# Base path for ceph
base_path = '/var/lib/ceph'

# Base run path
base_run_path = '/var/run/ceph'

tmp_path = join(base_path, 'tmp')
mon_path = join(base_path, 'mon')
mds_path = join(base_path, 'mds')
osd_path = join(base_path, 'osd')

# Default package components to install
_base_components = [
    'ceph-osd',
    'ceph-mds',
    'ceph-mon',
]

default_components = namedtuple('DefaultComponents', ['rpm', 'deb'])

# The difference here is because RPMs currently name the radosgw package
# differently than DEBs.
# TODO: This needs to get unified once the package naming gets consistent.
# Note that the tuples are assigned as attributes of the namedtuple *class*
# itself, which is used as a simple namespace rather than instantiated.
default_components.rpm = tuple(_base_components + ['ceph-radosgw'])
default_components.deb = tuple(_base_components + ['radosgw'])
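
# A minimal usage sketch (illustrative only, not part of the original
# module): a caller would pick the component tuple matching the packaging
# flavour of the target host. The ``flavour`` parameter and
# ``components_for`` helper below are hypothetical.
#
#     from ceph_deploy.util import constants
#
#     def components_for(flavour):
#         # 'rpm' and 'deb' are the only variants defined above; any other
#         # packaging flavour would need to be added explicitly.
#         if flavour == 'rpm':
#             return constants.default_components.rpm
#         return constants.default_components.deb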
from ceph_deploy import new
from ceph_deploy.tests import util

import pytest


class TestValidateHostIp(object):

    def test_for_all_subnets_all_ips_match(self):
        ips = util.generate_ips("10.0.0.1", "10.0.0.40")
        ips.extend(util.generate_ips("10.0.1.1", "10.0.1.40"))
        subnets = ["10.0.0.1/16", "10.0.1.1/16"]
        assert new.validate_host_ip(ips, subnets) is None

    def test_all_subnets_have_one_matching_ip(self):
        ips = util.generate_ips("10.0.0.1", "10.0.0.40")
        ips.extend(util.generate_ips("10.0.1.1", "10.0.1.40"))
        # extra IPs that match no subnet are irrelevant; the
        # requirement is already satisfied
        ips.extend(util.generate_ips("10.1.2.1", "10.1.2.40"))
        subnets = ["10.0.0.1/16", "10.0.1.1/16"]
        assert new.validate_host_ip(ips, subnets) is None

    def test_not_all_subnets_have_one_matching_ip(self):
        ips = util.generate_ips("10.0.0.1", "10.0.0.40")
        ips.extend(util.generate_ips("10.0.1.1", "10.0.1.40"))
        subnets = ["10.0.0.1/16", "10.1.1.1/16"]
        with pytest.raises(RuntimeError):
            new.validate_host_ip(ips, subnets)
alfredodeza/ceph-deploy
ceph_deploy/tests/unit/test_new.py
ceph_deploy/util/constants.py
# -*- coding: utf-8 -*-
# Copyright 2011-2017 Michael Helmling
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation
#
"""Tests for removing existing tags in FLAC.

Motivated by https://github.com/supermihi/pytaglib/issues/19.
"""
from __future__ import absolute_import, unicode_literals

import taglib

from tests import copy_test_file


def test_set_value_to_empty_string_removes_tag(tmpdir):
    copy_file = copy_test_file('issue19.flac', tmpdir)
    tfile = taglib.File(copy_file)
    tfile.tags['COMMENT'] = ['']
    tfile.save()
    tfile.close()

    tfile = taglib.File(copy_file)
    assert 'COMMENT' not in tfile.tags
    tfile.close()


def test_set_value_to_empty_list_removes_tag(tmpdir):
    copy_file = copy_test_file('issue19.flac', tmpdir)
    tfile = taglib.File(copy_file)
    tfile.tags['COMMENT'] = []
    tfile.save()
    tfile.close()

    tfile = taglib.File(copy_file)
    assert 'COMMENT' not in tfile.tags
    tfile.close()


def test_delete_key_removes_tag(tmpdir):
    copy_file = copy_test_file('issue19.flac', tmpdir)
    tfile = taglib.File(copy_file)
    del tfile.tags['COMMENT']
    tfile.save()
    tfile.close()

    tfile = taglib.File(copy_file)
    assert 'COMMENT' not in tfile.tags
    tfile.close()


def test_set_value_to_space_does_not_remove_tag(tmpdir):
    copy_file = copy_test_file('issue19.flac', tmpdir)
    tfile = taglib.File(copy_file)
    tfile.tags['COMMENT'] = [' ']
    tfile.save()
    tfile.close()

    tfile = taglib.File(copy_file)
    assert 'COMMENT' in tfile.tags
    assert tfile.tags['COMMENT'][0] == ' '
    tfile.close()
# -*- coding: utf-8 -*-
# Copyright 2019 Michael Helmling
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation
#
from __future__ import unicode_literals

import os, stat, sys

import taglib
import pytest

from . import copy_test_file


def test_not_existing_file_raises():
    """Ensure OSError is raised if a file does not exist or is a directory."""
    with pytest.raises(OSError):
        taglib.File('/this/file/almost/certainly/does/not/exist.flac')
    with pytest.raises(OSError):
        taglib.File('/spæciäl/chàracterß.mp3')
    with pytest.raises(OSError):
        taglib.File('/usr')  # directory
    with pytest.raises(OSError):
        taglib.File("/nonexistent.ogg")


@pytest.mark.skipif(sys.platform == 'win32',
                    reason="os.getuid() is not available on Windows")
def test_os_error_on_save_read_only_file(tmpdir):
    """Ensure OSError is raised when save() is called on read-only files."""
    if os.getuid() == 0:
        pytest.skip('taglib allows writing read-only files as root')
    f = copy_test_file('rare_frames.mp3', tmpdir)
    os.chmod(f, stat.S_IREAD)
    tf = taglib.File(f)
    assert tf.readOnly
    with pytest.raises(OSError):
        tf.save()
    # restore read/write permissions; the flags must be combined with
    # bitwise OR (AND would clear them all)
    os.chmod(f, stat.S_IREAD | stat.S_IWRITE)
    tf.close()


@pytest.mark.skipif(sys.platform == 'win32',
                    reason="os.getuid() is not available on Windows")
def test_file_with_non_ascii_name_throws_on_readonly_save(tmpdir):
    """Motivated by https://github.com/supermihi/pytaglib/issues/21."""
    if os.getuid() == 0:
        pytest.skip('taglib allows writing read-only files as root')
    copy_file = copy_test_file('readönly.mp3', tmpdir)
    os.chmod(copy_file, stat.S_IREAD)
    tfile = taglib.File(copy_file.encode('utf8'))
    tfile.tags['COMMENT'] = ['']
    with pytest.raises(OSError):
        tfile.save()
    tfile.close()


def test_can_read_bytes_filename_non_ascii(tmpdir):
    f = copy_test_file('testöü.flac', tmpdir)
    tf = taglib.File(f.encode('utf8'))
    tf.close()


def test_can_read_unicode_filename_non_ascii(tmpdir):
    f = copy_test_file('testöü.flac', tmpdir)
    if sys.version_info.major == 2:
        f = unicode(f)
    tf = taglib.File(f)
    tf.close()
supermihi/pytaglib
tests/test_io.py
tests/test_flac_remove_tags.py