Dataset columns:
  input: string (lengths 53 to 297k)
  output: string (604 distinct values)
  repo_name: string (376 distinct values)
  test_path: string (583 distinct values)
  code_path: string (lengths 7 to 116)
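For orientation, a minimal sketch of how rows with this schema might be iterated; the Hugging Face `datasets` API is assumed and the dataset path "example/code-to-test" is hypothetical, since the source does not name one.

# A minimal sketch, assuming these rows are published via the Hugging Face
# `datasets` library; the path "example/code-to-test" is hypothetical.
from datasets import load_dataset

ds = load_dataset("example/code-to-test", split="train")
for row in ds.select(range(3)):
    # each row pairs a source file (input, at code_path) with the test file
    # (output, at test_path) drawn from repo_name
    print(row["repo_name"], row["code_path"], "->", row["test_path"])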
"""Global fixtures for depot tests""" import fauxfactory import pytest from wrapanapi import VmState from cfme.utils.conf import cfme_data from cfme.utils.log import logger from cfme.utils.net import find_pingable from cfme.utils.net import find_pingable_ipv6 from cfme.utils.net import pick_responding_ip from cfme.utils.virtual_machines import deploy_template from cfme.utils.wait import TimedOutError from cfme.utils.wait import wait_for FTP_PORT = 21 @pytest.fixture(scope="module") def depot_machine_ip(request, appliance): """ Deploy vm for depot test This fixture uses for deploy vm on provider from yaml and then receive it's ip After test run vm deletes from provider """ try: # use long-test name so it has a longer life before automatic cleanup data = cfme_data.log_db_operations vm = deploy_template( data.log_db_depot_template.provider, fauxfactory.gen_alphanumeric(26, start="long-test-depot-"), template_name=data.log_db_depot_template.template_name ) vm.ensure_state(VmState.RUNNING) except AttributeError: msg = 'Missing some yaml information necessary to deploy depot VM' logger.exception(msg) pytest.skip(msg) try: # TODO It would be better to use retry_connect here, but this requires changes to other # fixtures. found_ip = pick_responding_ip(lambda: vm.all_ips, FTP_PORT, 300, 5, 10) except TimedOutError: msg = 'Timed out waiting for reachable depot VM IP' logger.exception(msg) pytest.skip(msg) yield found_ip vm.cleanup() @pytest.fixture(scope="module") def depot_machine_ipv4_and_ipv6(request, appliance): """ Deploy vm for depot test This fixture is used for deploying a vm on a provider from the yamls and getting its ip (both ipv4 and ipv6) After test run vm deletes from provider """ try: # use long-test name so it has a longer life before automatic cleanup data = cfme_data.log_db_operations vm = deploy_template( data.log_db_depot_template.provider, f"long-test-depot-{fauxfactory.gen_alphanumeric()}", template_name=data.log_db_depot_template.template_name ) vm.ensure_state(VmState.RUNNING) except AttributeError: msg = 'Missing some yaml information necessary to deploy depot VM' logger.exception(msg) pytest.skip(msg) try: ipv4, _ = wait_for( find_pingable, func_args=[vm, False], fail_condition=None, delay=5, num_sec=300 ) ipv6, _ = wait_for( find_pingable_ipv6, func_args=[vm], fail_condition=None, delay=5, num_sec=300 ) except TimedOutError: msg = 'Timed out waiting for reachable depot VM IP' logger.exception(msg) pytest.skip(msg) yield ipv4, ipv6 vm.cleanup()
output:

import fauxfactory
import pytest
from wait_for import wait_for

from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.common.provider import BaseProvider
from cfme.containers.provider.openshift import OpenshiftProvider
from cfme.exceptions import ToolbarOptionGreyedOrUnavailable
from cfme.infrastructure.provider.rhevm import RHEVMProvider
from cfme.infrastructure.provider.scvmm import SCVMMProvider
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.markers.env_markers.provider import providers
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.generators import random_vm_name
from cfme.utils.log import logger
from cfme.utils.providers import ProviderFilter

pytestmark = [
    pytest.mark.usefixtures('uses_infra_providers', 'uses_cloud_providers', 'provider'),
    pytest.mark.tier(2),
    pytest.mark.provider(
        gen_func=providers,
        filters=[ProviderFilter(classes=[BaseProvider]),
                 ProviderFilter(classes=[SCVMMProvider, RHEVMProvider, OpenshiftProvider],
                                inverted=True)],
        scope='module'),
    test_requirements.genealogy
]


@pytest.fixture
def create_vm_with_clone(request, create_vm, provider, appliance):
    """Fixture to provision a VM and clone it"""
    first_name = fauxfactory.gen_alphanumeric()
    last_name = fauxfactory.gen_alphanumeric()
    # f-string builds the requester email from the generated names
    email = f"{first_name}.{last_name}@test.com"
    provision_type = 'VMware'
    vm_name = random_vm_name(context=None, max_length=15)

    create_vm.clone_vm(email, first_name, last_name, vm_name, provision_type)
    vm2 = appliance.collections.infra_vms.instantiate(vm_name, provider)
    wait_for(lambda: vm2.exists, timeout=120)

    @request.addfinalizer
    def _cleanup():
        vm2.cleanup_on_provider()
        provider.refresh_provider_relationships()
    return create_vm, vm2


# uncollected above in pytest_generate_tests
@pytest.mark.parametrize("from_edit", [True, False], ids=["via_edit", "via_summary"])
@pytest.mark.uncollectif(lambda provider, from_edit:
                         provider.one_of(CloudProvider) and not from_edit,
                         reason='Cloud provider genealogy only shown on edit')
@pytest.mark.parametrize('create_vm', ['small_template'], indirect=True)
def test_vm_genealogy_detected(
        request, setup_provider, provider, small_template, soft_assert, from_edit, create_vm):
    """Tests VM genealogy from what CFME can detect.

    Prerequisites:
        * A provider that is set up and has templates suitable for provisioning.

    Steps:
        * Provision the VM
        * Then, depending on whether you want to check it via ``Genealogy`` or the edit page:
            * Open the edit page of the VM; the parent template is shown in the
              dropdown. Assert that it corresponds with the template the VM was
              deployed from.
            * Open VM Genealogy via the details page and see the template being
              an ancestor of the VM.

    Note:
        Cloud providers appear not to have the Genealogy option available in the
        details view, so the only check possible is via the edit form.

    Metadata:
        test_flag: genealogy, provision

    Polarion:
        assignee: spusater
        casecomponent: Infra
        caseimportance: medium
        initialEstimate: 1/4h
    """
    if from_edit:
        create_vm.open_edit()
        view = navigate_to(create_vm, 'Edit')
        opt = view.form.parent_vm.all_selected_options[0]
        parent = opt.strip()
        assert parent.startswith(small_template.name), "The parent template not detected!"
    else:
        try:
            vm_crud_ancestors = create_vm.genealogy.ancestors
        except NameError:
            logger.exception("The parent template not detected!")
            pytest.fail("The parent template not detected!")
        assert small_template.name in vm_crud_ancestors, \
            f"{small_template.name} is not in {create_vm.name}'s ancestors"


@pytest.mark.provider([VMwareProvider])
@pytest.mark.tier(1)
def test_genealogy_comparison(create_vm_with_clone, soft_assert):
    """Test that the compare button is enabled and the compare page is loaded
    when two VMs are compared.

    Polarion:
        assignee: spusater
        casecomponent: Infra
        caseimportance: medium
        initialEstimate: 1/6h
        startsin: 5.10.4
        setup:
            1. Have a provider with some VMs added
        testSteps:
            1. Set the parent-child relationship for at least two VMs
            2. Open one of the VM's genealogy screens from its summary
            3. Check at least two checkboxes in the genealogy tree
        expectedResults:
            1. Genealogy set
            2. Genealogy screen displayed
            3. Compare button enabled

    Bugzilla:
        1694712
    """
    try:
        compare_view = create_vm_with_clone[0].genealogy.compare(*create_vm_with_clone)
        assert compare_view.is_displayed
    except ToolbarOptionGreyedOrUnavailable:
        logger.exception("The compare button is disabled or unavailable")
        pytest.fail("The compare button is disabled or unavailable")
repo_name: nachandr/cfme_tests
test_path: cfme/tests/cloud_infra_common/test_genealogy.py
code_path: cfme/fixtures/depot.py
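As a usage sketch for the depot fixture above: a test might consume the yielded IP like this. The test name and the anonymous FTP login are illustrative assumptions, not taken from the repo.

import ftplib

import pytest


@pytest.mark.usefixtures("setup_provider")
def test_depot_ftp_reachable(depot_machine_ip):
    # hypothetical test: the module-scoped fixture yields an IP whose FTP
    # port (21) already answered during fixture setup
    ftp = ftplib.FTP(depot_machine_ip, timeout=30)
    ftp.login()  # anonymous login, assuming the depot template allows it
    ftp.quit()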
input:

from widgetastic.widget import View
from widgetastic_patternfly import AboutModal

from cfme.exceptions import ItemNotFound
from cfme.utils.appliance.implementations.ui import navigate_to

# MIQ/CFME about field names
VERSION = 'Version'
SERVER = 'Server Name'
USER = 'User Name'
ROLE = 'User Role'
BROWSER = 'Browser'
BROWSER_VERSION = 'Browser Version'
BROWSER_OS = 'Browser OS'
ZONE = "Zone"
REGION = "Region"


class MIQAboutModal(AboutModal):
    """Override some locators that MIQ mangles"""
    CLOSE_LOC = './/div[@class="modal-header"]/button[@class="close"]'


class AboutView(View):
    """The view for the about modal"""
    @property
    def is_displayed(self):
        return self.modal.is_open

    modal = MIQAboutModal()  # 5.10 has id, 5.11 does not, wt.pf doesn't need it.


def get_detail(field, server):
    """Open the about modal and fetch the value for one of the fields.

    The 'title' and 'trademark' fields are allowed and get the header/footer
    values. Raises ItemNotFound if the field isn't in the about modal.

    :param field: string label for the detail field
    :param server: the server entity whose About modal is opened
    :return: string value from the requested field
    """
    view = navigate_to(server, 'About')
    try:
        if field.lower() in ['title', 'trademark']:
            return getattr(view.modal, field.lower())
        else:
            # this is the AboutModal.items function, TODO rename
            return view.modal.items()[field]
    except (KeyError, AttributeError):
        raise ItemNotFound(f'No field named {field} found in "About" modal.')
    finally:
        # close since it's a blocking modal and will break further navigation
        view.modal.close()
output: identical to the first row's output (cfme/tests/cloud_infra_common/test_genealogy.py); verbatim duplicate omitted.
repo_name: nachandr/cfme_tests
test_path: cfme/tests/cloud_infra_common/test_genealogy.py
code_path: cfme/configure/about.py
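A short usage sketch for get_detail; it assumes an appliance fixture is in scope, as in the repo's other tests, and that appliance.version reports the build version.

from cfme.configure.about import VERSION, get_detail


def test_about_version(appliance):
    # fetch the Version row from the About modal and compare it with the
    # version the appliance reports (assumption: they use the same format)
    ui_version = get_detail(VERSION, appliance.server)
    assert ui_version.startswith(str(appliance.version))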
input:

import attr
from widgetastic.exceptions import NoSuchElementException
from wrapanapi.systems import VMWareSystem

from cfme.common.candu_views import VMUtilizationView
from cfme.common.provider import DefaultEndpoint
from cfme.common.provider import DefaultEndpointForm
from cfme.common.provider import VMRCEndpoint
from cfme.exceptions import ItemNotFound
from cfme.infrastructure.provider import InfraProvider
from cfme.services.catalogs.catalog_items import VMwareCatalogItem
from widgetastic_manageiq import LineChart


class VirtualCenterEndpoint(DefaultEndpoint):
    pass


class VirtualCenterEndpointForm(DefaultEndpointForm):
    pass


class VirtualCenterVMUtilizationView(VMUtilizationView):
    """A VM Utilization view for virtual center providers"""
    vm_cpu = LineChart(id='miq_chart_parent_candu_0')
    vm_cpu_state = LineChart(id='miq_chart_parent_candu_1')
    vm_memory = LineChart(id='miq_chart_parent_candu_2')
    vm_disk = LineChart(id='miq_chart_parent_candu_3')
    vm_network = LineChart(id='miq_chart_parent_candu_4')


@attr.s(eq=False)
class VMwareProvider(InfraProvider):
    catalog_item_type = VMwareCatalogItem
    vm_utilization_view = VirtualCenterVMUtilizationView
    type_name = "virtualcenter"
    mgmt_class = VMWareSystem
    db_types = ["Vmware::InfraManager"]
    endpoints_form = VirtualCenterEndpointForm
    ems_pretty_name = 'VMware vCenter'
    discover_dict = {"vmware": True}
    settings_key = 'ems_vmware'
    # xpath locators for elements, to be used by selenium
    _console_connection_status_element = '//*[@id="connection-status"]|//*[@id="noVNC_status"]'
    _canvas_element = ('(//*[@id="remote-console" or @id="wmksContainer"]/canvas|'
                       '//*[@id="noVNC_canvas"])')
    _ctrl_alt_del_xpath = '(//*[@id="ctrlaltdel"]|//*[@id="sendCtrlAltDelButton"])'
    _fullscreen_xpath = '//*[@id="fullscreen"]'
    bad_credentials_error_msg = 'Cannot complete login due to an incorrect user name or password.'
    log_name = 'vim'
    _console_type = '//*[@id="console-type"]'

    ems_events = [
        ('vm_create', {'event_type': 'VmDeployedEvent', 'dest_vm_or_template_id': None}),
        ('vm_stop', {'event_type': 'VmPoweredOffEvent', 'vm_or_template_id': None}),
        ('vm_start', {'event_type': 'VmPoweredOnEvent', 'vm_or_template_id': None}),
        ('vm_delete', {'event_type': 'VmRemovedEvent', 'vm_or_template_id': None})
    ]

    def deployment_helper(self, deploy_args):
        """Used in utils.virtual_machines"""
        # Called within a dictionary update. Since we want to remove key/value
        # pairs, return the entire dictionary.
        deploy_args.pop('username', None)
        deploy_args.pop('password', None)
        if "allowed_datastores" not in deploy_args and "allowed_datastores" in self.data:
            deploy_args['allowed_datastores'] = self.data['allowed_datastores']
        return deploy_args

    @classmethod
    def from_config(cls, prov_config, prov_key, appliance=None):
        appliance = appliance or cls.appliance
        endpoints = {
            VirtualCenterEndpoint.name:
                VirtualCenterEndpoint(**prov_config['endpoints']['default'])
        }
        vmrc_endpoint_config = prov_config["endpoints"].get(VMRCEndpoint.name, {})
        if vmrc_endpoint_config:
            endpoints[VMRCEndpoint.name] = VMRCEndpoint(**vmrc_endpoint_config)

        if prov_config.get('discovery_range'):
            start_ip = prov_config['discovery_range']['start']
            end_ip = prov_config['discovery_range']['end']
        else:
            start_ip = end_ip = prov_config.get('ipaddress')
        return appliance.collections.infra_providers.instantiate(
            prov_class=cls,
            name=prov_config['name'],
            endpoints=endpoints,
            zone=prov_config['server_zone'],
            key=prov_key,
            start_ip=start_ip,
            end_ip=end_ip)

    @property
    def view_value_mapping(self):
        return dict(name=self.name, prov_type='VMware vCenter')

    # The following methods only work if the remote console window is open and
    # selenium is focused on it. They will not work if selenium is focused on
    # the appliance window.
    def _try_element_lookup(self, xpath):
        try:
            return self.appliance.browser.widgetastic.selenium.find_element_by_xpath(xpath)
        except NoSuchElementException:
            raise ItemNotFound("Element not found on screen, is current focus on console window?")

    def get_console_connection_status(self):
        return self._try_element_lookup(self._console_connection_status_element).text

    def get_remote_console_canvas(self):
        return self._try_element_lookup(self._canvas_element)

    def get_console_ctrl_alt_del_btn(self):
        return self._try_element_lookup(self._ctrl_alt_del_xpath)

    def get_console_fullscreen_btn(self):
        return self._try_element_lookup(self._fullscreen_xpath)

    def get_console_type_name(self):
        return self._try_element_lookup(self._console_type).text
output: identical to the first row's output (cfme/tests/cloud_infra_common/test_genealogy.py); verbatim duplicate omitted.
repo_name: nachandr/cfme_tests
test_path: cfme/tests/cloud_infra_common/test_genealogy.py
code_path: cfme/infrastructure/provider/virtualcenter.py
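A small sketch of deployment_helper's contract, since it is called inside a dictionary update: credentials are stripped and allowed_datastores is filled from the provider yaml when absent. The provider object is assumed to be a configured VMwareProvider instance.

# assumes `provider` is a configured VMwareProvider instance
deploy_args = {'username': 'user', 'password': 'secret', 'vm_name': 'test-vm'}
deploy_args = provider.deployment_helper(deploy_args)
assert 'username' not in deploy_args and 'password' not in deploy_args
# allowed_datastores now mirrors provider.data['allowed_datastores'] if defined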
input:

import attr

from cfme.utils.appliance.plugin import AppliancePlugin
from cfme.utils.appliance.plugin import AppliancePluginException
from cfme.utils.log import logger_wrap
from cfme.utils.quote import quote
from cfme.utils.wait import wait_for


class SystemdException(AppliancePluginException):
    pass


@attr.s
class SystemdService(AppliancePlugin):
    unit_name = attr.ib(type=str)

    @logger_wrap('SystemdService command runner: {}')
    def _run_service_command(
            self,
            command,
            expected_exit_code=None,
            unit_name=None,
            log_callback=None
    ):
        """Wrapper around running the command and raising exception on unexpected code

        Args:
            command: string command for systemd (stop, start, restart, etc)
            expected_exit_code: the exit code to expect, otherwise raise
            unit_name: optional unit name, defaults to self.unit_name attribute
            log_callback: logger to log against

        Raises:
            SystemdException: When expected_exit_code is not matched
        """
        unit = self.unit_name if unit_name is None else unit_name
        with self.appliance.ssh_client as ssh:
            cmd = 'systemctl {} {}'.format(quote(command), quote(unit))
            log_callback(f'Running {cmd}')
            result = ssh.run_command(cmd, container=self.appliance.ansible_pod_name)

            if expected_exit_code is not None and result.rc != expected_exit_code:
                # TODO: Bring back address
                msg = 'Failed to {} {}\nError: {}'.format(
                    command, self.unit_name, result.output)
                if log_callback:
                    log_callback(msg)
                else:
                    self.logger.error(msg)
                raise SystemdException(msg)

        return result

    def stop(self, log_callback=None):
        return self._run_service_command(
            'stop',
            expected_exit_code=0,
            log_callback=log_callback
        )

    def start(self, log_callback=None):
        return self._run_service_command(
            'start',
            expected_exit_code=0,
            log_callback=log_callback
        )

    def restart(self, log_callback=None):
        return self._run_service_command(
            'restart',
            expected_exit_code=0,
            log_callback=log_callback
        )

    def reload(self, log_callback=None):
        return self._run_service_command(
            'reload',
            expected_exit_code=0,
            log_callback=log_callback
        )

    def enable(self, log_callback=None):
        return self._run_service_command(
            'enable',
            expected_exit_code=0,
            log_callback=log_callback
        )

    @property
    def enabled(self):
        return self._run_service_command('is-enabled').rc == 0

    @property
    def is_active(self):
        return self._run_service_command('is-active').rc == 0

    @property
    def running(self):
        return self._run_service_command("status").rc == 0

    def wait_for_running(self, timeout=600):
        result, wait = wait_for(
            lambda: self.running,
            num_sec=timeout,
            fail_condition=False,
            delay=5,
        )
        return result

    def daemon_reload(self, log_callback=None):
        """Call daemon-reload, no unit name for this"""
        return self._run_service_command(
            command='daemon-reload',
            expected_exit_code=0,
            unit_name='',
            log_callback=log_callback
        )
output: identical to the first row's output (cfme/tests/cloud_infra_common/test_genealogy.py); verbatim duplicate omitted.
repo_name: nachandr/cfme_tests
test_path: cfme/tests/cloud_infra_common/test_genealogy.py
code_path: cfme/utils/appliance/services.py
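A brief usage sketch for SystemdService; 'evmserverd' (the CFME server unit) is an assumed example, and in the repo these objects are normally reached through appliance plugin attributes rather than constructed directly as shown here.

from cfme.utils.appliance.services import SystemdService

evm = SystemdService(appliance=appliance, unit_name='evmserverd')  # assumed unit
evm.restart()                       # 'systemctl restart evmserverd' over SSH
evm.wait_for_running(timeout=300)   # poll the 'status' exit code until 0
assert evm.is_active and evm.enabled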
input:

# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:

# Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.

"""Functions that return config-related completion models."""

from qutebrowser.config import configdata, configexc
from qutebrowser.completion.models import completionmodel, listcategory, util
from qutebrowser.commands import runners, cmdexc
from qutebrowser.keyinput import keyutils


def option(*, info):
    """A CompletionModel filled with settings and their descriptions."""
    model = completionmodel.CompletionModel(column_widths=(20, 70, 10))
    options = ((opt.name, opt.description, info.config.get_str(opt.name))
               for opt in configdata.DATA.values()
               if not opt.no_autoconfig)
    model.add_category(listcategory.ListCategory("Options", options))
    return model


def customized_option(*, info):
    """A CompletionModel filled with set settings and their descriptions."""
    model = completionmodel.CompletionModel(column_widths=(20, 70, 10))
    options = ((values.opt.name, values.opt.description,
                info.config.get_str(values.opt.name))
               for values in info.config if values)
    model.add_category(listcategory.ListCategory("Customized options", options))
    return model


def value(optname, *values, info):
    """A CompletionModel filled with setting values.

    Args:
        optname: The name of the config option this model shows.
        values: The values already provided on the command line.
        info: A CompletionInfo instance.
    """
    model = completionmodel.CompletionModel(column_widths=(30, 70, 0))

    try:
        current = info.config.get_str(optname)
    except configexc.NoOptionError:
        return None

    opt = info.config.get_opt(optname)
    default = opt.typ.to_str(opt.default)
    cur_def = []
    if current not in values:
        cur_def.append((current, "Current value"))
    if default not in values:
        cur_def.append((default, "Default value"))
    if cur_def:
        cur_cat = listcategory.ListCategory("Current/Default", cur_def)
        model.add_category(cur_cat)

    vals = opt.typ.complete() or []
    vals = [x for x in vals if x[0] not in values]
    if vals:
        model.add_category(listcategory.ListCategory("Completions", vals))
    return model


def _bind_current_default(key, info):
    """Get current/default data for the given key."""
    data = []
    try:
        seq = keyutils.KeySequence.parse(key)
    except keyutils.KeyParseError as e:
        data.append(('', str(e), key))
        return data

    cmd_text = info.keyconf.get_command(seq, 'normal')
    if cmd_text:
        parser = runners.CommandParser()
        try:
            cmd = parser.parse(cmd_text).cmd
        except cmdexc.NoSuchCommandError:
            data.append((cmd_text, '(Current) Invalid command!', key))
        else:
            data.append((cmd_text, '(Current) {}'.format(cmd.desc), key))

    cmd_text = info.keyconf.get_command(seq, 'normal', default=True)
    if cmd_text:
        parser = runners.CommandParser()
        cmd = parser.parse(cmd_text).cmd
        data.append((cmd_text, '(Default) {}'.format(cmd.desc), key))
    return data


def bind(key, *, info):
    """A CompletionModel filled with all bindable commands and descriptions.

    Args:
        key: the key being bound.
    """
    model = completionmodel.CompletionModel(column_widths=(20, 60, 20))
    data = _bind_current_default(key, info)

    if data:
        model.add_category(listcategory.ListCategory("Current/Default", data))

    cmdlist = util.get_cmd_completions(info, include_hidden=True,
                                       include_aliases=True)
    model.add_category(listcategory.ListCategory("Commands", cmdlist))
    return model
output:

# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:

# Copyright 2017-2018 Ryan Roden-Corrent (rcorre) <ryan@rcorre.net>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.

"""Tests for CompletionFilterModel."""

import pytest

from qutebrowser.completion.models import listcategory


@pytest.mark.parametrize('pattern, before, after, after_nosort', [
    ('foo', [('foo', ''), ('bar', '')],
     [('foo', '')],
     [('foo', '')]),

    ('foo', [('foob', ''), ('fooc', ''), ('fooa', '')],
     [('fooa', ''), ('foob', ''), ('fooc', '')],
     [('foob', ''), ('fooc', ''), ('fooa', '')]),

    # prefer foobar as it starts with the pattern
    ('foo', [('barfoo', ''), ('foobaz', ''), ('foobar', '')],
     [('foobar', ''), ('foobaz', ''), ('barfoo', '')],
     [('foobaz', ''), ('foobar', ''), ('barfoo', '')]),

    ('foo', [('foo', 'bar'), ('bar', 'foo'), ('bar', 'bar')],
     [('foo', 'bar'), ('bar', 'foo')],
     [('foo', 'bar'), ('bar', 'foo')]),
])
def test_set_pattern(pattern, before, after, after_nosort, model_validator):
    """Validate the filtering and sorting results of set_pattern."""
    cat = listcategory.ListCategory('Foo', before)
    model_validator.set_model(cat)
    cat.set_pattern(pattern)
    model_validator.validate(after)

    cat = listcategory.ListCategory('Foo', before, sort=False)
    model_validator.set_model(cat)
    cat.set_pattern(pattern)
    model_validator.validate(after_nosort)
repo_name: airodactyl/qutebrowser
test_path: tests/unit/completion/test_listcategory.py
code_path: qutebrowser/completion/models/configmodel.py
input:

from warnings import catch_warnings

import numpy as np

from pandas.core.dtypes import generic as gt

import pandas as pd
import pandas._testing as tm


class TestABCClasses:
    tuples = [[1, 2, 2], ["red", "blue", "red"]]
    multi_index = pd.MultiIndex.from_arrays(tuples, names=("number", "color"))
    datetime_index = pd.to_datetime(["2000/1/1", "2010/1/1"])
    timedelta_index = pd.to_timedelta(np.arange(5), unit="s")
    period_index = pd.period_range("2000/1/1", "2010/1/1/", freq="M")
    categorical = pd.Categorical([1, 2, 3], categories=[2, 3, 1])
    categorical_df = pd.DataFrame({"values": [1, 2, 3]}, index=categorical)
    df = pd.DataFrame({"names": ["a", "b", "c"]}, index=multi_index)
    sparse_array = pd.arrays.SparseArray(np.random.randn(10))
    datetime_array = pd.core.arrays.DatetimeArray(datetime_index)
    timedelta_array = pd.core.arrays.TimedeltaArray(timedelta_index)

    def test_abc_types(self):
        assert isinstance(pd.Index(["a", "b", "c"]), gt.ABCIndex)
        assert isinstance(pd.Int64Index([1, 2, 3]), gt.ABCInt64Index)
        assert isinstance(pd.UInt64Index([1, 2, 3]), gt.ABCUInt64Index)
        assert isinstance(pd.Float64Index([1, 2, 3]), gt.ABCFloat64Index)
        assert isinstance(self.multi_index, gt.ABCMultiIndex)
        assert isinstance(self.datetime_index, gt.ABCDatetimeIndex)
        assert isinstance(self.timedelta_index, gt.ABCTimedeltaIndex)
        assert isinstance(self.period_index, gt.ABCPeriodIndex)
        assert isinstance(self.categorical_df.index, gt.ABCCategoricalIndex)
        assert isinstance(pd.Index(["a", "b", "c"]), gt.ABCIndexClass)
        assert isinstance(pd.Int64Index([1, 2, 3]), gt.ABCIndexClass)
        assert isinstance(pd.Series([1, 2, 3]), gt.ABCSeries)
        assert isinstance(self.df, gt.ABCDataFrame)
        assert isinstance(self.sparse_array, gt.ABCExtensionArray)
        assert isinstance(self.categorical, gt.ABCCategorical)
        assert isinstance(self.datetime_array, gt.ABCDatetimeArray)
        assert not isinstance(self.datetime_index, gt.ABCDatetimeArray)
        assert isinstance(self.timedelta_array, gt.ABCTimedeltaArray)
        assert not isinstance(self.timedelta_index, gt.ABCTimedeltaArray)


def test_setattr_warnings():
    # GH7175 - GOTCHA: You can't use dot notation to add a column...
    d = {
        "one": pd.Series([1.0, 2.0, 3.0], index=["a", "b", "c"]),
        "two": pd.Series([1.0, 2.0, 3.0, 4.0], index=["a", "b", "c", "d"]),
    }
    df = pd.DataFrame(d)

    with catch_warnings(record=True) as w:
        # successfully add new column
        # this should not raise a warning
        df["three"] = df.two + 1
        assert len(w) == 0
        assert df.three.sum() > df.two.sum()

    with catch_warnings(record=True) as w:
        # successfully modify column in place
        # this should not raise a warning
        df.one += 1
        assert len(w) == 0
        assert df.one.iloc[0] == 2

    with catch_warnings(record=True) as w:
        # successfully add an attribute to a series
        # this should not raise a warning
        df.two.not_an_index = [1, 2]
        assert len(w) == 0

    with tm.assert_produces_warning(UserWarning):
        # warn when setting column to nonexistent name
        df.four = df.two + 2
        assert df.four.sum() > df.two.sum()
output:

import numpy as np
import pytest

from pandas._libs.tslibs import IncompatibleFrequency

from pandas import Index, PeriodIndex, period_range
import pandas._testing as tm


class TestJoin:
    def test_joins(self, join_type):
        index = period_range("1/1/2000", "1/20/2000", freq="D")

        joined = index.join(index[:-5], how=join_type)

        assert isinstance(joined, PeriodIndex)
        assert joined.freq == index.freq

    def test_join_self(self, join_type):
        index = period_range("1/1/2000", "1/20/2000", freq="D")
        res = index.join(index, how=join_type)
        assert index is res

    def test_join_does_not_recur(self):
        df = tm.makeCustomDataframe(
            3,
            2,
            data_gen_f=lambda *args: np.random.randint(2),
            c_idx_type="p",
            r_idx_type="dt",
        )
        s = df.iloc[:2, 0]

        res = s.index.join(df.columns, how="outer")
        expected = Index([s.index[0], s.index[1], df.columns[0], df.columns[1]], object)
        tm.assert_index_equal(res, expected)

    def test_join_mismatched_freq_raises(self):
        index = period_range("1/1/2000", "1/20/2000", freq="D")
        index3 = period_range("1/1/2000", "1/20/2000", freq="2D")
        msg = r".*Input has different freq=2D from PeriodIndex\(freq=D\)"
        with pytest.raises(IncompatibleFrequency, match=msg):
            index.join(index3)
repo_name: TomAugspurger/pandas
test_path: pandas/tests/indexes/period/test_join.py
code_path: pandas/tests/dtypes/test_generic.py
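The gt.ABC* checks in the input above work without importing concrete pandas classes. A minimal sketch of the underlying pattern: pandas tags its classes with a _typ attribute and answers isinstance() via a metaclass hook. The names _TagMeta and ABCSeriesLike here are illustrative, not pandas' own.

import pandas as pd


class _TagMeta(type):
    # isinstance() consults this hook instead of the normal MRO check
    def __instancecheck__(cls, obj):
        return getattr(obj, "_typ", None) in cls._allowed


class ABCSeriesLike(metaclass=_TagMeta):
    _allowed = {"series"}


assert isinstance(pd.Series([1]), ABCSeriesLike)    # pd.Series sets _typ = "series"
assert not isinstance(pd.DataFrame(), ABCSeriesLike)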
input:

import pandas as pd
import pandas._testing as tm


class TestUnaryOps:
    def test_invert(self):
        a = pd.array([True, False, None], dtype="boolean")
        expected = pd.array([False, True, None], dtype="boolean")
        tm.assert_extension_array_equal(~a, expected)

        expected = pd.Series(expected, index=["a", "b", "c"], name="name")
        result = ~pd.Series(a, index=["a", "b", "c"], name="name")
        tm.assert_series_equal(result, expected)

        df = pd.DataFrame({"A": a, "B": [True, False, False]}, index=["a", "b", "c"])
        result = ~df
        expected = pd.DataFrame(
            {"A": expected, "B": [False, True, True]}, index=["a", "b", "c"]
        )
        tm.assert_frame_equal(result, expected)
output: identical to the previous row's output (pandas/tests/indexes/period/test_join.py); verbatim duplicate omitted.
repo_name: TomAugspurger/pandas
test_path: pandas/tests/indexes/period/test_join.py
code_path: pandas/tests/arrays/boolean/test_ops.py
input:

from contextlib import contextmanager

from pandas.plotting._core import _get_plot_backend


def table(ax, data, rowLabels=None, colLabels=None, **kwargs):
    """
    Helper function to convert DataFrame and Series to matplotlib.table.

    Parameters
    ----------
    ax : Matplotlib axes object
    data : DataFrame or Series
        Data for table contents.
    **kwargs
        Keyword arguments to be passed to matplotlib.table.table.
        If `rowLabels` or `colLabels` is not specified, data index or column
        name will be used.

    Returns
    -------
    matplotlib table object
    """
    plot_backend = _get_plot_backend("matplotlib")
    # forward the user-supplied labels to the backend
    return plot_backend.table(
        ax=ax, data=data, rowLabels=rowLabels, colLabels=colLabels, **kwargs
    )


def register():
    """
    Register pandas formatters and converters with matplotlib.

    This function modifies the global ``matplotlib.units.registry``
    dictionary. pandas adds custom converters for

    * pd.Timestamp
    * pd.Period
    * np.datetime64
    * datetime.datetime
    * datetime.date
    * datetime.time

    See Also
    --------
    deregister_matplotlib_converters : Remove pandas formatters and converters.
    """
    plot_backend = _get_plot_backend("matplotlib")
    plot_backend.register()


def deregister():
    """
    Remove pandas formatters and converters.

    Removes the custom converters added by :func:`register`. This attempts
    to set the state of the registry back to the state before pandas
    registered its own units. Converters for pandas' own types like
    Timestamp and Period are removed completely. Converters for types
    pandas overwrites, like ``datetime.datetime``, are restored to their
    original value.

    See Also
    --------
    register_matplotlib_converters : Register pandas formatters and converters
        with matplotlib.
    """
    plot_backend = _get_plot_backend("matplotlib")
    plot_backend.deregister()


def scatter_matrix(
    frame,
    alpha=0.5,
    figsize=None,
    ax=None,
    grid=False,
    diagonal="hist",
    marker=".",
    density_kwds=None,
    hist_kwds=None,
    range_padding=0.05,
    **kwargs,
):
    """
    Draw a matrix of scatter plots.

    Parameters
    ----------
    frame : DataFrame
    alpha : float, optional
        Amount of transparency applied.
    figsize : (float,float), optional
        A tuple (width, height) in inches.
    ax : Matplotlib axis object, optional
    grid : bool, optional
        Setting this to True will show the grid.
    diagonal : {'hist', 'kde'}
        Pick between 'kde' and 'hist' for either Kernel Density Estimation or
        Histogram plot in the diagonal.
    marker : str, optional
        Matplotlib marker type, default '.'.
    density_kwds : keywords
        Keyword arguments to be passed to kernel density estimate plot.
    hist_kwds : keywords
        Keyword arguments to be passed to hist function.
    range_padding : float, default 0.05
        Relative extension of axis range in x and y with respect to
        (x_max - x_min) or (y_max - y_min).
    **kwargs
        Keyword arguments to be passed to scatter function.

    Returns
    -------
    numpy.ndarray
        A matrix of scatter plots.

    Examples
    --------
    .. plot::
        :context: close-figs

        >>> df = pd.DataFrame(np.random.randn(1000, 4), columns=['A','B','C','D'])
        >>> pd.plotting.scatter_matrix(df, alpha=0.2)
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.scatter_matrix(
        frame=frame,
        alpha=alpha,
        figsize=figsize,
        ax=ax,
        grid=grid,
        diagonal=diagonal,
        marker=marker,
        density_kwds=density_kwds,
        hist_kwds=hist_kwds,
        range_padding=range_padding,
        **kwargs,
    )


def radviz(frame, class_column, ax=None, color=None, colormap=None, **kwds):
    """
    Plot a multidimensional dataset in 2D.

    Each Series in the DataFrame is represented as an evenly distributed
    slice on a circle. Each data point is rendered in the circle according to
    the value on each Series. Highly correlated `Series` in the `DataFrame`
    are placed closer on the unit circle.

    RadViz allows projecting an N-dimensional data set into a 2D space where
    the influence of each dimension can be interpreted as a balance between
    the influence of all dimensions.

    More information is available at the `original article
    <https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.135.889>`_
    describing RadViz.

    Parameters
    ----------
    frame : `DataFrame`
        pandas object holding the data.
    class_column : str
        Column name containing the name of the data point category.
    ax : :class:`matplotlib.axes.Axes`, optional
        A plot instance to which to add the information.
    color : list[str] or tuple[str], optional
        Assign a color to each category. Example: ['blue', 'green'].
    colormap : str or :class:`matplotlib.colors.Colormap`, default None
        Colormap to select colors from. If string, load colormap with that
        name from matplotlib.
    **kwds
        Options to pass to matplotlib scatter plotting method.

    Returns
    -------
    class:`matplotlib.axes.Axes`

    See Also
    --------
    plotting.andrews_curves : Plot clustering visualization.

    Examples
    --------
    .. plot::
        :context: close-figs

        >>> df = pd.DataFrame(
        ...     {
        ...         'SepalLength': [6.5, 7.7, 5.1, 5.8, 7.6, 5.0, 5.4, 4.6, 6.7, 4.6],
        ...         'SepalWidth': [3.0, 3.8, 3.8, 2.7, 3.0, 2.3, 3.0, 3.2, 3.3, 3.6],
        ...         'PetalLength': [5.5, 6.7, 1.9, 5.1, 6.6, 3.3, 4.5, 1.4, 5.7, 1.0],
        ...         'PetalWidth': [1.8, 2.2, 0.4, 1.9, 2.1, 1.0, 1.5, 0.2, 2.1, 0.2],
        ...         'Category': [
        ...             'virginica',
        ...             'virginica',
        ...             'setosa',
        ...             'virginica',
        ...             'virginica',
        ...             'versicolor',
        ...             'versicolor',
        ...             'setosa',
        ...             'virginica',
        ...             'setosa'
        ...         ]
        ...     }
        ... )
        >>> pd.plotting.radviz(df, 'Category')
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.radviz(
        frame=frame,
        class_column=class_column,
        ax=ax,
        color=color,
        colormap=colormap,
        **kwds,
    )


def andrews_curves(
    frame, class_column, ax=None, samples=200, color=None, colormap=None, **kwargs
):
    """
    Generate a matplotlib plot of Andrews curves, for visualising clusters of
    multivariate data.

    Andrews curves have the functional form:

    f(t) = x_1/sqrt(2) + x_2 sin(t) + x_3 cos(t) +
           x_4 sin(2t) + x_5 cos(2t) + ...

    Where x coefficients correspond to the values of each dimension and t is
    linearly spaced between -pi and +pi. Each row of frame then corresponds to
    a single curve.

    Parameters
    ----------
    frame : DataFrame
        Data to be plotted, preferably normalized to (0.0, 1.0).
    class_column : Name of the column containing class names
    ax : matplotlib axes object, default None
    samples : Number of points to plot in each curve
    color : list or tuple, optional
        Colors to use for the different classes.
    colormap : str or matplotlib colormap object, default None
        Colormap to select colors from. If string, load colormap with that
        name from matplotlib.
    **kwargs
        Options to pass to matplotlib plotting method.

    Returns
    -------
    class:`matplotlib.axis.Axes`

    Examples
    --------
    .. plot::
        :context: close-figs

        >>> df = pd.read_csv(
        ...     'https://raw.github.com/pandas-dev/'
        ...     'pandas/master/pandas/tests/io/data/csv/iris.csv'
        ... )
        >>> pd.plotting.andrews_curves(df, 'Name')
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.andrews_curves(
        frame=frame,
        class_column=class_column,
        ax=ax,
        samples=samples,
        color=color,
        colormap=colormap,
        **kwargs,
    )


def bootstrap_plot(series, fig=None, size=50, samples=500, **kwds):
    """
    Bootstrap plot on mean, median and mid-range statistics.

    The bootstrap plot is used to estimate the uncertainty of a statistic
    by relying on random sampling with replacement [1]_. This function will
    generate bootstrapping plots for mean, median and mid-range statistics
    for the given number of samples of the given size.

    .. [1] "Bootstrapping (statistics)" in
       https://en.wikipedia.org/wiki/Bootstrapping_%28statistics%29

    Parameters
    ----------
    series : pandas.Series
        pandas Series from where to get the samplings for the bootstrapping.
    fig : matplotlib.figure.Figure, default None
        If given, it will use the `fig` reference for plotting instead of
        creating a new one with default parameters.
    size : int, default 50
        Number of data points to consider during each sampling. It must be
        less than or equal to the length of the `series`.
    samples : int, default 500
        Number of times the bootstrap procedure is performed.
    **kwds
        Options to pass to matplotlib plotting method.

    Returns
    -------
    matplotlib.figure.Figure
        Matplotlib figure.

    See Also
    --------
    DataFrame.plot : Basic plotting for DataFrame objects.
    Series.plot : Basic plotting for Series objects.

    Examples
    --------
    This example draws a basic bootstrap plot for a Series.

    .. plot::
        :context: close-figs

        >>> s = pd.Series(np.random.uniform(size=100))
        >>> pd.plotting.bootstrap_plot(s)
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.bootstrap_plot(
        series=series, fig=fig, size=size, samples=samples, **kwds
    )


def parallel_coordinates(
    frame,
    class_column,
    cols=None,
    ax=None,
    color=None,
    use_columns=False,
    xticks=None,
    colormap=None,
    axvlines=True,
    axvlines_kwds=None,
    sort_labels=False,
    **kwargs,
):
    """
    Parallel coordinates plotting.

    Parameters
    ----------
    frame : DataFrame
    class_column : str
        Column name containing class names.
    cols : list, optional
        A list of column names to use.
    ax : matplotlib.axis, optional
        Matplotlib axis object.
    color : list or tuple, optional
        Colors to use for the different classes.
    use_columns : bool, optional
        If true, columns will be used as xticks.
    xticks : list or tuple, optional
        A list of values to use for xticks.
    colormap : str or matplotlib colormap, default None
        Colormap to use for line colors.
    axvlines : bool, optional
        If true, vertical lines will be added at each xtick.
    axvlines_kwds : keywords, optional
        Options to be passed to axvline method for vertical lines.
    sort_labels : bool, default False
        Sort class_column labels, useful when assigning colors.
    **kwargs
        Options to pass to matplotlib plotting method.

    Returns
    -------
    class:`matplotlib.axis.Axes`

    Examples
    --------
    .. plot::
        :context: close-figs

        >>> df = pd.read_csv(
        ...     'https://raw.github.com/pandas-dev/'
        ...     'pandas/master/pandas/tests/io/data/csv/iris.csv'
        ... )
        >>> pd.plotting.parallel_coordinates(
        ...     df, 'Name', color=('#556270', '#4ECDC4', '#C7F464')
        ... )
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.parallel_coordinates(
        frame=frame,
        class_column=class_column,
        cols=cols,
        ax=ax,
        color=color,
        use_columns=use_columns,
        xticks=xticks,
        colormap=colormap,
        axvlines=axvlines,
        axvlines_kwds=axvlines_kwds,
        sort_labels=sort_labels,
        **kwargs,
    )


def lag_plot(series, lag=1, ax=None, **kwds):
    """
    Lag plot for time series.

    Parameters
    ----------
    series : Time series
    lag : lag of the scatter plot, default 1
    ax : Matplotlib axis object, optional
    **kwds
        Matplotlib scatter method keyword arguments.

    Returns
    -------
    class:`matplotlib.axis.Axes`

    Examples
    --------
    Lag plots are most commonly used to look for patterns in time series data.

    Given the following time series

    .. plot::
        :context: close-figs

        >>> np.random.seed(5)
        >>> x = np.cumsum(np.random.normal(loc=1, scale=5, size=50))
        >>> s = pd.Series(x)
        >>> s.plot()

    A lag plot with ``lag=1`` returns

    .. plot::
        :context: close-figs

        >>> pd.plotting.lag_plot(s, lag=1)
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.lag_plot(series=series, lag=lag, ax=ax, **kwds)


def autocorrelation_plot(series, ax=None, **kwargs):
    """
    Autocorrelation plot for time series.

    Parameters
    ----------
    series : Time series
    ax : Matplotlib axis object, optional
    **kwargs
        Options to pass to matplotlib plotting method.

    Returns
    -------
    class:`matplotlib.axis.Axes`

    Examples
    --------
    The horizontal lines in the plot correspond to 95% and 99% confidence
    bands. The dashed line is the 99% confidence band.

    .. plot::
        :context: close-figs

        >>> spacing = np.linspace(-9 * np.pi, 9 * np.pi, num=1000)
        >>> s = pd.Series(0.7 * np.random.rand(1000) + 0.3 * np.sin(spacing))
        >>> pd.plotting.autocorrelation_plot(s)
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.autocorrelation_plot(series=series, ax=ax, **kwargs)


class _Options(dict):
    """
    Stores pandas plotting options.

    Allows for parameter aliasing so you can just use parameter names that are
    the same as the plot function parameters, but is stored in a canonical
    format that makes it easy to break down into groups later.
    """

    # alias so the names are same as plotting method parameter names
    _ALIASES = {"x_compat": "xaxis.compat"}
    _DEFAULT_KEYS = ["xaxis.compat"]

    def __init__(self, deprecated=False):
        self._deprecated = deprecated
        super().__setitem__("xaxis.compat", False)

    def __getitem__(self, key):
        key = self._get_canonical_key(key)
        if key not in self:
            raise ValueError(f"{key} is not a valid pandas plotting option")
        return super().__getitem__(key)

    def __setitem__(self, key, value):
        key = self._get_canonical_key(key)
        return super().__setitem__(key, value)

    def __delitem__(self, key):
        key = self._get_canonical_key(key)
        if key in self._DEFAULT_KEYS:
            raise ValueError(f"Cannot remove default parameter {key}")
        return super().__delitem__(key)

    def __contains__(self, key) -> bool:
        key = self._get_canonical_key(key)
        return super().__contains__(key)

    def reset(self):
        """
        Reset the option store to its initial state.

        Returns
        -------
        None
        """
        self.__init__()

    def _get_canonical_key(self, key):
        return self._ALIASES.get(key, key)

    @contextmanager
    def use(self, key, value):
        """
        Temporarily set a parameter value using the with statement.
        Aliasing allowed.
        """
        old_value = self[key]
        try:
            self[key] = value
            yield self
        finally:
            self[key] = old_value


plot_params = _Options()
output: identical to the earlier output (pandas/tests/indexes/period/test_join.py); verbatim duplicate omitted.
repo_name: TomAugspurger/pandas
test_path: pandas/tests/indexes/period/test_join.py
code_path: pandas/plotting/_misc.py
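The _Options store at the bottom of the module above is exposed publicly as pd.plotting.plot_params; a quick demonstration of the aliasing and the use() context manager:

import pandas as pd

# 'x_compat' is the alias for the canonical 'xaxis.compat' key
with pd.plotting.plot_params.use("x_compat", True):
    assert pd.plotting.plot_params["xaxis.compat"] is True
# the old value is restored on exit
assert pd.plotting.plot_params["xaxis.compat"] is False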
""" Helper functions to generate range-like data for DatetimeArray (and possibly TimedeltaArray/PeriodArray) """ from typing import Union import numpy as np from pandas._libs.tslibs import OutOfBoundsDatetime, Timedelta, Timestamp from pandas.tseries.offsets import DateOffset def generate_regular_range( start: Union[Timestamp, Timedelta], end: Union[Timestamp, Timedelta], periods: int, freq: DateOffset, ): """ Generate a range of dates or timestamps with the spans between dates described by the given `freq` DateOffset. Parameters ---------- start : Timedelta, Timestamp or None First point of produced date range. end : Timedelta, Timestamp or None Last point of produced date range. periods : int Number of periods in produced date range. freq : Tick Describes space between dates in produced date range. Returns ------- ndarray[np.int64] Representing nanoseconds. """ start = start.value if start is not None else None end = end.value if end is not None else None stride = freq.nanos if periods is None: b = start # cannot just use e = Timestamp(end) + 1 because arange breaks when # stride is too large, see GH10887 e = b + (end - b) // stride * stride + stride // 2 + 1 elif start is not None: b = start e = _generate_range_overflow_safe(b, periods, stride, side="start") elif end is not None: e = end + stride b = _generate_range_overflow_safe(e, periods, stride, side="end") else: raise ValueError( "at least 'start' or 'end' should be specified if a 'period' is given." ) with np.errstate(over="raise"): # If the range is sufficiently large, np.arange may overflow # and incorrectly return an empty array if not caught. try: values = np.arange(b, e, stride, dtype=np.int64) except FloatingPointError: xdr = [b] while xdr[-1] != e: xdr.append(xdr[-1] + stride) values = np.array(xdr[:-1], dtype=np.int64) return values def _generate_range_overflow_safe( endpoint: int, periods: int, stride: int, side: str = "start" ) -> int: """ Calculate the second endpoint for passing to np.arange, checking to avoid an integer overflow. Catch OverflowError and re-raise as OutOfBoundsDatetime. Parameters ---------- endpoint : int nanosecond timestamp of the known endpoint of the desired range periods : int number of periods in the desired range stride : int nanoseconds between periods in the desired range side : {'start', 'end'} which end of the range `endpoint` refers to Returns ------- other_end : int Raises ------ OutOfBoundsDatetime """ # GH#14187 raise instead of incorrectly wrapping around assert side in ["start", "end"] i64max = np.uint64(np.iinfo(np.int64).max) msg = f"Cannot generate range with {side}={endpoint} and periods={periods}" with np.errstate(over="raise"): # if periods * strides cannot be multiplied within the *uint64* bounds, # we cannot salvage the operation by recursing, so raise try: addend = np.uint64(periods) * np.uint64(np.abs(stride)) except FloatingPointError as err: raise OutOfBoundsDatetime(msg) from err if np.abs(addend) <= i64max: # relatively easy case without casting concerns return _generate_range_overflow_safe_signed(endpoint, periods, stride, side) elif (endpoint > 0 and side == "start" and stride > 0) or ( endpoint < 0 and side == "end" and stride > 0 ): # no chance of not-overflowing raise OutOfBoundsDatetime(msg) elif side == "end" and endpoint > i64max and endpoint - stride <= i64max: # in _generate_regular_range we added `stride` thereby overflowing # the bounds. Adjust to fix this. 
return _generate_range_overflow_safe( endpoint - stride, periods - 1, stride, side ) # split into smaller pieces mid_periods = periods // 2 remaining = periods - mid_periods assert 0 < remaining < periods, (remaining, periods, endpoint, stride) midpoint = _generate_range_overflow_safe(endpoint, mid_periods, stride, side) return _generate_range_overflow_safe(midpoint, remaining, stride, side) def _generate_range_overflow_safe_signed( endpoint: int, periods: int, stride: int, side: str ) -> int: """ A special case for _generate_range_overflow_safe where `periods * stride` can be calculated without overflowing int64 bounds. """ assert side in ["start", "end"] if side == "end": stride *= -1 with np.errstate(over="raise"): addend = np.int64(periods) * np.int64(stride) try: # easy case with no overflows return np.int64(endpoint) + addend except (FloatingPointError, OverflowError): # with endpoint negative and addend positive we risk # FloatingPointError; with reversed signed we risk OverflowError pass # if stride and endpoint had opposite signs, then endpoint + addend # should never overflow. so they must have the same signs assert (stride > 0 and endpoint >= 0) or (stride < 0 and endpoint <= 0) if stride > 0: # watch out for very special case in which we just slightly # exceed implementation bounds, but when passing the result to # np.arange will get a result slightly within the bounds result = np.uint64(endpoint) + np.uint64(addend) i64max = np.uint64(np.iinfo(np.int64).max) assert result > i64max if result <= i64max + np.uint64(stride): return result raise OutOfBoundsDatetime( f"Cannot generate range with {side}={endpoint} and periods={periods}" )
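A small demonstration of the failure mode `_generate_range_overflow_safe` guards against; this is an illustrative sketch of mine, not part of the original module:

import numpy as np

# Illustration: a naive endpoint + periods * stride wraps around in int64.
# The helper above instead promotes the product to uint64 (or splits the
# periods in half and recurses) so the overflow can be detected and
# reported as OutOfBoundsDatetime rather than silently wrapping.
endpoint = np.int64(2**62)
periods = np.int64(4)
stride = np.int64(2**61)
with np.errstate(over="ignore"):
    naive = endpoint + periods * stride  # periods * stride overflows int64
print(naive < endpoint)  # True: the "later" endpoint moved backwards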
TomAugspurger/pandas
pandas/tests/indexes/period/test_join.py
pandas/core/arrays/_ranges.py
#!/usr/bin/env python3
"""
process_file(filename)

  takes templated file .xxx.src and produces .xxx file where .xxx
  is .pyf .f90 or .f using the following template rules:

  '<..>' denotes a template.

  All function and subroutine blocks in a source file with names that
  contain '<..>' will be replicated according to the rules in '<..>'.

  The number of comma-separated words in '<..>' will determine the number
  of replicates.

  '<..>' may have two different forms, named and short. For example,

  named:
   <p=d,s,z,c> where anywhere inside a block '<p>' will be replaced with
   'd', 's', 'z', and 'c' for each replicate of the block.

   <_c>  is already defined: <_c=s,d,c,z>
   <_t>  is already defined: <_t=real,double precision,complex,double complex>

  short:
   <s,d,c,z>, a short form of the named, useful when no <p> appears inside
   a block.

  In general, '<..>' contains a comma separated list of arbitrary
  expressions. If these expressions must contain a comma|leftarrow|rightarrow,
  then prepend the comma|leftarrow|rightarrow with a backslash.

  If an expression matches '\\<index>' then it will be replaced
  by <index>-th expression.

  Note that all '<..>' forms in a block must have the same number of
  comma-separated entries.

 Predefined named template rules:
  <prefix=s,d,c,z>
  <ftype=real,double precision,complex,double complex>
  <ftypereal=real,double precision,\\0,\\1>
  <ctype=float,double,complex_float,complex_double>
  <ctypereal=float,double,\\0,\\1>
"""
__all__ = ['process_str', 'process_file']

import os
import sys
import re

routine_start_re = re.compile(
    r'(\n|\A)((     (\$|\*))|)\s*(subroutine|function)\b', re.I)
routine_end_re = re.compile(r'\n\s*end\s*(subroutine|function)\b.*(\n|\Z)', re.I)
function_start_re = re.compile(r'\n     (\$|\*)\s*function\b', re.I)

def parse_structure(astr):
    """ Return a list of tuples for each function or subroutine
    each tuple is the start and end of a subroutine or function to be
    expanded.
    """

    spanlist = []
    ind = 0
    while True:
        m = routine_start_re.search(astr, ind)
        if m is None:
            break
        start = m.start()
        if function_start_re.match(astr, start, m.end()):
            while True:
                i = astr.rfind('\n', ind, start)
                if i == -1:
                    break
                start = i
                if astr[i:i+7] != '\n     $':
                    break
        start += 1
        m = routine_end_re.search(astr, m.end())
        ind = end = m and m.end() - 1 or len(astr)
        spanlist.append((start, end))
    return spanlist

template_re = re.compile(r"<\s*(\w[\w\d]*)\s*>")
named_re = re.compile(r"<\s*(\w[\w\d]*)\s*=\s*(.*?)\s*>")
list_re = re.compile(r"<\s*((.*?))\s*>")

def find_repl_patterns(astr):
    reps = named_re.findall(astr)
    names = {}
    for rep in reps:
        name = rep[0].strip() or unique_key(names)
        repl = rep[1].replace(r'\,', '@comma@')
        thelist = conv(repl)
        names[name] = thelist
    return names

def find_and_remove_repl_patterns(astr):
    names = find_repl_patterns(astr)
    astr = re.subn(named_re, '', astr)[0]
    return astr, names

item_re = re.compile(r"\A\\(?P<index>\d+)\Z")
def conv(astr):
    b = astr.split(',')
    l = [x.strip() for x in b]
    for i in range(len(l)):
        m = item_re.match(l[i])
        if m:
            j = int(m.group('index'))
            l[i] = l[j]
    return ','.join(l)

def unique_key(adict):
    """ Obtain a unique key given a dictionary."""
    allkeys = list(adict.keys())
    done = False
    n = 1
    while not done:
        newkey = '__l%s' % (n)
        if newkey in allkeys:
            n += 1
        else:
            done = True
    return newkey

template_name_re = re.compile(r'\A\s*(\w[\w\d]*)\s*\Z')
def expand_sub(substr, names):
    substr = substr.replace(r'\>', '@rightarrow@')
    substr = substr.replace(r'\<', '@leftarrow@')
    lnames = find_repl_patterns(substr)
    substr = named_re.sub(r"<\1>", substr)  # get rid of definition templates

    def listrepl(mobj):
        thelist = conv(mobj.group(1).replace(r'\,', '@comma@'))
        if template_name_re.match(thelist):
            return "<%s>" % (thelist)
        name = None
        for key in lnames.keys():    # see if list is already in dictionary
            if lnames[key] == thelist:
                name = key
        if name is None:      # this list is not in the dictionary yet
            name = unique_key(lnames)
            lnames[name] = thelist
        return "<%s>" % name

    substr = list_re.sub(listrepl, substr)  # convert all lists to named templates
                                            # newnames are constructed as needed

    numsubs = None
    base_rule = None
    rules = {}
    for r in template_re.findall(substr):
        if r not in rules:
            thelist = lnames.get(r, names.get(r, None))
            if thelist is None:
                raise ValueError('No replicates found for <%s>' % (r))
            if r not in names and not thelist.startswith('_'):
                names[r] = thelist
            rule = [i.replace('@comma@', ',') for i in thelist.split(',')]
            num = len(rule)

            if numsubs is None:
                numsubs = num
                rules[r] = rule
                base_rule = r
            elif num == numsubs:
                rules[r] = rule
            else:
                print("Mismatch in number of replacements (base <%s=%s>)"
                      " for <%s=%s>. Ignoring."
                      % (base_rule, ','.join(rules[base_rule]), r, thelist))
    if not rules:
        return substr

    def namerepl(mobj):
        name = mobj.group(1)
        return rules.get(name, (k+1)*[name])[k]

    newstr = ''
    for k in range(numsubs):
        newstr += template_re.sub(namerepl, substr) + '\n\n'

    newstr = newstr.replace('@rightarrow@', '>')
    newstr = newstr.replace('@leftarrow@', '<')
    return newstr

def process_str(allstr):
    newstr = allstr
    writestr = ''

    struct = parse_structure(newstr)

    oldend = 0
    names = {}
    names.update(_special_names)
    for sub in struct:
        cleanedstr, defs = find_and_remove_repl_patterns(newstr[oldend:sub[0]])
        writestr += cleanedstr
        names.update(defs)
        writestr += expand_sub(newstr[sub[0]:sub[1]], names)
        oldend = sub[1]
    writestr += newstr[oldend:]

    return writestr

include_src_re = re.compile(
    r"(\n|\A)\s*include\s*['\"](?P<name>[\w\d./\\]+[.]src)['\"]", re.I)

def resolve_includes(source):
    d = os.path.dirname(source)
    with open(source) as fid:
        lines = []
        for line in fid:
            m = include_src_re.match(line)
            if m:
                fn = m.group('name')
                if not os.path.isabs(fn):
                    fn = os.path.join(d, fn)
                if os.path.isfile(fn):
                    print('Including file', fn)
                    lines.extend(resolve_includes(fn))
                else:
                    lines.append(line)
            else:
                lines.append(line)
    return lines

def process_file(source):
    lines = resolve_includes(source)
    return process_str(''.join(lines))

_special_names = find_repl_patterns('''
<_c=s,d,c,z>
<_t=real,double precision,complex,double complex>
<prefix=s,d,c,z>
<ftype=real,double precision,complex,double complex>
<ctype=float,double,complex_float,complex_double>
<ftypereal=real,double precision,\\0,\\1>
<ctypereal=float,double,\\0,\\1>
''')

def main():
    try:
        file = sys.argv[1]
    except IndexError:
        fid = sys.stdin
        outfile = sys.stdout
    else:
        fid = open(file, 'r')
        (base, ext) = os.path.splitext(file)
        newname = base
        outfile = open(newname, 'w')

    allstr = fid.read()
    writestr = process_str(allstr)
    outfile.write(writestr)

if __name__ == "__main__":
    main()
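To make the template rules concrete, here is a small, hypothetical input and how it would be expanded. The snippet is my own illustration and relies only on `process_str` as defined above; note that `numpy.distutils` is deprecated on recent Python/numpy combinations, so this may only run on the vintage this file comes from:

from numpy.distutils.from_template import process_str

# Illustrative sketch: one templated subroutine expands into four copies,
# one per entry in the predefined <prefix>/<ftype> rules (s/d/c/z paired
# with real/double precision/complex/double complex).
templated = """\
      subroutine <prefix>copy(n, x, y)
      <ftype> x(n), y(n)
      end subroutine <prefix>copy
"""
print(process_str(templated))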
# doctest
r''' Test the .npy file format.

Set up:

    >>> import sys
    >>> from io import BytesIO
    >>> from numpy.lib import format
    >>>
    >>> scalars = [
    ...     np.uint8,
    ...     np.int8,
    ...     np.uint16,
    ...     np.int16,
    ...     np.uint32,
    ...     np.int32,
    ...     np.uint64,
    ...     np.int64,
    ...     np.float32,
    ...     np.float64,
    ...     np.complex64,
    ...     np.complex128,
    ...     object,
    ... ]
    >>>
    >>> basic_arrays = []
    >>>
    >>> for scalar in scalars:
    ...     for endian in '<>':
    ...         dtype = np.dtype(scalar).newbyteorder(endian)
    ...         basic = np.arange(15).astype(dtype)
    ...         basic_arrays.extend([
    ...             np.array([], dtype=dtype),
    ...             np.array(10, dtype=dtype),
    ...             basic,
    ...             basic.reshape((3,5)),
    ...             basic.reshape((3,5)).T,
    ...             basic.reshape((3,5))[::-1,::2],
    ...         ])
    ...
    >>>
    >>> Pdescr = [
    ...     ('x', 'i4', (2,)),
    ...     ('y', 'f8', (2, 2)),
    ...     ('z', 'u1')]
    >>>
    >>>
    >>> PbufferT = [
    ...     ([3,2], [[6.,4.],[6.,4.]], 8),
    ...     ([4,3], [[7.,5.],[7.,5.]], 9),
    ...     ]
    >>>
    >>>
    >>> Ndescr = [
    ...     ('x', 'i4', (2,)),
    ...     ('Info', [
    ...         ('value', 'c16'),
    ...         ('y2', 'f8'),
    ...         ('Info2', [
    ...             ('name', 'S2'),
    ...             ('value', 'c16', (2,)),
    ...             ('y3', 'f8', (2,)),
    ...             ('z3', 'u4', (2,))]),
    ...         ('name', 'S2'),
    ...         ('z2', 'b1')]),
    ...     ('color', 'S2'),
    ...     ('info', [
    ...         ('Name', 'U8'),
    ...         ('Value', 'c16')]),
    ...     ('y', 'f8', (2, 2)),
    ...     ('z', 'u1')]
    >>>
    >>>
    >>> NbufferT = [
    ...     ([3,2], (6j, 6., ('nn', [6j,4j], [6.,4.], [1,2]), 'NN', True), 'cc', ('NN', 6j), [[6.,4.],[6.,4.]], 8),
    ...     ([4,3], (7j, 7., ('oo', [7j,5j], [7.,5.], [2,1]), 'OO', False), 'dd', ('OO', 7j), [[7.,5.],[7.,5.]], 9),
    ...     ]
    >>>
    >>>
    >>> record_arrays = [
    ...     np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')),
    ...     np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')),
    ...     np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')),
    ...     np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')),
    ... ]

Test the magic string writing.

    >>> format.magic(1, 0)
    '\x93NUMPY\x01\x00'
    >>> format.magic(0, 0)
    '\x93NUMPY\x00\x00'
    >>> format.magic(255, 255)
    '\x93NUMPY\xff\xff'
    >>> format.magic(2, 5)
    '\x93NUMPY\x02\x05'

Test the magic string reading.

    >>> format.read_magic(BytesIO(format.magic(1, 0)))
    (1, 0)
    >>> format.read_magic(BytesIO(format.magic(0, 0)))
    (0, 0)
    >>> format.read_magic(BytesIO(format.magic(255, 255)))
    (255, 255)
    >>> format.read_magic(BytesIO(format.magic(2, 5)))
    (2, 5)

Test the header writing.

    >>> for arr in basic_arrays + record_arrays:
    ...     f = BytesIO()
    ...     format.write_array_header_1_0(f, arr)   # XXX: arr is not a dict, items gets called on it
    ...     print(repr(f.getvalue()))
    ...
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|u1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|u1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|i1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|i1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u4', 
'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<f4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>f4', 'fortran_order': 
True, 'shape': (5, 3)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<f8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>f8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<c8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>c8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<c16', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>c16', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': 'O', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': 'O', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 3)} \n" "v\x00{'descr': [('x', '<i4', (2,)), ('y', '<f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "\x16\x02{'descr': [('x', '<i4', (2,)),\n ('Info',\n [('value', '<c16'),\n ('y2', '<f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '<c16', (2,)),\n 
('y3', '<f8', (2,)),\n ('z3', '<u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '<U8'), ('Value', '<c16')]),\n ('y', '<f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "v\x00{'descr': [('x', '>i4', (2,)), ('y', '>f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "\x16\x02{'descr': [('x', '>i4', (2,)),\n ('Info',\n [('value', '>c16'),\n ('y2', '>f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '>c16', (2,)),\n ('y3', '>f8', (2,)),\n ('z3', '>u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '>U8'), ('Value', '>c16')]),\n ('y', '>f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" ''' import sys import os import shutil import tempfile import warnings import pytest from io import BytesIO import numpy as np from numpy.testing import ( assert_, assert_array_equal, assert_raises, assert_raises_regex, assert_warns ) from numpy.lib import format tempdir = None # Module-level setup. def setup_module(): global tempdir tempdir = tempfile.mkdtemp() def teardown_module(): global tempdir if tempdir is not None and os.path.isdir(tempdir): shutil.rmtree(tempdir) tempdir = None # Generate some basic arrays to test with. scalars = [ np.uint8, np.int8, np.uint16, np.int16, np.uint32, np.int32, np.uint64, np.int64, np.float32, np.float64, np.complex64, np.complex128, object, ] basic_arrays = [] for scalar in scalars: for endian in '<>': dtype = np.dtype(scalar).newbyteorder(endian) basic = np.arange(1500).astype(dtype) basic_arrays.extend([ # Empty np.array([], dtype=dtype), # Rank-0 np.array(10, dtype=dtype), # 1-D basic, # 2-D C-contiguous basic.reshape((30, 50)), # 2-D F-contiguous basic.reshape((30, 50)).T, # 2-D non-contiguous basic.reshape((30, 50))[::-1, ::2], ]) # More complicated record arrays. 
# This is the structure of the table used for plain objects:
#
# +-+-+-+
# |x|y|z|
# +-+-+-+

# Structure of a plain array description:
Pdescr = [
    ('x', 'i4', (2,)),
    ('y', 'f8', (2, 2)),
    ('z', 'u1')]

# A plain list of tuples with values for testing:
PbufferT = [
    # x     y                  z
    ([3, 2], [[6., 4.], [6., 4.]], 8),
    ([4, 3], [[7., 5.], [7., 5.]], 9),
    ]

# This is the structure of the table used for nested objects (DON'T PANIC!):
#
# +-+---------------------------------+-----+----------+-+-+
# |x|Info                             |color|info      |y|z|
# | +-----+--+----------------+----+--+     +----+-----+ | |
# | |value|y2|Info2           |name|z2|     |Name|Value| | |
# | |     |  +----+-----+--+--+    |  |     |    |     | | |
# | |     |  |name|value|y3|z3|    |  |     |    |     | | |
# +-+-----+--+----+-----+--+--+----+--+-----+----+-----+-+-+
#

# The corresponding nested array description:
Ndescr = [
    ('x', 'i4', (2,)),
    ('Info', [
        ('value', 'c16'),
        ('y2', 'f8'),
        ('Info2', [
            ('name', 'S2'),
            ('value', 'c16', (2,)),
            ('y3', 'f8', (2,)),
            ('z3', 'u4', (2,))]),
        ('name', 'S2'),
        ('z2', 'b1')]),
    ('color', 'S2'),
    ('info', [
        ('Name', 'U8'),
        ('Value', 'c16')]),
    ('y', 'f8', (2, 2)),
    ('z', 'u1')]

NbufferT = [
    # x     Info                                        color info       y                  z
    #       value y2 Info2                     name z2        Name Value
    #                name   value    y3    z3
    ([3, 2], (6j, 6., ('nn', [6j, 4j], [6., 4.], [1, 2]), 'NN', True),
     'cc', ('NN', 6j), [[6., 4.], [6., 4.]], 8),
    ([4, 3], (7j, 7., ('oo', [7j, 5j], [7., 5.], [2, 1]), 'OO', False),
     'dd', ('OO', 7j), [[7., 5.], [7., 5.]], 9),
    ]

record_arrays = [
    np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')),
    np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')),
    np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')),
    np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')),
    np.zeros(1, dtype=[('c', ('<f8', (5,)), (2,))])
]


#BytesIO that reads a random number of bytes at a time
class BytesIOSRandomSize(BytesIO):
    def read(self, size=None):
        import random
        size = random.randint(1, size)
        return super(BytesIOSRandomSize, self).read(size)


def roundtrip(arr):
    f = BytesIO()
    format.write_array(f, arr)
    f2 = BytesIO(f.getvalue())
    arr2 = format.read_array(f2, allow_pickle=True)
    return arr2


def roundtrip_randsize(arr):
    f = BytesIO()
    format.write_array(f, arr)
    f2 = BytesIOSRandomSize(f.getvalue())
    arr2 = format.read_array(f2)
    return arr2


def roundtrip_truncated(arr):
    f = BytesIO()
    format.write_array(f, arr)
    #BytesIO is one byte short
    f2 = BytesIO(f.getvalue()[0:-1])
    arr2 = format.read_array(f2)
    return arr2


def assert_equal_(o1, o2):
    assert_(o1 == o2)


def test_roundtrip():
    for arr in basic_arrays + record_arrays:
        arr2 = roundtrip(arr)
        assert_array_equal(arr, arr2)


def test_roundtrip_randsize():
    for arr in basic_arrays + record_arrays:
        if arr.dtype != object:
            arr2 = roundtrip_randsize(arr)
            assert_array_equal(arr, arr2)


def test_roundtrip_truncated():
    for arr in basic_arrays:
        if arr.dtype != object:
            assert_raises(ValueError, roundtrip_truncated, arr)


def test_long_str():
    # check items larger than internal buffer size, gh-4027
    long_str_arr = np.ones(1, dtype=np.dtype((str, format.BUFFER_SIZE + 1)))
    long_str_arr2 = roundtrip(long_str_arr)
    assert_array_equal(long_str_arr, long_str_arr2)


@pytest.mark.slow
def test_memmap_roundtrip():
    # Fixme: used to crash on windows
    if not (sys.platform == 'win32' or sys.platform == 'cygwin'):
        for arr in basic_arrays + record_arrays:
            if arr.dtype.hasobject:
                # Skip these since they can't be mmap'ed.
                continue
            # Write it out normally and through mmap.
            nfn = os.path.join(tempdir, 'normal.npy')
            mfn = os.path.join(tempdir, 'memmap.npy')
            fp = open(nfn, 'wb')
            try:
                format.write_array(fp, arr)
            finally:
                fp.close()

            fortran_order = (
                arr.flags.f_contiguous and not arr.flags.c_contiguous)
            ma = format.open_memmap(mfn, mode='w+', dtype=arr.dtype,
                                    shape=arr.shape,
                                    fortran_order=fortran_order)
            ma[...] = arr
            del ma

            # Check that both of these files' contents are the same.
            fp = open(nfn, 'rb')
            normal_bytes = fp.read()
            fp.close()
            fp = open(mfn, 'rb')
            memmap_bytes = fp.read()
            fp.close()
            assert_equal_(normal_bytes, memmap_bytes)

            # Check that reading the file using memmap works.
            ma = format.open_memmap(nfn, mode='r')
            del ma


def test_compressed_roundtrip():
    arr = np.random.rand(200, 200)
    npz_file = os.path.join(tempdir, 'compressed.npz')
    np.savez_compressed(npz_file, arr=arr)
    arr1 = np.load(npz_file)['arr']
    assert_array_equal(arr, arr1)


# aligned
dt1 = np.dtype('i1, i4, i1', align=True)
# non-aligned, explicit offsets
dt2 = np.dtype({'names': ['a', 'b'],
                'formats': ['i4', 'i4'],
                'offsets': [1, 6]})
# nested struct-in-struct
dt3 = np.dtype({'names': ['c', 'd'], 'formats': ['i4', dt2]})
# field with '' name
dt4 = np.dtype({'names': ['a', '', 'b'], 'formats': ['i4']*3})
# titles
dt5 = np.dtype({'names': ['a', 'b'], 'formats': ['i4', 'i4'],
                'offsets': [1, 6], 'titles': ['aa', 'bb']})
# empty
dt6 = np.dtype({'names': [], 'formats': [], 'itemsize': 8})

@pytest.mark.parametrize("dt", [dt1, dt2, dt3, dt4, dt5, dt6])
def test_load_padded_dtype(dt):
    arr = np.zeros(3, dt)
    for i in range(3):
        arr[i] = i + 5
    npz_file = os.path.join(tempdir, 'aligned.npz')
    np.savez(npz_file, arr=arr)
    arr1 = np.load(npz_file)['arr']
    assert_array_equal(arr, arr1)


def test_python2_python3_interoperability():
    fname = 'win64python2.npy'
    path = os.path.join(os.path.dirname(__file__), 'data', fname)
    data = np.load(path)
    assert_array_equal(data, np.ones(2))


def test_pickle_python2_python3():
    # Test that loading object arrays saved on Python 2 works both on
    # Python 2 and Python 3 and vice versa
    data_dir = os.path.join(os.path.dirname(__file__), 'data')

    expected = np.array([None, range, u'\u512a\u826f',
                         b'\xe4\xb8\x8d\xe8\x89\xaf'],
                        dtype=object)

    for fname in ['py2-objarr.npy', 'py2-objarr.npz',
                  'py3-objarr.npy', 'py3-objarr.npz']:
        path = os.path.join(data_dir, fname)

        for encoding in ['bytes', 'latin1']:
            data_f = np.load(path, allow_pickle=True, encoding=encoding)
            if fname.endswith('.npz'):
                data = data_f['x']
                data_f.close()
            else:
                data = data_f

            if encoding == 'latin1' and fname.startswith('py2'):
                assert_(isinstance(data[3], str))
                assert_array_equal(data[:-1], expected[:-1])
                # mojibake occurs
                assert_array_equal(data[-1].encode(encoding), expected[-1])
            else:
                assert_(isinstance(data[3], bytes))
                assert_array_equal(data, expected)

        if fname.startswith('py2'):
            if fname.endswith('.npz'):
                data = np.load(path, allow_pickle=True)
                assert_raises(UnicodeError, data.__getitem__, 'x')
                data.close()
                data = np.load(path, allow_pickle=True,
                               fix_imports=False, encoding='latin1')
                assert_raises(ImportError, data.__getitem__, 'x')
                data.close()
            else:
                assert_raises(UnicodeError, np.load, path,
                              allow_pickle=True)
                assert_raises(ImportError, np.load, path,
                              allow_pickle=True, fix_imports=False,
                              encoding='latin1')


def test_pickle_disallow():
    data_dir = os.path.join(os.path.dirname(__file__), 'data')

    path = os.path.join(data_dir, 'py2-objarr.npy')
    assert_raises(ValueError, np.load, path,
                  allow_pickle=False, encoding='latin1')

    path = os.path.join(data_dir, 'py2-objarr.npz')
    f = np.load(path, allow_pickle=False, encoding='latin1')
    assert_raises(ValueError, f.__getitem__, 'x')

    path = os.path.join(tempdir, 'pickle-disabled.npy')
    assert_raises(ValueError, np.save, path,
                  np.array([None], dtype=object), allow_pickle=False)


@pytest.mark.parametrize('dt', [
    np.dtype(np.dtype([('a', np.int8),
                       ('b', np.int16),
                       ('c', np.int32),
                      ], align=True),
             (3,)),
    np.dtype([('x', np.dtype({'names':['a','b'],
                              'formats':['i1','i1'],
                              'offsets':[0,4],
                              'itemsize':8,
                             },
                             (3,)),
               (4,),
             )]),
    np.dtype([('x',
               ('<f8', (5,)),
               (2,),
             )]),
    np.dtype([('x', np.dtype((
        np.dtype((
            np.dtype({'names':['a','b'],
                      'formats':['i1','i1'],
                      'offsets':[0,4],
                      'itemsize':8}),
            (3,)
            )),
        (4,)
        )))
        ]),
    np.dtype([
        ('a', np.dtype((
            np.dtype((
                np.dtype((
                    np.dtype([
                        ('a', int),
                        ('b', np.dtype({'names':['a','b'],
                                        'formats':['i1','i1'],
                                        'offsets':[0,4],
                                        'itemsize':8})),
                    ]),
                    (3,),
                    )),
                (4,),
                )),
            (5,),
            )))
        ]),
    ])
def test_descr_to_dtype(dt):
    dt1 = format.descr_to_dtype(dt.descr)
    assert_equal_(dt1, dt)
    arr1 = np.zeros(3, dt)
    arr2 = roundtrip(arr1)
    assert_array_equal(arr1, arr2)


def test_version_2_0():
    f = BytesIO()
    # requires more than 2 byte for header
    dt = [(("%d" % i) * 100, float) for i in range(500)]
    d = np.ones(1000, dtype=dt)

    format.write_array(f, d, version=(2, 0))
    with warnings.catch_warnings(record=True) as w:
        warnings.filterwarnings('always', '', UserWarning)
        format.write_array(f, d)
        assert_(w[0].category is UserWarning)

    # check alignment of data portion
    f.seek(0)
    header = f.readline()
    assert_(len(header) % format.ARRAY_ALIGN == 0)

    f.seek(0)
    n = format.read_array(f)
    assert_array_equal(d, n)

    # 1.0 requested but data cannot be saved this way
    assert_raises(ValueError, format.write_array, f, d, (1, 0))


@pytest.mark.slow
def test_version_2_0_memmap():
    # requires more than 2 byte for header
    dt = [(("%d" % i) * 100, float) for i in range(500)]
    d = np.ones(1000, dtype=dt)
    tf = tempfile.mktemp('', 'mmap', dir=tempdir)

    # 1.0 requested but data cannot be saved this way
    assert_raises(ValueError, format.open_memmap, tf, mode='w+',
                  dtype=d.dtype, shape=d.shape, version=(1, 0))

    ma = format.open_memmap(tf, mode='w+', dtype=d.dtype,
                            shape=d.shape, version=(2, 0))
    ma[...] = d
    del ma

    with warnings.catch_warnings(record=True) as w:
        warnings.filterwarnings('always', '', UserWarning)
        ma = format.open_memmap(tf, mode='w+', dtype=d.dtype,
                                shape=d.shape, version=None)
        assert_(w[0].category is UserWarning)
        ma[...] = d
        del ma

    ma = format.open_memmap(tf, mode='r')
    assert_array_equal(ma, d)


def test_write_version():
    f = BytesIO()
    arr = np.arange(1)
    # These should pass.
    format.write_array(f, arr, version=(1, 0))
    format.write_array(f, arr)

    format.write_array(f, arr, version=None)
    format.write_array(f, arr)

    format.write_array(f, arr, version=(2, 0))
    format.write_array(f, arr)

    # These should all fail.
    bad_versions = [
        (1, 1),
        (0, 0),
        (0, 1),
        (2, 2),
        (255, 255),
    ]
    for version in bad_versions:
        with assert_raises_regex(ValueError,
                                 'we only support format version.*'):
            format.write_array(f, arr, version=version)


bad_version_magic = [
    b'\x93NUMPY\x01\x01',
    b'\x93NUMPY\x00\x00',
    b'\x93NUMPY\x00\x01',
    b'\x93NUMPY\x02\x00',
    b'\x93NUMPY\x02\x02',
    b'\x93NUMPY\xff\xff',
]
malformed_magic = [
    b'\x92NUMPY\x01\x00',
    b'\x00NUMPY\x01\x00',
    b'\x93numpy\x01\x00',
    b'\x93MATLB\x01\x00',
    b'\x93NUMPY\x01',
    b'\x93NUMPY',
    b'',
]


def test_read_magic():
    s1 = BytesIO()
    s2 = BytesIO()

    arr = np.ones((3, 6), dtype=float)

    format.write_array(s1, arr, version=(1, 0))
    format.write_array(s2, arr, version=(2, 0))

    s1.seek(0)
    s2.seek(0)

    version1 = format.read_magic(s1)
    version2 = format.read_magic(s2)

    assert_(version1 == (1, 0))
    assert_(version2 == (2, 0))

    assert_(s1.tell() == format.MAGIC_LEN)
    assert_(s2.tell() == format.MAGIC_LEN)


def test_read_magic_bad_magic():
    for magic in malformed_magic:
        f = BytesIO(magic)
        assert_raises(ValueError, format.read_array, f)


def test_read_version_1_0_bad_magic():
    for magic in bad_version_magic + malformed_magic:
        f = BytesIO(magic)
        assert_raises(ValueError, format.read_array, f)


def test_bad_magic_args():
    assert_raises(ValueError, format.magic, -1, 1)
    assert_raises(ValueError, format.magic, 256, 1)
    assert_raises(ValueError, format.magic, 1, -1)
    assert_raises(ValueError, format.magic, 1, 256)


def test_large_header():
    s = BytesIO()
    d = {'a': 1, 'b': 2}
    format.write_array_header_1_0(s, d)

    s = BytesIO()
    d = {'a': 1, 'b': 2, 'c': 'x'*256*256}
    assert_raises(ValueError, format.write_array_header_1_0, s, d)


def test_read_array_header_1_0():
    s = BytesIO()

    arr = np.ones((3, 6), dtype=float)
    format.write_array(s, arr, version=(1, 0))

    s.seek(format.MAGIC_LEN)
    shape, fortran, dtype = format.read_array_header_1_0(s)

    assert_(s.tell() % format.ARRAY_ALIGN == 0)
    assert_((shape, fortran, dtype) == ((3, 6), False, float))


def test_read_array_header_2_0():
    s = BytesIO()

    arr = np.ones((3, 6), dtype=float)
    format.write_array(s, arr, version=(2, 0))

    s.seek(format.MAGIC_LEN)
    shape, fortran, dtype = format.read_array_header_2_0(s)

    assert_(s.tell() % format.ARRAY_ALIGN == 0)
    assert_((shape, fortran, dtype) == ((3, 6), False, float))


def test_bad_header():
    # header of length less than 2 should fail
    s = BytesIO()
    assert_raises(ValueError, format.read_array_header_1_0, s)
    s = BytesIO(b'1')
    assert_raises(ValueError, format.read_array_header_1_0, s)

    # header shorter than indicated size should fail
    s = BytesIO(b'\x01\x00')
    assert_raises(ValueError, format.read_array_header_1_0, s)

    # headers without the exact keys required should fail
    d = {"shape": (1, 2),
         "descr": "x"}
    s = BytesIO()
    format.write_array_header_1_0(s, d)
    assert_raises(ValueError, format.read_array_header_1_0, s)

    d = {"shape": (1, 2),
         "fortran_order": False,
         "descr": "x",
         "extrakey": -1}
    s = BytesIO()
    format.write_array_header_1_0(s, d)
    assert_raises(ValueError, format.read_array_header_1_0, s)


def test_large_file_support():
    if (sys.platform == 'win32' or sys.platform == 'cygwin'):
        pytest.skip("Unknown if Windows has sparse filesystems")
    # try creating a large sparse file
    tf_name = os.path.join(tempdir, 'sparse_file')
    try:
        # seek past end would work too, but linux truncate somewhat
        # increases the chances that we have a sparse filesystem and can
        # avoid actually writing 5GB
        import subprocess as sp
        sp.check_call(["truncate", "-s", "5368709120", tf_name])
    except Exception:
        pytest.skip("Could not create 5GB large file")
    # write a small array to the end
    with open(tf_name, "wb") as f:
        f.seek(5368709120)
        d = np.arange(5)
        np.save(f, d)
    # read it back
    with open(tf_name, "rb") as f:
        f.seek(5368709120)
        r = np.load(f)
    assert_array_equal(r, d)


@pytest.mark.skipif(np.dtype(np.intp).itemsize < 8,
                    reason="test requires 64-bit system")
@pytest.mark.slow
def test_large_archive():
    # Regression test for product of saving arrays with dimensions of array
    # having a product that doesn't fit in int32.  See gh-7598 for details.
    try:
        a = np.empty((2**30, 2), dtype=np.uint8)
    except MemoryError:
        pytest.skip("Could not create large file")

    fname = os.path.join(tempdir, "large_archive")

    with open(fname, "wb") as f:
        np.savez(f, arr=a)

    with open(fname, "rb") as f:
        new_a = np.load(f)["arr"]

    assert_(a.shape == new_a.shape)


def test_empty_npz():
    # Test for gh-9989
    fname = os.path.join(tempdir, "nothing.npz")
    np.savez(fname)
    np.load(fname)


def test_unicode_field_names():
    # gh-7391
    arr = np.array([
        (1, 3),
        (1, 2),
        (1, 3),
        (1, 2)
    ], dtype=[
        ('int', int),
        (u'\N{CJK UNIFIED IDEOGRAPH-6574}\N{CJK UNIFIED IDEOGRAPH-5F62}', int)
    ])
    fname = os.path.join(tempdir, "unicode.npy")
    with open(fname, 'wb') as f:
        format.write_array(f, arr, version=(3, 0))
    with open(fname, 'rb') as f:
        arr2 = format.read_array(f)
    assert_array_equal(arr, arr2)

    # notifies the user that 3.0 is selected
    with open(fname, 'wb') as f:
        with assert_warns(UserWarning):
            format.write_array(f, arr, version=None)


@pytest.mark.parametrize('dt, fail', [
    (np.dtype({'names': ['a', 'b'],
               'formats': [float, np.dtype('S3',
                                           metadata={'some': 'stuff'})]}),
     True),
    (np.dtype(int, metadata={'some': 'stuff'}), False),
    (np.dtype([('subarray', (int, (2,)))], metadata={'some': 'stuff'}),
     False),
    # recursive: metadata on the field of a dtype
    (np.dtype({'names': ['a', 'b'],
               'formats': [float,
                           np.dtype({'names': ['c'],
                                     'formats': [np.dtype(int, metadata={})]})
                          ]}),
     False)
    ])
def test_metadata_dtype(dt, fail):
    # gh-14142
    arr = np.ones(10, dtype=dt)
    buf = BytesIO()
    with assert_warns(UserWarning):
        np.save(buf, arr)
    buf.seek(0)
    if fail:
        with assert_raises(ValueError):
            np.load(buf)
    else:
        arr2 = np.load(buf)
        # BUG: assert_array_equal does not check metadata
        from numpy.lib.format import _has_metadata
        assert_array_equal(arr, arr2)
        assert _has_metadata(arr.dtype)
        assert not _has_metadata(arr2.dtype)
simongibbons/numpy
numpy/lib/tests/test_format.py
numpy/distutils/from_template.py
"""Tests for laguerre module. """ from functools import reduce import numpy as np import numpy.polynomial.laguerre as lag from numpy.polynomial.polynomial import polyval from numpy.testing import ( assert_almost_equal, assert_raises, assert_equal, assert_, ) L0 = np.array([1])/1 L1 = np.array([1, -1])/1 L2 = np.array([2, -4, 1])/2 L3 = np.array([6, -18, 9, -1])/6 L4 = np.array([24, -96, 72, -16, 1])/24 L5 = np.array([120, -600, 600, -200, 25, -1])/120 L6 = np.array([720, -4320, 5400, -2400, 450, -36, 1])/720 Llist = [L0, L1, L2, L3, L4, L5, L6] def trim(x): return lag.lagtrim(x, tol=1e-6) class TestConstants: def test_lagdomain(self): assert_equal(lag.lagdomain, [0, 1]) def test_lagzero(self): assert_equal(lag.lagzero, [0]) def test_lagone(self): assert_equal(lag.lagone, [1]) def test_lagx(self): assert_equal(lag.lagx, [1, -1]) class TestArithmetic: x = np.linspace(-3, 3, 100) def test_lagadd(self): for i in range(5): for j in range(5): msg = f"At i={i}, j={j}" tgt = np.zeros(max(i, j) + 1) tgt[i] += 1 tgt[j] += 1 res = lag.lagadd([0]*i + [1], [0]*j + [1]) assert_equal(trim(res), trim(tgt), err_msg=msg) def test_lagsub(self): for i in range(5): for j in range(5): msg = f"At i={i}, j={j}" tgt = np.zeros(max(i, j) + 1) tgt[i] += 1 tgt[j] -= 1 res = lag.lagsub([0]*i + [1], [0]*j + [1]) assert_equal(trim(res), trim(tgt), err_msg=msg) def test_lagmulx(self): assert_equal(lag.lagmulx([0]), [0]) assert_equal(lag.lagmulx([1]), [1, -1]) for i in range(1, 5): ser = [0]*i + [1] tgt = [0]*(i - 1) + [-i, 2*i + 1, -(i + 1)] assert_almost_equal(lag.lagmulx(ser), tgt) def test_lagmul(self): # check values of result for i in range(5): pol1 = [0]*i + [1] val1 = lag.lagval(self.x, pol1) for j in range(5): msg = f"At i={i}, j={j}" pol2 = [0]*j + [1] val2 = lag.lagval(self.x, pol2) pol3 = lag.lagmul(pol1, pol2) val3 = lag.lagval(self.x, pol3) assert_(len(pol3) == i + j + 1, msg) assert_almost_equal(val3, val1*val2, err_msg=msg) def test_lagdiv(self): for i in range(5): for j in range(5): msg = f"At i={i}, j={j}" ci = [0]*i + [1] cj = [0]*j + [1] tgt = lag.lagadd(ci, cj) quo, rem = lag.lagdiv(tgt, ci) res = lag.lagadd(lag.lagmul(quo, ci), rem) assert_almost_equal(trim(res), trim(tgt), err_msg=msg) def test_lagpow(self): for i in range(5): for j in range(5): msg = f"At i={i}, j={j}" c = np.arange(i + 1) tgt = reduce(lag.lagmul, [c]*j, np.array([1])) res = lag.lagpow(c, j) assert_equal(trim(res), trim(tgt), err_msg=msg) class TestEvaluation: # coefficients of 1 + 2*x + 3*x**2 c1d = np.array([9., -14., 6.]) c2d = np.einsum('i,j->ij', c1d, c1d) c3d = np.einsum('i,j,k->ijk', c1d, c1d, c1d) # some random values in [-1, 1) x = np.random.random((3, 5))*2 - 1 y = polyval(x, [1., 2., 3.]) def test_lagval(self): #check empty input assert_equal(lag.lagval([], [1]).size, 0) #check normal input) x = np.linspace(-1, 1) y = [polyval(x, c) for c in Llist] for i in range(7): msg = f"At i={i}" tgt = y[i] res = lag.lagval(x, [0]*i + [1]) assert_almost_equal(res, tgt, err_msg=msg) #check that shape is preserved for i in range(3): dims = [2]*i x = np.zeros(dims) assert_equal(lag.lagval(x, [1]).shape, dims) assert_equal(lag.lagval(x, [1, 0]).shape, dims) assert_equal(lag.lagval(x, [1, 0, 0]).shape, dims) def test_lagval2d(self): x1, x2, x3 = self.x y1, y2, y3 = self.y #test exceptions assert_raises(ValueError, lag.lagval2d, x1, x2[:2], self.c2d) #test values tgt = y1*y2 res = lag.lagval2d(x1, x2, self.c2d) assert_almost_equal(res, tgt) #test shape z = np.ones((2, 3)) res = lag.lagval2d(z, z, self.c2d) assert_(res.shape == (2, 3)) def 
test_lagval3d(self): x1, x2, x3 = self.x y1, y2, y3 = self.y #test exceptions assert_raises(ValueError, lag.lagval3d, x1, x2, x3[:2], self.c3d) #test values tgt = y1*y2*y3 res = lag.lagval3d(x1, x2, x3, self.c3d) assert_almost_equal(res, tgt) #test shape z = np.ones((2, 3)) res = lag.lagval3d(z, z, z, self.c3d) assert_(res.shape == (2, 3)) def test_laggrid2d(self): x1, x2, x3 = self.x y1, y2, y3 = self.y #test values tgt = np.einsum('i,j->ij', y1, y2) res = lag.laggrid2d(x1, x2, self.c2d) assert_almost_equal(res, tgt) #test shape z = np.ones((2, 3)) res = lag.laggrid2d(z, z, self.c2d) assert_(res.shape == (2, 3)*2) def test_laggrid3d(self): x1, x2, x3 = self.x y1, y2, y3 = self.y #test values tgt = np.einsum('i,j,k->ijk', y1, y2, y3) res = lag.laggrid3d(x1, x2, x3, self.c3d) assert_almost_equal(res, tgt) #test shape z = np.ones((2, 3)) res = lag.laggrid3d(z, z, z, self.c3d) assert_(res.shape == (2, 3)*3) class TestIntegral: def test_lagint(self): # check exceptions assert_raises(TypeError, lag.lagint, [0], .5) assert_raises(ValueError, lag.lagint, [0], -1) assert_raises(ValueError, lag.lagint, [0], 1, [0, 0]) assert_raises(ValueError, lag.lagint, [0], lbnd=[0]) assert_raises(ValueError, lag.lagint, [0], scl=[0]) assert_raises(TypeError, lag.lagint, [0], axis=.5) # test integration of zero polynomial for i in range(2, 5): k = [0]*(i - 2) + [1] res = lag.lagint([0], m=i, k=k) assert_almost_equal(res, [1, -1]) # check single integration with integration constant for i in range(5): scl = i + 1 pol = [0]*i + [1] tgt = [i] + [0]*i + [1/scl] lagpol = lag.poly2lag(pol) lagint = lag.lagint(lagpol, m=1, k=[i]) res = lag.lag2poly(lagint) assert_almost_equal(trim(res), trim(tgt)) # check single integration with integration constant and lbnd for i in range(5): scl = i + 1 pol = [0]*i + [1] lagpol = lag.poly2lag(pol) lagint = lag.lagint(lagpol, m=1, k=[i], lbnd=-1) assert_almost_equal(lag.lagval(-1, lagint), i) # check single integration with integration constant and scaling for i in range(5): scl = i + 1 pol = [0]*i + [1] tgt = [i] + [0]*i + [2/scl] lagpol = lag.poly2lag(pol) lagint = lag.lagint(lagpol, m=1, k=[i], scl=2) res = lag.lag2poly(lagint) assert_almost_equal(trim(res), trim(tgt)) # check multiple integrations with default k for i in range(5): for j in range(2, 5): pol = [0]*i + [1] tgt = pol[:] for k in range(j): tgt = lag.lagint(tgt, m=1) res = lag.lagint(pol, m=j) assert_almost_equal(trim(res), trim(tgt)) # check multiple integrations with defined k for i in range(5): for j in range(2, 5): pol = [0]*i + [1] tgt = pol[:] for k in range(j): tgt = lag.lagint(tgt, m=1, k=[k]) res = lag.lagint(pol, m=j, k=list(range(j))) assert_almost_equal(trim(res), trim(tgt)) # check multiple integrations with lbnd for i in range(5): for j in range(2, 5): pol = [0]*i + [1] tgt = pol[:] for k in range(j): tgt = lag.lagint(tgt, m=1, k=[k], lbnd=-1) res = lag.lagint(pol, m=j, k=list(range(j)), lbnd=-1) assert_almost_equal(trim(res), trim(tgt)) # check multiple integrations with scaling for i in range(5): for j in range(2, 5): pol = [0]*i + [1] tgt = pol[:] for k in range(j): tgt = lag.lagint(tgt, m=1, k=[k], scl=2) res = lag.lagint(pol, m=j, k=list(range(j)), scl=2) assert_almost_equal(trim(res), trim(tgt)) def test_lagint_axis(self): # check that axis keyword works c2d = np.random.random((3, 4)) tgt = np.vstack([lag.lagint(c) for c in c2d.T]).T res = lag.lagint(c2d, axis=0) assert_almost_equal(res, tgt) tgt = np.vstack([lag.lagint(c) for c in c2d]) res = lag.lagint(c2d, axis=1) assert_almost_equal(res, tgt) tgt 
= np.vstack([lag.lagint(c, k=3) for c in c2d]) res = lag.lagint(c2d, k=3, axis=1) assert_almost_equal(res, tgt) class TestDerivative: def test_lagder(self): # check exceptions assert_raises(TypeError, lag.lagder, [0], .5) assert_raises(ValueError, lag.lagder, [0], -1) # check that zeroth derivative does nothing for i in range(5): tgt = [0]*i + [1] res = lag.lagder(tgt, m=0) assert_equal(trim(res), trim(tgt)) # check that derivation is the inverse of integration for i in range(5): for j in range(2, 5): tgt = [0]*i + [1] res = lag.lagder(lag.lagint(tgt, m=j), m=j) assert_almost_equal(trim(res), trim(tgt)) # check derivation with scaling for i in range(5): for j in range(2, 5): tgt = [0]*i + [1] res = lag.lagder(lag.lagint(tgt, m=j, scl=2), m=j, scl=.5) assert_almost_equal(trim(res), trim(tgt)) def test_lagder_axis(self): # check that axis keyword works c2d = np.random.random((3, 4)) tgt = np.vstack([lag.lagder(c) for c in c2d.T]).T res = lag.lagder(c2d, axis=0) assert_almost_equal(res, tgt) tgt = np.vstack([lag.lagder(c) for c in c2d]) res = lag.lagder(c2d, axis=1) assert_almost_equal(res, tgt) class TestVander: # some random values in [-1, 1) x = np.random.random((3, 5))*2 - 1 def test_lagvander(self): # check for 1d x x = np.arange(3) v = lag.lagvander(x, 3) assert_(v.shape == (3, 4)) for i in range(4): coef = [0]*i + [1] assert_almost_equal(v[..., i], lag.lagval(x, coef)) # check for 2d x x = np.array([[1, 2], [3, 4], [5, 6]]) v = lag.lagvander(x, 3) assert_(v.shape == (3, 2, 4)) for i in range(4): coef = [0]*i + [1] assert_almost_equal(v[..., i], lag.lagval(x, coef)) def test_lagvander2d(self): # also tests lagval2d for non-square coefficient array x1, x2, x3 = self.x c = np.random.random((2, 3)) van = lag.lagvander2d(x1, x2, [1, 2]) tgt = lag.lagval2d(x1, x2, c) res = np.dot(van, c.flat) assert_almost_equal(res, tgt) # check shape van = lag.lagvander2d([x1], [x2], [1, 2]) assert_(van.shape == (1, 5, 6)) def test_lagvander3d(self): # also tests lagval3d for non-square coefficient array x1, x2, x3 = self.x c = np.random.random((2, 3, 4)) van = lag.lagvander3d(x1, x2, x3, [1, 2, 3]) tgt = lag.lagval3d(x1, x2, x3, c) res = np.dot(van, c.flat) assert_almost_equal(res, tgt) # check shape van = lag.lagvander3d([x1], [x2], [x3], [1, 2, 3]) assert_(van.shape == (1, 5, 24)) class TestFitting: def test_lagfit(self): def f(x): return x*(x - 1)*(x - 2) # Test exceptions assert_raises(ValueError, lag.lagfit, [1], [1], -1) assert_raises(TypeError, lag.lagfit, [[1]], [1], 0) assert_raises(TypeError, lag.lagfit, [], [1], 0) assert_raises(TypeError, lag.lagfit, [1], [[[1]]], 0) assert_raises(TypeError, lag.lagfit, [1, 2], [1], 0) assert_raises(TypeError, lag.lagfit, [1], [1, 2], 0) assert_raises(TypeError, lag.lagfit, [1], [1], 0, w=[[1]]) assert_raises(TypeError, lag.lagfit, [1], [1], 0, w=[1, 1]) assert_raises(ValueError, lag.lagfit, [1], [1], [-1,]) assert_raises(ValueError, lag.lagfit, [1], [1], [2, -1, 6]) assert_raises(TypeError, lag.lagfit, [1], [1], []) # Test fit x = np.linspace(0, 2) y = f(x) # coef3 = lag.lagfit(x, y, 3) assert_equal(len(coef3), 4) assert_almost_equal(lag.lagval(x, coef3), y) coef3 = lag.lagfit(x, y, [0, 1, 2, 3]) assert_equal(len(coef3), 4) assert_almost_equal(lag.lagval(x, coef3), y) # coef4 = lag.lagfit(x, y, 4) assert_equal(len(coef4), 5) assert_almost_equal(lag.lagval(x, coef4), y) coef4 = lag.lagfit(x, y, [0, 1, 2, 3, 4]) assert_equal(len(coef4), 5) assert_almost_equal(lag.lagval(x, coef4), y) # coef2d = lag.lagfit(x, np.array([y, y]).T, 3) assert_almost_equal(coef2d, 
np.array([coef3, coef3]).T) coef2d = lag.lagfit(x, np.array([y, y]).T, [0, 1, 2, 3]) assert_almost_equal(coef2d, np.array([coef3, coef3]).T) # test weighting w = np.zeros_like(x) yw = y.copy() w[1::2] = 1 y[0::2] = 0 wcoef3 = lag.lagfit(x, yw, 3, w=w) assert_almost_equal(wcoef3, coef3) wcoef3 = lag.lagfit(x, yw, [0, 1, 2, 3], w=w) assert_almost_equal(wcoef3, coef3) # wcoef2d = lag.lagfit(x, np.array([yw, yw]).T, 3, w=w) assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T) wcoef2d = lag.lagfit(x, np.array([yw, yw]).T, [0, 1, 2, 3], w=w) assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T) # test scaling with complex values x points whose square # is zero when summed. x = [1, 1j, -1, -1j] assert_almost_equal(lag.lagfit(x, x, 1), [1, -1]) assert_almost_equal(lag.lagfit(x, x, [0, 1]), [1, -1]) class TestCompanion: def test_raises(self): assert_raises(ValueError, lag.lagcompanion, []) assert_raises(ValueError, lag.lagcompanion, [1]) def test_dimensions(self): for i in range(1, 5): coef = [0]*i + [1] assert_(lag.lagcompanion(coef).shape == (i, i)) def test_linear_root(self): assert_(lag.lagcompanion([1, 2])[0, 0] == 1.5) class TestGauss: def test_100(self): x, w = lag.laggauss(100) # test orthogonality. Note that the results need to be normalized, # otherwise the huge values that can arise from fast growing # functions like Laguerre can be very confusing. v = lag.lagvander(x, 99) vv = np.dot(v.T * w, v) vd = 1/np.sqrt(vv.diagonal()) vv = vd[:, None] * vv * vd assert_almost_equal(vv, np.eye(100)) # check that the integral of 1 is correct tgt = 1.0 assert_almost_equal(w.sum(), tgt) class TestMisc: def test_lagfromroots(self): res = lag.lagfromroots([]) assert_almost_equal(trim(res), [1]) for i in range(1, 5): roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2]) pol = lag.lagfromroots(roots) res = lag.lagval(roots, pol) tgt = 0 assert_(len(pol) == i + 1) assert_almost_equal(lag.lag2poly(pol)[-1], 1) assert_almost_equal(res, tgt) def test_lagroots(self): assert_almost_equal(lag.lagroots([1]), []) assert_almost_equal(lag.lagroots([0, 1]), [1]) for i in range(2, 5): tgt = np.linspace(0, 3, i) res = lag.lagroots(lag.lagfromroots(tgt)) assert_almost_equal(trim(res), trim(tgt)) def test_lagtrim(self): coef = [2, -1, 1, 0] # Test exceptions assert_raises(ValueError, lag.lagtrim, coef, -1) # Test results assert_equal(lag.lagtrim(coef), coef[:-1]) assert_equal(lag.lagtrim(coef, 1), coef[:-3]) assert_equal(lag.lagtrim(coef, 2), [0]) def test_lagline(self): assert_equal(lag.lagline(3, 4), [7, -4]) def test_lag2poly(self): for i in range(7): assert_almost_equal(lag.lag2poly([0]*i + [1]), Llist[i]) def test_poly2lag(self): for i in range(7): assert_almost_equal(lag.poly2lag(Llist[i]), [0]*i + [1]) def test_weight(self): x = np.linspace(0, 10, 11) tgt = np.exp(-x) res = lag.lagweight(x) assert_almost_equal(res, tgt)
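A quick cross-check of the coefficient table at the top of the file above; this is an illustrative sketch of mine, not part of the test suite:

import numpy as np
import numpy.polynomial.laguerre as lag
from numpy.polynomial.polynomial import polyval

# Illustration: evaluating the Laguerre series [0, 0, 1] (i.e. L2 alone)
# must agree with the power-basis coefficients L2 = [2, -4, 1]/2 listed
# in the table above.
x = np.linspace(0, 3, 7)
assert np.allclose(lag.lagval(x, [0, 0, 1]),
                   polyval(x, np.array([2, -4, 1])/2))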
('y3', '<f8', (2,)),\n ('z3', '<u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '<U8'), ('Value', '<c16')]),\n ('y', '<f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "v\x00{'descr': [('x', '>i4', (2,)), ('y', '>f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "\x16\x02{'descr': [('x', '>i4', (2,)),\n ('Info',\n [('value', '>c16'),\n ('y2', '>f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '>c16', (2,)),\n ('y3', '>f8', (2,)),\n ('z3', '>u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '>U8'), ('Value', '>c16')]),\n ('y', '>f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" ''' import sys import os import shutil import tempfile import warnings import pytest from io import BytesIO import numpy as np from numpy.testing import ( assert_, assert_array_equal, assert_raises, assert_raises_regex, assert_warns ) from numpy.lib import format tempdir = None # Module-level setup. def setup_module(): global tempdir tempdir = tempfile.mkdtemp() def teardown_module(): global tempdir if tempdir is not None and os.path.isdir(tempdir): shutil.rmtree(tempdir) tempdir = None # Generate some basic arrays to test with. scalars = [ np.uint8, np.int8, np.uint16, np.int16, np.uint32, np.int32, np.uint64, np.int64, np.float32, np.float64, np.complex64, np.complex128, object, ] basic_arrays = [] for scalar in scalars: for endian in '<>': dtype = np.dtype(scalar).newbyteorder(endian) basic = np.arange(1500).astype(dtype) basic_arrays.extend([ # Empty np.array([], dtype=dtype), # Rank-0 np.array(10, dtype=dtype), # 1-D basic, # 2-D C-contiguous basic.reshape((30, 50)), # 2-D F-contiguous basic.reshape((30, 50)).T, # 2-D non-contiguous basic.reshape((30, 50))[::-1, ::2], ]) # More complicated record arrays. 
# This is the structure of the table used for plain objects:
#
# +-+-+-+
# |x|y|z|
# +-+-+-+

# Structure of a plain array description:
Pdescr = [
    ('x', 'i4', (2,)),
    ('y', 'f8', (2, 2)),
    ('z', 'u1')]

# A plain list of tuples with values for testing:
PbufferT = [
    # x     y                  z
    ([3, 2], [[6., 4.], [6., 4.]], 8),
    ([4, 3], [[7., 5.], [7., 5.]], 9),
    ]

# This is the structure of the table used for nested objects (DON'T PANIC!):
#
#  +-+---------------------------------+-----+----------+-+-+
#  |x|Info                             |color|info      |y|z|
#  | +-----+--+----------------+----+--+     +----+-----+ | |
#  | |value|y2|Info2           |name|z2|     |Name|Value| | |
#  | |     |  +----+-----+--+--+    |  |     |    |     | | |
#  | |     |  |name|value|y3|z3|    |  |     |    |     | | |
#  +-+-----+--+----+-----+--+--+----+--+-----+----+-----+-+-+
#
# The corresponding nested array description:
Ndescr = [
    ('x', 'i4', (2,)),
    ('Info', [
        ('value', 'c16'),
        ('y2', 'f8'),
        ('Info2', [
            ('name', 'S2'),
            ('value', 'c16', (2,)),
            ('y3', 'f8', (2,)),
            ('z3', 'u4', (2,))]),
        ('name', 'S2'),
        ('z2', 'b1')]),
    ('color', 'S2'),
    ('info', [
        ('Name', 'U8'),
        ('Value', 'c16')]),
    ('y', 'f8', (2, 2)),
    ('z', 'u1')]

NbufferT = [
    # x     Info                                                color info        y                  z
    #       value y2 Info2                            name z2         Name Value
    #                name   value    y3       z3
    ([3, 2], (6j, 6., ('nn', [6j, 4j], [6., 4.], [1, 2]), 'NN', True),
     'cc', ('NN', 6j), [[6., 4.], [6., 4.]], 8),
    ([4, 3], (7j, 7., ('oo', [7j, 5j], [7., 5.], [2, 1]), 'OO', False),
     'dd', ('OO', 7j), [[7., 5.], [7., 5.]], 9),
    ]

record_arrays = [
    np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')),
    np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')),
    np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')),
    np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')),
    np.zeros(1, dtype=[('c', ('<f8', (5,)), (2,))])
    ]


#BytesIO that reads a random number of bytes at a time
class BytesIOSRandomSize(BytesIO):
    def read(self, size=None):
        import random
        size = random.randint(1, size)
        return super(BytesIOSRandomSize, self).read(size)


def roundtrip(arr):
    f = BytesIO()
    format.write_array(f, arr)
    f2 = BytesIO(f.getvalue())
    arr2 = format.read_array(f2, allow_pickle=True)
    return arr2


def roundtrip_randsize(arr):
    f = BytesIO()
    format.write_array(f, arr)
    f2 = BytesIOSRandomSize(f.getvalue())
    arr2 = format.read_array(f2)
    return arr2


def roundtrip_truncated(arr):
    f = BytesIO()
    format.write_array(f, arr)
    #BytesIO is one byte short
    f2 = BytesIO(f.getvalue()[0:-1])
    arr2 = format.read_array(f2)
    return arr2


def assert_equal_(o1, o2):
    assert_(o1 == o2)


def test_roundtrip():
    for arr in basic_arrays + record_arrays:
        arr2 = roundtrip(arr)
        assert_array_equal(arr, arr2)


def test_roundtrip_randsize():
    for arr in basic_arrays + record_arrays:
        if arr.dtype != object:
            arr2 = roundtrip_randsize(arr)
            assert_array_equal(arr, arr2)


def test_roundtrip_truncated():
    for arr in basic_arrays:
        if arr.dtype != object:
            assert_raises(ValueError, roundtrip_truncated, arr)


def test_long_str():
    # check items larger than internal buffer size, gh-4027
    long_str_arr = np.ones(1, dtype=np.dtype((str, format.BUFFER_SIZE + 1)))
    long_str_arr2 = roundtrip(long_str_arr)
    assert_array_equal(long_str_arr, long_str_arr2)


@pytest.mark.slow
def test_memmap_roundtrip():
    # Fixme: used to crash on windows
    if not (sys.platform == 'win32' or sys.platform == 'cygwin'):
        for arr in basic_arrays + record_arrays:
            if arr.dtype.hasobject:
                # Skip these since they can't be mmap'ed.
                continue
            # Write it out normally and through mmap.
            nfn = os.path.join(tempdir, 'normal.npy')
            mfn = os.path.join(tempdir, 'memmap.npy')
            fp = open(nfn, 'wb')
            try:
                format.write_array(fp, arr)
            finally:
                fp.close()

            fortran_order = (
                arr.flags.f_contiguous and not arr.flags.c_contiguous)
            ma = format.open_memmap(mfn, mode='w+', dtype=arr.dtype,
                                    shape=arr.shape,
                                    fortran_order=fortran_order)
            ma[...] = arr
            del ma

            # Check that both of these files' contents are the same.
            fp = open(nfn, 'rb')
            normal_bytes = fp.read()
            fp.close()
            fp = open(mfn, 'rb')
            memmap_bytes = fp.read()
            fp.close()
            assert_equal_(normal_bytes, memmap_bytes)

            # Check that reading the file using memmap works.
            ma = format.open_memmap(nfn, mode='r')
            del ma


def test_compressed_roundtrip():
    arr = np.random.rand(200, 200)
    npz_file = os.path.join(tempdir, 'compressed.npz')
    np.savez_compressed(npz_file, arr=arr)
    arr1 = np.load(npz_file)['arr']
    assert_array_equal(arr, arr1)


# aligned
dt1 = np.dtype('i1, i4, i1', align=True)
# non-aligned, explicit offsets
dt2 = np.dtype({'names': ['a', 'b'],
                'formats': ['i4', 'i4'],
                'offsets': [1, 6]})
# nested struct-in-struct
dt3 = np.dtype({'names': ['c', 'd'], 'formats': ['i4', dt2]})
# field with '' name
dt4 = np.dtype({'names': ['a', '', 'b'], 'formats': ['i4']*3})
# titles
dt5 = np.dtype({'names': ['a', 'b'], 'formats': ['i4', 'i4'],
                'offsets': [1, 6], 'titles': ['aa', 'bb']})
# empty
dt6 = np.dtype({'names': [], 'formats': [], 'itemsize': 8})

@pytest.mark.parametrize("dt", [dt1, dt2, dt3, dt4, dt5, dt6])
def test_load_padded_dtype(dt):
    arr = np.zeros(3, dt)
    for i in range(3):
        arr[i] = i + 5
    npz_file = os.path.join(tempdir, 'aligned.npz')
    np.savez(npz_file, arr=arr)
    arr1 = np.load(npz_file)['arr']
    assert_array_equal(arr, arr1)


def test_python2_python3_interoperability():
    fname = 'win64python2.npy'
    path = os.path.join(os.path.dirname(__file__), 'data', fname)
    data = np.load(path)
    assert_array_equal(data, np.ones(2))


def test_pickle_python2_python3():
    # Test that loading object arrays saved on Python 2 works both on
    # Python 2 and Python 3 and vice versa
    data_dir = os.path.join(os.path.dirname(__file__), 'data')

    expected = np.array([None, range, u'\u512a\u826f',
                         b'\xe4\xb8\x8d\xe8\x89\xaf'],
                        dtype=object)

    for fname in ['py2-objarr.npy', 'py2-objarr.npz',
                  'py3-objarr.npy', 'py3-objarr.npz']:
        path = os.path.join(data_dir, fname)

        for encoding in ['bytes', 'latin1']:
            data_f = np.load(path, allow_pickle=True, encoding=encoding)
            if fname.endswith('.npz'):
                data = data_f['x']
                data_f.close()
            else:
                data = data_f

            if encoding == 'latin1' and fname.startswith('py2'):
                assert_(isinstance(data[3], str))
                assert_array_equal(data[:-1], expected[:-1])
                # mojibake occurs
                assert_array_equal(data[-1].encode(encoding), expected[-1])
            else:
                assert_(isinstance(data[3], bytes))
                assert_array_equal(data, expected)

        if fname.startswith('py2'):
            if fname.endswith('.npz'):
                data = np.load(path, allow_pickle=True)
                assert_raises(UnicodeError, data.__getitem__, 'x')
                data.close()
                data = np.load(path, allow_pickle=True,
                               fix_imports=False, encoding='latin1')
                assert_raises(ImportError, data.__getitem__, 'x')
                data.close()
            else:
                assert_raises(UnicodeError, np.load, path,
                              allow_pickle=True)
                assert_raises(ImportError, np.load, path,
                              allow_pickle=True, fix_imports=False,
                              encoding='latin1')


def test_pickle_disallow():
    data_dir = os.path.join(os.path.dirname(__file__), 'data')

    path = os.path.join(data_dir, 'py2-objarr.npy')
    assert_raises(ValueError, np.load, path,
                  allow_pickle=False, encoding='latin1')

    path = os.path.join(data_dir, 'py2-objarr.npz')
    f = np.load(path, allow_pickle=False, encoding='latin1')
    assert_raises(ValueError, f.__getitem__, 'x')

    path = os.path.join(tempdir, 'pickle-disabled.npy')
    assert_raises(ValueError, np.save, path,
                  np.array([None], dtype=object), allow_pickle=False)


@pytest.mark.parametrize('dt', [
    np.dtype(np.dtype([('a', np.int8),
                       ('b', np.int16),
                       ('c', np.int32),
                      ], align=True),
             (3,)),
    np.dtype([('x', np.dtype({'names':['a','b'],
                              'formats':['i1','i1'],
                              'offsets':[0,4],
                              'itemsize':8,
                             },
                             (3,)),
               (4,),
             )]),
    np.dtype([('x',
                   ('<f8', (5,)),
                   (2,),
              )]),
    np.dtype([('x', np.dtype((
        np.dtype((
            np.dtype({'names':['a','b'],
                      'formats':['i1','i1'],
                      'offsets':[0,4],
                      'itemsize':8}),
            (3,)
            )),
        (4,)
        )))
        ]),
    np.dtype([
        ('a', np.dtype((
            np.dtype((
                np.dtype((
                    np.dtype([
                        ('a', int),
                        ('b', np.dtype({'names':['a','b'],
                                        'formats':['i1','i1'],
                                        'offsets':[0,4],
                                        'itemsize':8})),
                    ]),
                    (3,),
                )),
                (4,),
            )),
            (5,),
        )))
        ]),
    ])
def test_descr_to_dtype(dt):
    dt1 = format.descr_to_dtype(dt.descr)
    assert_equal_(dt1, dt)
    arr1 = np.zeros(3, dt)
    arr2 = roundtrip(arr1)
    assert_array_equal(arr1, arr2)


def test_version_2_0():
    f = BytesIO()
    # requires more than 2 byte for header
    dt = [(("%d" % i) * 100, float) for i in range(500)]
    d = np.ones(1000, dtype=dt)

    format.write_array(f, d, version=(2, 0))
    with warnings.catch_warnings(record=True) as w:
        warnings.filterwarnings('always', '', UserWarning)
        format.write_array(f, d)
        assert_(w[0].category is UserWarning)

    # check alignment of data portion
    f.seek(0)
    header = f.readline()
    assert_(len(header) % format.ARRAY_ALIGN == 0)

    f.seek(0)
    n = format.read_array(f)
    assert_array_equal(d, n)

    # 1.0 requested but data cannot be saved this way
    assert_raises(ValueError, format.write_array, f, d, (1, 0))


@pytest.mark.slow
def test_version_2_0_memmap():
    # requires more than 2 byte for header
    dt = [(("%d" % i) * 100, float) for i in range(500)]
    d = np.ones(1000, dtype=dt)
    tf = tempfile.mktemp('', 'mmap', dir=tempdir)

    # 1.0 requested but data cannot be saved this way
    assert_raises(ValueError, format.open_memmap, tf, mode='w+',
                  dtype=d.dtype, shape=d.shape, version=(1, 0))

    ma = format.open_memmap(tf, mode='w+', dtype=d.dtype,
                            shape=d.shape, version=(2, 0))
    ma[...] = d
    del ma

    with warnings.catch_warnings(record=True) as w:
        warnings.filterwarnings('always', '', UserWarning)
        ma = format.open_memmap(tf, mode='w+', dtype=d.dtype,
                                shape=d.shape, version=None)
        assert_(w[0].category is UserWarning)
        ma[...] = d
        del ma

    ma = format.open_memmap(tf, mode='r')
    assert_array_equal(ma, d)


def test_write_version():
    f = BytesIO()
    arr = np.arange(1)
    # These should pass.
    format.write_array(f, arr, version=(1, 0))
    format.write_array(f, arr)
    format.write_array(f, arr, version=None)
    format.write_array(f, arr)
    format.write_array(f, arr, version=(2, 0))
    format.write_array(f, arr)

    # These should all fail.
    bad_versions = [
        (1, 1),
        (0, 0),
        (0, 1),
        (2, 2),
        (255, 255),
    ]
    for version in bad_versions:
        with assert_raises_regex(ValueError,
                                 'we only support format version.*'):
            format.write_array(f, arr, version=version)


bad_version_magic = [
    b'\x93NUMPY\x01\x01',
    b'\x93NUMPY\x00\x00',
    b'\x93NUMPY\x00\x01',
    b'\x93NUMPY\x02\x00',
    b'\x93NUMPY\x02\x02',
    b'\x93NUMPY\xff\xff',
]
malformed_magic = [
    b'\x92NUMPY\x01\x00',
    b'\x00NUMPY\x01\x00',
    b'\x93numpy\x01\x00',
    b'\x93MATLB\x01\x00',
    b'\x93NUMPY\x01',
    b'\x93NUMPY',
    b'',
]


def test_read_magic():
    s1 = BytesIO()
    s2 = BytesIO()

    arr = np.ones((3, 6), dtype=float)

    format.write_array(s1, arr, version=(1, 0))
    format.write_array(s2, arr, version=(2, 0))

    s1.seek(0)
    s2.seek(0)

    version1 = format.read_magic(s1)
    version2 = format.read_magic(s2)

    assert_(version1 == (1, 0))
    assert_(version2 == (2, 0))

    assert_(s1.tell() == format.MAGIC_LEN)
    assert_(s2.tell() == format.MAGIC_LEN)


def test_read_magic_bad_magic():
    for magic in malformed_magic:
        f = BytesIO(magic)
        assert_raises(ValueError, format.read_array, f)


def test_read_version_1_0_bad_magic():
    for magic in bad_version_magic + malformed_magic:
        f = BytesIO(magic)
        assert_raises(ValueError, format.read_array, f)


def test_bad_magic_args():
    assert_raises(ValueError, format.magic, -1, 1)
    assert_raises(ValueError, format.magic, 256, 1)
    assert_raises(ValueError, format.magic, 1, -1)
    assert_raises(ValueError, format.magic, 1, 256)


def test_large_header():
    s = BytesIO()
    d = {'a': 1, 'b': 2}
    format.write_array_header_1_0(s, d)

    s = BytesIO()
    d = {'a': 1, 'b': 2, 'c': 'x'*256*256}
    assert_raises(ValueError, format.write_array_header_1_0, s, d)


def test_read_array_header_1_0():
    s = BytesIO()

    arr = np.ones((3, 6), dtype=float)
    format.write_array(s, arr, version=(1, 0))

    s.seek(format.MAGIC_LEN)
    shape, fortran, dtype = format.read_array_header_1_0(s)

    assert_(s.tell() % format.ARRAY_ALIGN == 0)
    assert_((shape, fortran, dtype) == ((3, 6), False, float))


def test_read_array_header_2_0():
    s = BytesIO()

    arr = np.ones((3, 6), dtype=float)
    format.write_array(s, arr, version=(2, 0))

    s.seek(format.MAGIC_LEN)
    shape, fortran, dtype = format.read_array_header_2_0(s)

    assert_(s.tell() % format.ARRAY_ALIGN == 0)
    assert_((shape, fortran, dtype) == ((3, 6), False, float))


def test_bad_header():
    # header of length less than 2 should fail
    s = BytesIO()
    assert_raises(ValueError, format.read_array_header_1_0, s)
    s = BytesIO(b'1')
    assert_raises(ValueError, format.read_array_header_1_0, s)

    # header shorter than indicated size should fail
    s = BytesIO(b'\x01\x00')
    assert_raises(ValueError, format.read_array_header_1_0, s)

    # headers without the exact keys required should fail
    d = {"shape": (1, 2),
         "descr": "x"}
    s = BytesIO()
    format.write_array_header_1_0(s, d)
    assert_raises(ValueError, format.read_array_header_1_0, s)

    d = {"shape": (1, 2),
         "fortran_order": False,
         "descr": "x",
         "extrakey": -1}
    s = BytesIO()
    format.write_array_header_1_0(s, d)
    assert_raises(ValueError, format.read_array_header_1_0, s)


def test_large_file_support():
    if (sys.platform == 'win32' or sys.platform == 'cygwin'):
        pytest.skip("Unknown if Windows has sparse filesystems")
    # try creating a large sparse file
    tf_name = os.path.join(tempdir, 'sparse_file')
    try:
        # seek past end would work too, but linux truncate somewhat
        # increases the chances that we have a sparse filesystem and can
        # avoid actually writing 5GB
        import subprocess as sp
        sp.check_call(["truncate", "-s", "5368709120", tf_name])
    except Exception:
        pytest.skip("Could not create 5GB large file")
    # write a small array to the end
    with open(tf_name, "wb") as f:
        f.seek(5368709120)
        d = np.arange(5)
        np.save(f, d)
    # read it back
    with open(tf_name, "rb") as f:
        f.seek(5368709120)
        r = np.load(f)
    assert_array_equal(r, d)


@pytest.mark.skipif(np.dtype(np.intp).itemsize < 8,
                    reason="test requires 64-bit system")
@pytest.mark.slow
def test_large_archive():
    # Regression test for product of saving arrays with dimensions of array
    # having a product that doesn't fit in int32.  See gh-7598 for details.
    try:
        a = np.empty((2**30, 2), dtype=np.uint8)
    except MemoryError:
        pytest.skip("Could not create large file")

    fname = os.path.join(tempdir, "large_archive")

    with open(fname, "wb") as f:
        np.savez(f, arr=a)

    with open(fname, "rb") as f:
        new_a = np.load(f)["arr"]

    assert_(a.shape == new_a.shape)


def test_empty_npz():
    # Test for gh-9989
    fname = os.path.join(tempdir, "nothing.npz")
    np.savez(fname)
    np.load(fname)


def test_unicode_field_names():
    # gh-7391
    arr = np.array([
        (1, 3),
        (1, 2),
        (1, 3),
        (1, 2)
    ], dtype=[
        ('int', int),
        (u'\N{CJK UNIFIED IDEOGRAPH-6574}\N{CJK UNIFIED IDEOGRAPH-5F62}', int)
    ])
    fname = os.path.join(tempdir, "unicode.npy")
    with open(fname, 'wb') as f:
        format.write_array(f, arr, version=(3, 0))

    with open(fname, 'rb') as f:
        arr2 = format.read_array(f)
    assert_array_equal(arr, arr2)

    # notifies the user that 3.0 is selected
    with open(fname, 'wb') as f:
        with assert_warns(UserWarning):
            format.write_array(f, arr, version=None)


@pytest.mark.parametrize('dt, fail', [
    (np.dtype({'names': ['a', 'b'],
               'formats': [float, np.dtype('S3',
                                           metadata={'some': 'stuff'})]}),
     True),
    (np.dtype(int, metadata={'some': 'stuff'}), False),
    (np.dtype([('subarray', (int, (2,)))], metadata={'some': 'stuff'}),
     False),
    # recursive: metadata on the field of a dtype
    (np.dtype({'names': ['a', 'b'], 'formats': [
        float, np.dtype({'names': ['c'],
                         'formats': [np.dtype(int, metadata={})]})
    ]}), False)
    ])
def test_metadata_dtype(dt, fail):
    # gh-14142
    arr = np.ones(10, dtype=dt)
    buf = BytesIO()
    with assert_warns(UserWarning):
        np.save(buf, arr)
    buf.seek(0)
    if fail:
        with assert_raises(ValueError):
            np.load(buf)
    else:
        arr2 = np.load(buf)
        # BUG: assert_array_equal does not check metadata
        from numpy.lib.format import _has_metadata
        assert_array_equal(arr, arr2)
        assert _has_metadata(arr.dtype)
        assert not _has_metadata(arr2.dtype)
simongibbons/numpy
numpy/lib/tests/test_format.py
numpy/polynomial/tests/test_laguerre.py
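The header round trip exercised throughout the test file above can be distilled into a few lines. What follows is a minimal sketch, not part of the repository; it only uses numpy.lib.format calls that already appear in the tests, and the commented outputs assume the default (1, 0) format version is chosen for this simple array.

from io import BytesIO

import numpy as np
from numpy.lib import format

f = BytesIO()
format.write_array(f, np.arange(6, dtype='<i4').reshape(2, 3))

f.seek(0)
print(format.read_magic(f))             # (1, 0) -- version picked by default
print(format.read_array_header_1_0(f))  # ((2, 3), False, dtype('int32'))

f.seek(0)                               # read_array re-reads magic + header
print(format.read_array(f))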
""" Standard container-class for easy multiple-inheritance. Try to inherit from the ndarray instead of using this class as this is not complete. """ from numpy.core import ( array, asarray, absolute, add, subtract, multiply, divide, remainder, power, left_shift, right_shift, bitwise_and, bitwise_or, bitwise_xor, invert, less, less_equal, not_equal, equal, greater, greater_equal, shape, reshape, arange, sin, sqrt, transpose ) class container: """ container(data, dtype=None, copy=True) Standard container-class for easy multiple-inheritance. Methods ------- copy tostring byteswap astype """ def __init__(self, data, dtype=None, copy=True): self.array = array(data, dtype, copy=copy) def __repr__(self): if self.ndim > 0: return self.__class__.__name__ + repr(self.array)[len("array"):] else: return self.__class__.__name__ + "(" + repr(self.array) + ")" def __array__(self, t=None): if t: return self.array.astype(t) return self.array # Array as sequence def __len__(self): return len(self.array) def __getitem__(self, index): return self._rc(self.array[index]) def __setitem__(self, index, value): self.array[index] = asarray(value, self.dtype) def __abs__(self): return self._rc(absolute(self.array)) def __neg__(self): return self._rc(-self.array) def __add__(self, other): return self._rc(self.array + asarray(other)) __radd__ = __add__ def __iadd__(self, other): add(self.array, other, self.array) return self def __sub__(self, other): return self._rc(self.array - asarray(other)) def __rsub__(self, other): return self._rc(asarray(other) - self.array) def __isub__(self, other): subtract(self.array, other, self.array) return self def __mul__(self, other): return self._rc(multiply(self.array, asarray(other))) __rmul__ = __mul__ def __imul__(self, other): multiply(self.array, other, self.array) return self def __div__(self, other): return self._rc(divide(self.array, asarray(other))) def __rdiv__(self, other): return self._rc(divide(asarray(other), self.array)) def __idiv__(self, other): divide(self.array, other, self.array) return self def __mod__(self, other): return self._rc(remainder(self.array, other)) def __rmod__(self, other): return self._rc(remainder(other, self.array)) def __imod__(self, other): remainder(self.array, other, self.array) return self def __divmod__(self, other): return (self._rc(divide(self.array, other)), self._rc(remainder(self.array, other))) def __rdivmod__(self, other): return (self._rc(divide(other, self.array)), self._rc(remainder(other, self.array))) def __pow__(self, other): return self._rc(power(self.array, asarray(other))) def __rpow__(self, other): return self._rc(power(asarray(other), self.array)) def __ipow__(self, other): power(self.array, other, self.array) return self def __lshift__(self, other): return self._rc(left_shift(self.array, other)) def __rshift__(self, other): return self._rc(right_shift(self.array, other)) def __rlshift__(self, other): return self._rc(left_shift(other, self.array)) def __rrshift__(self, other): return self._rc(right_shift(other, self.array)) def __ilshift__(self, other): left_shift(self.array, other, self.array) return self def __irshift__(self, other): right_shift(self.array, other, self.array) return self def __and__(self, other): return self._rc(bitwise_and(self.array, other)) def __rand__(self, other): return self._rc(bitwise_and(other, self.array)) def __iand__(self, other): bitwise_and(self.array, other, self.array) return self def __xor__(self, other): return self._rc(bitwise_xor(self.array, other)) def __rxor__(self, other): return 
self._rc(bitwise_xor(other, self.array)) def __ixor__(self, other): bitwise_xor(self.array, other, self.array) return self def __or__(self, other): return self._rc(bitwise_or(self.array, other)) def __ror__(self, other): return self._rc(bitwise_or(other, self.array)) def __ior__(self, other): bitwise_or(self.array, other, self.array) return self def __pos__(self): return self._rc(self.array) def __invert__(self): return self._rc(invert(self.array)) def _scalarfunc(self, func): if self.ndim == 0: return func(self[0]) else: raise TypeError( "only rank-0 arrays can be converted to Python scalars.") def __complex__(self): return self._scalarfunc(complex) def __float__(self): return self._scalarfunc(float) def __int__(self): return self._scalarfunc(int) def __hex__(self): return self._scalarfunc(hex) def __oct__(self): return self._scalarfunc(oct) def __lt__(self, other): return self._rc(less(self.array, other)) def __le__(self, other): return self._rc(less_equal(self.array, other)) def __eq__(self, other): return self._rc(equal(self.array, other)) def __ne__(self, other): return self._rc(not_equal(self.array, other)) def __gt__(self, other): return self._rc(greater(self.array, other)) def __ge__(self, other): return self._rc(greater_equal(self.array, other)) def copy(self): "" return self._rc(self.array.copy()) def tostring(self): "" return self.array.tostring() def tobytes(self): "" return self.array.tobytes() def byteswap(self): "" return self._rc(self.array.byteswap()) def astype(self, typecode): "" return self._rc(self.array.astype(typecode)) def _rc(self, a): if len(shape(a)) == 0: return a else: return self.__class__(a) def __array_wrap__(self, *args): return self.__class__(args[0]) def __setattr__(self, attr, value): if attr == 'array': object.__setattr__(self, attr, value) return try: self.array.__setattr__(attr, value) except AttributeError: object.__setattr__(self, attr, value) # Only called after other approaches fail. def __getattr__(self, attr): if (attr == 'array'): return object.__getattribute__(self, attr) return self.array.__getattribute__(attr) ############################################################# # Test of class container ############################################################# if __name__ == '__main__': temp = reshape(arange(10000), (100, 100)) ua = container(temp) # new object created begin test print(dir(ua)) print(shape(ua), ua.shape) # I have changed Numeric.py ua_small = ua[:3, :5] print(ua_small) # this did not change ua[0,0], which is not normal behavior ua_small[0, 0] = 10 print(ua_small[0, 0], ua[0, 0]) print(sin(ua_small) / 3. * 6. + sqrt(ua_small ** 2)) print(less(ua_small, 103), type(less(ua_small, 103))) print(type(ua_small * reshape(arange(15), shape(ua_small)))) print(reshape(ua_small, (5, 3))) print(transpose(ua_small))
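Since container is meant to be inherited from, a short subclassing sketch may help. LoggedArray below is a hypothetical name, not part of numpy; it simply counts how often _rc wraps a result back into the subclass, which is the hook every arithmetic operator funnels through.

import numpy as np
from numpy.lib.user_array import container

class LoggedArray(container):
    """Hypothetical subclass: counts results wrapped by _rc."""
    wrapped = 0  # class-level counter, sidesteps container.__setattr__

    def _rc(self, a):
        LoggedArray.wrapped += 1
        return container._rc(self, a)

a = LoggedArray(np.arange(6).reshape(2, 3))
b = (a + 1) * 2            # each operator routes its result through _rc
print(type(b).__name__, LoggedArray.wrapped)   # LoggedArray 2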
# doctest r''' Test the .npy file format. Set up: >>> import sys >>> from io import BytesIO >>> from numpy.lib import format >>> >>> scalars = [ ... np.uint8, ... np.int8, ... np.uint16, ... np.int16, ... np.uint32, ... np.int32, ... np.uint64, ... np.int64, ... np.float32, ... np.float64, ... np.complex64, ... np.complex128, ... object, ... ] >>> >>> basic_arrays = [] >>> >>> for scalar in scalars: ... for endian in '<>': ... dtype = np.dtype(scalar).newbyteorder(endian) ... basic = np.arange(15).astype(dtype) ... basic_arrays.extend([ ... np.array([], dtype=dtype), ... np.array(10, dtype=dtype), ... basic, ... basic.reshape((3,5)), ... basic.reshape((3,5)).T, ... basic.reshape((3,5))[::-1,::2], ... ]) ... >>> >>> Pdescr = [ ... ('x', 'i4', (2,)), ... ('y', 'f8', (2, 2)), ... ('z', 'u1')] >>> >>> >>> PbufferT = [ ... ([3,2], [[6.,4.],[6.,4.]], 8), ... ([4,3], [[7.,5.],[7.,5.]], 9), ... ] >>> >>> >>> Ndescr = [ ... ('x', 'i4', (2,)), ... ('Info', [ ... ('value', 'c16'), ... ('y2', 'f8'), ... ('Info2', [ ... ('name', 'S2'), ... ('value', 'c16', (2,)), ... ('y3', 'f8', (2,)), ... ('z3', 'u4', (2,))]), ... ('name', 'S2'), ... ('z2', 'b1')]), ... ('color', 'S2'), ... ('info', [ ... ('Name', 'U8'), ... ('Value', 'c16')]), ... ('y', 'f8', (2, 2)), ... ('z', 'u1')] >>> >>> >>> NbufferT = [ ... ([3,2], (6j, 6., ('nn', [6j,4j], [6.,4.], [1,2]), 'NN', True), 'cc', ('NN', 6j), [[6.,4.],[6.,4.]], 8), ... ([4,3], (7j, 7., ('oo', [7j,5j], [7.,5.], [2,1]), 'OO', False), 'dd', ('OO', 7j), [[7.,5.],[7.,5.]], 9), ... ] >>> >>> >>> record_arrays = [ ... np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')), ... np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')), ... np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')), ... np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')), ... ] Test the magic string writing. >>> format.magic(1, 0) '\x93NUMPY\x01\x00' >>> format.magic(0, 0) '\x93NUMPY\x00\x00' >>> format.magic(255, 255) '\x93NUMPY\xff\xff' >>> format.magic(2, 5) '\x93NUMPY\x02\x05' Test the magic string reading. >>> format.read_magic(BytesIO(format.magic(1, 0))) (1, 0) >>> format.read_magic(BytesIO(format.magic(0, 0))) (0, 0) >>> format.read_magic(BytesIO(format.magic(255, 255))) (255, 255) >>> format.read_magic(BytesIO(format.magic(2, 5))) (2, 5) Test the header writing. >>> for arr in basic_arrays + record_arrays: ... f = BytesIO() ... format.write_array_header_1_0(f, arr) # XXX: arr is not a dict, items gets called on it ... print(repr(f.getvalue())) ... 
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|u1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|u1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|i1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|i1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u4', 
'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<f4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>f4', 'fortran_order': 
True, 'shape': (5, 3)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<f8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>f8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<c8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>c8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<c16', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>c16', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': 'O', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': 'O', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 3)} \n" "v\x00{'descr': [('x', '<i4', (2,)), ('y', '<f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "\x16\x02{'descr': [('x', '<i4', (2,)),\n ('Info',\n [('value', '<c16'),\n ('y2', '<f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '<c16', (2,)),\n 
('y3', '<f8', (2,)),\n ('z3', '<u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '<U8'), ('Value', '<c16')]),\n ('y', '<f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "v\x00{'descr': [('x', '>i4', (2,)), ('y', '>f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "\x16\x02{'descr': [('x', '>i4', (2,)),\n ('Info',\n [('value', '>c16'),\n ('y2', '>f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '>c16', (2,)),\n ('y3', '>f8', (2,)),\n ('z3', '>u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '>U8'), ('Value', '>c16')]),\n ('y', '>f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" ''' import sys import os import shutil import tempfile import warnings import pytest from io import BytesIO import numpy as np from numpy.testing import ( assert_, assert_array_equal, assert_raises, assert_raises_regex, assert_warns ) from numpy.lib import format tempdir = None # Module-level setup. def setup_module(): global tempdir tempdir = tempfile.mkdtemp() def teardown_module(): global tempdir if tempdir is not None and os.path.isdir(tempdir): shutil.rmtree(tempdir) tempdir = None # Generate some basic arrays to test with. scalars = [ np.uint8, np.int8, np.uint16, np.int16, np.uint32, np.int32, np.uint64, np.int64, np.float32, np.float64, np.complex64, np.complex128, object, ] basic_arrays = [] for scalar in scalars: for endian in '<>': dtype = np.dtype(scalar).newbyteorder(endian) basic = np.arange(1500).astype(dtype) basic_arrays.extend([ # Empty np.array([], dtype=dtype), # Rank-0 np.array(10, dtype=dtype), # 1-D basic, # 2-D C-contiguous basic.reshape((30, 50)), # 2-D F-contiguous basic.reshape((30, 50)).T, # 2-D non-contiguous basic.reshape((30, 50))[::-1, ::2], ]) # More complicated record arrays. 
# This is the structure of the table used for plain objects: # # +-+-+-+ # |x|y|z| # +-+-+-+ # Structure of a plain array description: Pdescr = [ ('x', 'i4', (2,)), ('y', 'f8', (2, 2)), ('z', 'u1')] # A plain list of tuples with values for testing: PbufferT = [ # x y z ([3, 2], [[6., 4.], [6., 4.]], 8), ([4, 3], [[7., 5.], [7., 5.]], 9), ] # This is the structure of the table used for nested objects (DON'T PANIC!): # # +-+---------------------------------+-----+----------+-+-+ # |x|Info |color|info |y|z| # | +-----+--+----------------+----+--+ +----+-----+ | | # | |value|y2|Info2 |name|z2| |Name|Value| | | # | | | +----+-----+--+--+ | | | | | | | # | | | |name|value|y3|z3| | | | | | | | # +-+-----+--+----+-----+--+--+----+--+-----+----+-----+-+-+ # # The corresponding nested array description: Ndescr = [ ('x', 'i4', (2,)), ('Info', [ ('value', 'c16'), ('y2', 'f8'), ('Info2', [ ('name', 'S2'), ('value', 'c16', (2,)), ('y3', 'f8', (2,)), ('z3', 'u4', (2,))]), ('name', 'S2'), ('z2', 'b1')]), ('color', 'S2'), ('info', [ ('Name', 'U8'), ('Value', 'c16')]), ('y', 'f8', (2, 2)), ('z', 'u1')] NbufferT = [ # x Info color info y z # value y2 Info2 name z2 Name Value # name value y3 z3 ([3, 2], (6j, 6., ('nn', [6j, 4j], [6., 4.], [1, 2]), 'NN', True), 'cc', ('NN', 6j), [[6., 4.], [6., 4.]], 8), ([4, 3], (7j, 7., ('oo', [7j, 5j], [7., 5.], [2, 1]), 'OO', False), 'dd', ('OO', 7j), [[7., 5.], [7., 5.]], 9), ] record_arrays = [ np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')), np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')), np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')), np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')), np.zeros(1, dtype=[('c', ('<f8', (5,)), (2,))]) ] #BytesIO that reads a random number of bytes at a time class BytesIOSRandomSize(BytesIO): def read(self, size=None): import random size = random.randint(1, size) return super(BytesIOSRandomSize, self).read(size) def roundtrip(arr): f = BytesIO() format.write_array(f, arr) f2 = BytesIO(f.getvalue()) arr2 = format.read_array(f2, allow_pickle=True) return arr2 def roundtrip_randsize(arr): f = BytesIO() format.write_array(f, arr) f2 = BytesIOSRandomSize(f.getvalue()) arr2 = format.read_array(f2) return arr2 def roundtrip_truncated(arr): f = BytesIO() format.write_array(f, arr) #BytesIO is one byte short f2 = BytesIO(f.getvalue()[0:-1]) arr2 = format.read_array(f2) return arr2 def assert_equal_(o1, o2): assert_(o1 == o2) def test_roundtrip(): for arr in basic_arrays + record_arrays: arr2 = roundtrip(arr) assert_array_equal(arr, arr2) def test_roundtrip_randsize(): for arr in basic_arrays + record_arrays: if arr.dtype != object: arr2 = roundtrip_randsize(arr) assert_array_equal(arr, arr2) def test_roundtrip_truncated(): for arr in basic_arrays: if arr.dtype != object: assert_raises(ValueError, roundtrip_truncated, arr) def test_long_str(): # check items larger than internal buffer size, gh-4027 long_str_arr = np.ones(1, dtype=np.dtype((str, format.BUFFER_SIZE + 1))) long_str_arr2 = roundtrip(long_str_arr) assert_array_equal(long_str_arr, long_str_arr2) @pytest.mark.slow def test_memmap_roundtrip(): # Fixme: used to crash on windows if not (sys.platform == 'win32' or sys.platform == 'cygwin'): for arr in basic_arrays + record_arrays: if arr.dtype.hasobject: # Skip these since they can't be mmap'ed. continue # Write it out normally and through mmap. 
nfn = os.path.join(tempdir, 'normal.npy') mfn = os.path.join(tempdir, 'memmap.npy') fp = open(nfn, 'wb') try: format.write_array(fp, arr) finally: fp.close() fortran_order = ( arr.flags.f_contiguous and not arr.flags.c_contiguous) ma = format.open_memmap(mfn, mode='w+', dtype=arr.dtype, shape=arr.shape, fortran_order=fortran_order) ma[...] = arr del ma # Check that both of these files' contents are the same. fp = open(nfn, 'rb') normal_bytes = fp.read() fp.close() fp = open(mfn, 'rb') memmap_bytes = fp.read() fp.close() assert_equal_(normal_bytes, memmap_bytes) # Check that reading the file using memmap works. ma = format.open_memmap(nfn, mode='r') del ma def test_compressed_roundtrip(): arr = np.random.rand(200, 200) npz_file = os.path.join(tempdir, 'compressed.npz') np.savez_compressed(npz_file, arr=arr) arr1 = np.load(npz_file)['arr'] assert_array_equal(arr, arr1) # aligned dt1 = np.dtype('i1, i4, i1', align=True) # non-aligned, explicit offsets dt2 = np.dtype({'names': ['a', 'b'], 'formats': ['i4', 'i4'], 'offsets': [1, 6]}) # nested struct-in-struct dt3 = np.dtype({'names': ['c', 'd'], 'formats': ['i4', dt2]}) # field with '' name dt4 = np.dtype({'names': ['a', '', 'b'], 'formats': ['i4']*3}) # titles dt5 = np.dtype({'names': ['a', 'b'], 'formats': ['i4', 'i4'], 'offsets': [1, 6], 'titles': ['aa', 'bb']}) # empty dt6 = np.dtype({'names': [], 'formats': [], 'itemsize': 8}) @pytest.mark.parametrize("dt", [dt1, dt2, dt3, dt4, dt5, dt6]) def test_load_padded_dtype(dt): arr = np.zeros(3, dt) for i in range(3): arr[i] = i + 5 npz_file = os.path.join(tempdir, 'aligned.npz') np.savez(npz_file, arr=arr) arr1 = np.load(npz_file)['arr'] assert_array_equal(arr, arr1) def test_python2_python3_interoperability(): fname = 'win64python2.npy' path = os.path.join(os.path.dirname(__file__), 'data', fname) data = np.load(path) assert_array_equal(data, np.ones(2)) def test_pickle_python2_python3(): # Test that loading object arrays saved on Python 2 works both on # Python 2 and Python 3 and vice versa data_dir = os.path.join(os.path.dirname(__file__), 'data') expected = np.array([None, range, u'\u512a\u826f', b'\xe4\xb8\x8d\xe8\x89\xaf'], dtype=object) for fname in ['py2-objarr.npy', 'py2-objarr.npz', 'py3-objarr.npy', 'py3-objarr.npz']: path = os.path.join(data_dir, fname) for encoding in ['bytes', 'latin1']: data_f = np.load(path, allow_pickle=True, encoding=encoding) if fname.endswith('.npz'): data = data_f['x'] data_f.close() else: data = data_f if encoding == 'latin1' and fname.startswith('py2'): assert_(isinstance(data[3], str)) assert_array_equal(data[:-1], expected[:-1]) # mojibake occurs assert_array_equal(data[-1].encode(encoding), expected[-1]) else: assert_(isinstance(data[3], bytes)) assert_array_equal(data, expected) if fname.startswith('py2'): if fname.endswith('.npz'): data = np.load(path, allow_pickle=True) assert_raises(UnicodeError, data.__getitem__, 'x') data.close() data = np.load(path, allow_pickle=True, fix_imports=False, encoding='latin1') assert_raises(ImportError, data.__getitem__, 'x') data.close() else: assert_raises(UnicodeError, np.load, path, allow_pickle=True) assert_raises(ImportError, np.load, path, allow_pickle=True, fix_imports=False, encoding='latin1') def test_pickle_disallow(): data_dir = os.path.join(os.path.dirname(__file__), 'data') path = os.path.join(data_dir, 'py2-objarr.npy') assert_raises(ValueError, np.load, path, allow_pickle=False, encoding='latin1') path = os.path.join(data_dir, 'py2-objarr.npz') f = np.load(path, allow_pickle=False, encoding='latin1') 
assert_raises(ValueError, f.__getitem__, 'x') path = os.path.join(tempdir, 'pickle-disabled.npy') assert_raises(ValueError, np.save, path, np.array([None], dtype=object), allow_pickle=False) @pytest.mark.parametrize('dt', [ np.dtype(np.dtype([('a', np.int8), ('b', np.int16), ('c', np.int32), ], align=True), (3,)), np.dtype([('x', np.dtype({'names':['a','b'], 'formats':['i1','i1'], 'offsets':[0,4], 'itemsize':8, }, (3,)), (4,), )]), np.dtype([('x', ('<f8', (5,)), (2,), )]), np.dtype([('x', np.dtype(( np.dtype(( np.dtype({'names':['a','b'], 'formats':['i1','i1'], 'offsets':[0,4], 'itemsize':8}), (3,) )), (4,) ))) ]), np.dtype([ ('a', np.dtype(( np.dtype(( np.dtype(( np.dtype([ ('a', int), ('b', np.dtype({'names':['a','b'], 'formats':['i1','i1'], 'offsets':[0,4], 'itemsize':8})), ]), (3,), )), (4,), )), (5,), ))) ]), ]) def test_descr_to_dtype(dt): dt1 = format.descr_to_dtype(dt.descr) assert_equal_(dt1, dt) arr1 = np.zeros(3, dt) arr2 = roundtrip(arr1) assert_array_equal(arr1, arr2) def test_version_2_0(): f = BytesIO() # requires more than 2 byte for header dt = [(("%d" % i) * 100, float) for i in range(500)] d = np.ones(1000, dtype=dt) format.write_array(f, d, version=(2, 0)) with warnings.catch_warnings(record=True) as w: warnings.filterwarnings('always', '', UserWarning) format.write_array(f, d) assert_(w[0].category is UserWarning) # check alignment of data portion f.seek(0) header = f.readline() assert_(len(header) % format.ARRAY_ALIGN == 0) f.seek(0) n = format.read_array(f) assert_array_equal(d, n) # 1.0 requested but data cannot be saved this way assert_raises(ValueError, format.write_array, f, d, (1, 0)) @pytest.mark.slow def test_version_2_0_memmap(): # requires more than 2 byte for header dt = [(("%d" % i) * 100, float) for i in range(500)] d = np.ones(1000, dtype=dt) tf = tempfile.mktemp('', 'mmap', dir=tempdir) # 1.0 requested but data cannot be saved this way assert_raises(ValueError, format.open_memmap, tf, mode='w+', dtype=d.dtype, shape=d.shape, version=(1, 0)) ma = format.open_memmap(tf, mode='w+', dtype=d.dtype, shape=d.shape, version=(2, 0)) ma[...] = d del ma with warnings.catch_warnings(record=True) as w: warnings.filterwarnings('always', '', UserWarning) ma = format.open_memmap(tf, mode='w+', dtype=d.dtype, shape=d.shape, version=None) assert_(w[0].category is UserWarning) ma[...] = d del ma ma = format.open_memmap(tf, mode='r') assert_array_equal(ma, d) def test_write_version(): f = BytesIO() arr = np.arange(1) # These should pass. format.write_array(f, arr, version=(1, 0)) format.write_array(f, arr) format.write_array(f, arr, version=None) format.write_array(f, arr) format.write_array(f, arr, version=(2, 0)) format.write_array(f, arr) # These should all fail. 
bad_versions = [ (1, 1), (0, 0), (0, 1), (2, 2), (255, 255), ] for version in bad_versions: with assert_raises_regex(ValueError, 'we only support format version.*'): format.write_array(f, arr, version=version) bad_version_magic = [ b'\x93NUMPY\x01\x01', b'\x93NUMPY\x00\x00', b'\x93NUMPY\x00\x01', b'\x93NUMPY\x02\x00', b'\x93NUMPY\x02\x02', b'\x93NUMPY\xff\xff', ] malformed_magic = [ b'\x92NUMPY\x01\x00', b'\x00NUMPY\x01\x00', b'\x93numpy\x01\x00', b'\x93MATLB\x01\x00', b'\x93NUMPY\x01', b'\x93NUMPY', b'', ] def test_read_magic(): s1 = BytesIO() s2 = BytesIO() arr = np.ones((3, 6), dtype=float) format.write_array(s1, arr, version=(1, 0)) format.write_array(s2, arr, version=(2, 0)) s1.seek(0) s2.seek(0) version1 = format.read_magic(s1) version2 = format.read_magic(s2) assert_(version1 == (1, 0)) assert_(version2 == (2, 0)) assert_(s1.tell() == format.MAGIC_LEN) assert_(s2.tell() == format.MAGIC_LEN) def test_read_magic_bad_magic(): for magic in malformed_magic: f = BytesIO(magic) assert_raises(ValueError, format.read_array, f) def test_read_version_1_0_bad_magic(): for magic in bad_version_magic + malformed_magic: f = BytesIO(magic) assert_raises(ValueError, format.read_array, f) def test_bad_magic_args(): assert_raises(ValueError, format.magic, -1, 1) assert_raises(ValueError, format.magic, 256, 1) assert_raises(ValueError, format.magic, 1, -1) assert_raises(ValueError, format.magic, 1, 256) def test_large_header(): s = BytesIO() d = {'a': 1, 'b': 2} format.write_array_header_1_0(s, d) s = BytesIO() d = {'a': 1, 'b': 2, 'c': 'x'*256*256} assert_raises(ValueError, format.write_array_header_1_0, s, d) def test_read_array_header_1_0(): s = BytesIO() arr = np.ones((3, 6), dtype=float) format.write_array(s, arr, version=(1, 0)) s.seek(format.MAGIC_LEN) shape, fortran, dtype = format.read_array_header_1_0(s) assert_(s.tell() % format.ARRAY_ALIGN == 0) assert_((shape, fortran, dtype) == ((3, 6), False, float)) def test_read_array_header_2_0(): s = BytesIO() arr = np.ones((3, 6), dtype=float) format.write_array(s, arr, version=(2, 0)) s.seek(format.MAGIC_LEN) shape, fortran, dtype = format.read_array_header_2_0(s) assert_(s.tell() % format.ARRAY_ALIGN == 0) assert_((shape, fortran, dtype) == ((3, 6), False, float)) def test_bad_header(): # header of length less than 2 should fail s = BytesIO() assert_raises(ValueError, format.read_array_header_1_0, s) s = BytesIO(b'1') assert_raises(ValueError, format.read_array_header_1_0, s) # header shorter than indicated size should fail s = BytesIO(b'\x01\x00') assert_raises(ValueError, format.read_array_header_1_0, s) # headers without the exact keys required should fail d = {"shape": (1, 2), "descr": "x"} s = BytesIO() format.write_array_header_1_0(s, d) assert_raises(ValueError, format.read_array_header_1_0, s) d = {"shape": (1, 2), "fortran_order": False, "descr": "x", "extrakey": -1} s = BytesIO() format.write_array_header_1_0(s, d) assert_raises(ValueError, format.read_array_header_1_0, s) def test_large_file_support(): if (sys.platform == 'win32' or sys.platform == 'cygwin'): pytest.skip("Unknown if Windows has sparse filesystems") # try creating a large sparse file tf_name = os.path.join(tempdir, 'sparse_file') try: # seek past end would work too, but linux truncate somewhat # increases the chances that we have a sparse filesystem and can # avoid actually writing 5GB import subprocess as sp sp.check_call(["truncate", "-s", "5368709120", tf_name]) except Exception: pytest.skip("Could not create 5GB large file") # write a small array to the end with 
open(tf_name, "wb") as f: f.seek(5368709120) d = np.arange(5) np.save(f, d) # read it back with open(tf_name, "rb") as f: f.seek(5368709120) r = np.load(f) assert_array_equal(r, d) @pytest.mark.skipif(np.dtype(np.intp).itemsize < 8, reason="test requires 64-bit system") @pytest.mark.slow def test_large_archive(): # Regression test for product of saving arrays with dimensions of array # having a product that doesn't fit in int32. See gh-7598 for details. try: a = np.empty((2**30, 2), dtype=np.uint8) except MemoryError: pytest.skip("Could not create large file") fname = os.path.join(tempdir, "large_archive") with open(fname, "wb") as f: np.savez(f, arr=a) with open(fname, "rb") as f: new_a = np.load(f)["arr"] assert_(a.shape == new_a.shape) def test_empty_npz(): # Test for gh-9989 fname = os.path.join(tempdir, "nothing.npz") np.savez(fname) np.load(fname) def test_unicode_field_names(): # gh-7391 arr = np.array([ (1, 3), (1, 2), (1, 3), (1, 2) ], dtype=[ ('int', int), (u'\N{CJK UNIFIED IDEOGRAPH-6574}\N{CJK UNIFIED IDEOGRAPH-5F62}', int) ]) fname = os.path.join(tempdir, "unicode.npy") with open(fname, 'wb') as f: format.write_array(f, arr, version=(3, 0)) with open(fname, 'rb') as f: arr2 = format.read_array(f) assert_array_equal(arr, arr2) # notifies the user that 3.0 is selected with open(fname, 'wb') as f: with assert_warns(UserWarning): format.write_array(f, arr, version=None) @pytest.mark.parametrize('dt, fail', [ (np.dtype({'names': ['a', 'b'], 'formats': [float, np.dtype('S3', metadata={'some': 'stuff'})]}), True), (np.dtype(int, metadata={'some': 'stuff'}), False), (np.dtype([('subarray', (int, (2,)))], metadata={'some': 'stuff'}), False), # recursive: metadata on the field of a dtype (np.dtype({'names': ['a', 'b'], 'formats': [ float, np.dtype({'names': ['c'], 'formats': [np.dtype(int, metadata={})]}) ]}), False) ]) def test_metadata_dtype(dt, fail): # gh-14142 arr = np.ones(10, dtype=dt) buf = BytesIO() with assert_warns(UserWarning): np.save(buf, arr) buf.seek(0) if fail: with assert_raises(ValueError): np.load(buf) else: arr2 = np.load(buf) # BUG: assert_array_equal does not check metadata from numpy.lib.format import _has_metadata assert_array_equal(arr, arr2) assert _has_metadata(arr.dtype) assert not _has_metadata(arr2.dtype)
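The tests above all drive the same public round-trip. As a minimal sketch of what they exercise (illustrative only, not part of the test suite; assumes a NumPy release where format versions (1, 0), (2, 0) and (3, 0) are all implemented), this shows how write_array picks the lowest version that can represent the header and how read_magic/read_array recover it:

# Minimal sketch, not part of the test suite: round-trip an array through
# numpy.lib.format and inspect the version magic.
from io import BytesIO

import numpy as np
from numpy.lib import format as npy_format

buf = BytesIO()
arr = np.arange(10, dtype='<i4')

# With version=None, write_array chooses the lowest format version whose
# header can describe the array; a plain dtype fits in a (1, 0) header.
npy_format.write_array(buf, arr, version=None)

buf.seek(0)
assert npy_format.read_magic(buf) == (1, 0)

buf.seek(0)
assert np.array_equal(npy_format.read_array(buf), arr)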
""" Implementation of optimized einsum. """ import itertools from numpy.core.multiarray import c_einsum from numpy.core.numeric import asanyarray, tensordot from numpy.core.overrides import array_function_dispatch __all__ = ['einsum', 'einsum_path'] einsum_symbols = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' einsum_symbols_set = set(einsum_symbols) def _flop_count(idx_contraction, inner, num_terms, size_dictionary): """ Computes the number of FLOPS in the contraction. Parameters ---------- idx_contraction : iterable The indices involved in the contraction inner : bool Does this contraction require an inner product? num_terms : int The number of terms in a contraction size_dictionary : dict The size of each of the indices in idx_contraction Returns ------- flop_count : int The total number of FLOPS required for the contraction. Examples -------- >>> _flop_count('abc', False, 1, {'a': 2, 'b':3, 'c':5}) 30 >>> _flop_count('abc', True, 2, {'a': 2, 'b':3, 'c':5}) 60 """ overall_size = _compute_size_by_dict(idx_contraction, size_dictionary) op_factor = max(1, num_terms - 1) if inner: op_factor += 1 return overall_size * op_factor def _compute_size_by_dict(indices, idx_dict): """ Computes the product of the elements in indices based on the dictionary idx_dict. Parameters ---------- indices : iterable Indices to base the product on. idx_dict : dictionary Dictionary of index sizes Returns ------- ret : int The resulting product. Examples -------- >>> _compute_size_by_dict('abbc', {'a': 2, 'b':3, 'c':5}) 90 """ ret = 1 for i in indices: ret *= idx_dict[i] return ret def _find_contraction(positions, input_sets, output_set): """ Finds the contraction for a given set of input and output sets. Parameters ---------- positions : iterable Integer positions of terms used in the contraction. input_sets : list List of sets that represent the lhs side of the einsum subscript output_set : set Set that represents the rhs side of the overall einsum subscript Returns ------- new_result : set The indices of the resulting contraction remaining : list List of sets that have not been contracted, the new set is appended to the end of this list idx_removed : set Indices removed from the entire contraction idx_contraction : set The indices used in the current contraction Examples -------- # A simple dot product test case >>> pos = (0, 1) >>> isets = [set('ab'), set('bc')] >>> oset = set('ac') >>> _find_contraction(pos, isets, oset) ({'a', 'c'}, [{'a', 'c'}], {'b'}, {'a', 'b', 'c'}) # A more complex case with additional terms in the contraction >>> pos = (0, 2) >>> isets = [set('abd'), set('ac'), set('bdc')] >>> oset = set('ac') >>> _find_contraction(pos, isets, oset) ({'a', 'c'}, [{'a', 'c'}, {'a', 'c'}], {'b', 'd'}, {'a', 'b', 'c', 'd'}) """ idx_contract = set() idx_remain = output_set.copy() remaining = [] for ind, value in enumerate(input_sets): if ind in positions: idx_contract |= value else: remaining.append(value) idx_remain |= value new_result = idx_remain & idx_contract idx_removed = (idx_contract - new_result) remaining.append(new_result) return (new_result, remaining, idx_removed, idx_contract) def _optimal_path(input_sets, output_set, idx_dict, memory_limit): """ Computes all possible pair contractions, sieves the results based on ``memory_limit`` and returns the lowest cost path. This algorithm scales factorial with respect to the elements in the list ``input_sets``. 
Parameters ---------- input_sets : list List of sets that represent the lhs side of the einsum subscript output_set : set Set that represents the rhs side of the overall einsum subscript idx_dict : dictionary Dictionary of index sizes memory_limit : int The maximum number of elements in a temporary array Returns ------- path : list The optimal contraction order within the memory limit constraint. Examples -------- >>> isets = [set('abd'), set('ac'), set('bdc')] >>> oset = set() >>> idx_sizes = {'a': 1, 'b':2, 'c':3, 'd':4} >>> _optimal_path(isets, oset, idx_sizes, 5000) [(0, 2), (0, 1)] """ full_results = [(0, [], input_sets)] for iteration in range(len(input_sets) - 1): iter_results = [] # Compute all unique pairs for curr in full_results: cost, positions, remaining = curr for con in itertools.combinations(range(len(input_sets) - iteration), 2): # Find the contraction cont = _find_contraction(con, remaining, output_set) new_result, new_input_sets, idx_removed, idx_contract = cont # Sieve the results based on memory_limit new_size = _compute_size_by_dict(new_result, idx_dict) if new_size > memory_limit: continue # Build (total_cost, positions, indices_remaining) total_cost = cost + _flop_count(idx_contract, idx_removed, len(con), idx_dict) new_pos = positions + [con] iter_results.append((total_cost, new_pos, new_input_sets)) # Update combinatorial list, if we did not find anything return best # path + remaining contractions if iter_results: full_results = iter_results else: path = min(full_results, key=lambda x: x[0])[1] path += [tuple(range(len(input_sets) - iteration))] return path # If we have not found anything return single einsum contraction if len(full_results) == 0: return [tuple(range(len(input_sets)))] path = min(full_results, key=lambda x: x[0])[1] return path def _parse_possible_contraction(positions, input_sets, output_set, idx_dict, memory_limit, path_cost, naive_cost): """Compute the cost (removed size + flops) and resultant indices for performing the contraction specified by ``positions``. Parameters ---------- positions : tuple of int The locations of the proposed tensors to contract. input_sets : list of sets The indices found on each tensors. output_set : set The output indices of the expression. idx_dict : dict Mapping of each index to its size. memory_limit : int The total allowed size for an intermediary tensor. path_cost : int The contraction cost so far. naive_cost : int The cost of the unoptimized expression. Returns ------- cost : (int, int) A tuple containing the size of any indices removed, and the flop cost. positions : tuple of int The locations of the proposed tensors to contract. new_input_sets : list of sets The resulting new list of indices if this proposed contraction is performed. 
""" # Find the contraction contract = _find_contraction(positions, input_sets, output_set) idx_result, new_input_sets, idx_removed, idx_contract = contract # Sieve the results based on memory_limit new_size = _compute_size_by_dict(idx_result, idx_dict) if new_size > memory_limit: return None # Build sort tuple old_sizes = (_compute_size_by_dict(input_sets[p], idx_dict) for p in positions) removed_size = sum(old_sizes) - new_size # NB: removed_size used to be just the size of any removed indices i.e.: # helpers.compute_size_by_dict(idx_removed, idx_dict) cost = _flop_count(idx_contract, idx_removed, len(positions), idx_dict) sort = (-removed_size, cost) # Sieve based on total cost as well if (path_cost + cost) > naive_cost: return None # Add contraction to possible choices return [sort, positions, new_input_sets] def _update_other_results(results, best): """Update the positions and provisional input_sets of ``results`` based on performing the contraction result ``best``. Remove any involving the tensors contracted. Parameters ---------- results : list List of contraction results produced by ``_parse_possible_contraction``. best : list The best contraction of ``results`` i.e. the one that will be performed. Returns ------- mod_results : list The list of modified results, updated with outcome of ``best`` contraction. """ best_con = best[1] bx, by = best_con mod_results = [] for cost, (x, y), con_sets in results: # Ignore results involving tensors just contracted if x in best_con or y in best_con: continue # Update the input_sets del con_sets[by - int(by > x) - int(by > y)] del con_sets[bx - int(bx > x) - int(bx > y)] con_sets.insert(-1, best[2][-1]) # Update the position indices mod_con = x - int(x > bx) - int(x > by), y - int(y > bx) - int(y > by) mod_results.append((cost, mod_con, con_sets)) return mod_results def _greedy_path(input_sets, output_set, idx_dict, memory_limit): """ Finds the path by contracting the best pair until the input list is exhausted. The best pair is found by minimizing the tuple ``(-prod(indices_removed), cost)``. What this amounts to is prioritizing matrix multiplication or inner product operations, then Hadamard like operations, and finally outer operations. Outer products are limited by ``memory_limit``. This algorithm scales cubically with respect to the number of elements in the list ``input_sets``. Parameters ---------- input_sets : list List of sets that represent the lhs side of the einsum subscript output_set : set Set that represents the rhs side of the overall einsum subscript idx_dict : dictionary Dictionary of index sizes memory_limit_limit : int The maximum number of elements in a temporary array Returns ------- path : list The greedy contraction order within the memory limit constraint. 
    Examples
    --------
    >>> isets = [set('abd'), set('ac'), set('bdc')]
    >>> oset = set()
    >>> idx_sizes = {'a': 1, 'b':2, 'c':3, 'd':4}
    >>> _greedy_path(isets, oset, idx_sizes, 5000)
    [(0, 2), (0, 1)]
    """

    # Handle trivial cases that leaked through
    if len(input_sets) == 1:
        return [(0,)]
    elif len(input_sets) == 2:
        return [(0, 1)]

    # Build up a naive cost
    contract = _find_contraction(range(len(input_sets)), input_sets, output_set)
    idx_result, new_input_sets, idx_removed, idx_contract = contract
    naive_cost = _flop_count(idx_contract, idx_removed, len(input_sets), idx_dict)

    # Initially iterate over all pairs
    comb_iter = itertools.combinations(range(len(input_sets)), 2)
    known_contractions = []

    path_cost = 0
    path = []

    for iteration in range(len(input_sets) - 1):

        # Iterate over all pairs on the first step, only previously found
        # pairs on subsequent steps
        for positions in comb_iter:

            # Always initially ignore outer products
            if input_sets[positions[0]].isdisjoint(input_sets[positions[1]]):
                continue

            result = _parse_possible_contraction(positions, input_sets,
                                                 output_set, idx_dict,
                                                 memory_limit, path_cost,
                                                 naive_cost)
            if result is not None:
                known_contractions.append(result)

        # If we do not have an inner contraction, rescan pairs
        # including outer products
        if len(known_contractions) == 0:

            # Then check the outer products
            for positions in itertools.combinations(range(len(input_sets)), 2):
                result = _parse_possible_contraction(positions, input_sets,
                                                     output_set, idx_dict,
                                                     memory_limit, path_cost,
                                                     naive_cost)
                if result is not None:
                    known_contractions.append(result)

            # If we still did not find any remaining contractions,
            # default back to einsum like behavior
            if len(known_contractions) == 0:
                path.append(tuple(range(len(input_sets))))
                break

        # Sort based on first index
        best = min(known_contractions, key=lambda x: x[0])

        # Now propagate as many unused contractions as possible to
        # the next iteration
        known_contractions = _update_other_results(known_contractions, best)

        # Next iteration only compute contractions with the new tensor
        # All other contractions have been accounted for
        input_sets = best[2]
        new_tensor_pos = len(input_sets) - 1
        comb_iter = ((i, new_tensor_pos) for i in range(new_tensor_pos))

        # Update path and total cost
        path.append(best[1])
        path_cost += best[0][1]

    return path


def _can_dot(inputs, result, idx_removed):
    """
    Checks if we can use BLAS (np.tensordot) call and if it is beneficial
    to do so.

    Parameters
    ----------
    inputs : list of str
        Specifies the subscripts for summation.
    result : str
        Resulting summation.
    idx_removed : set
        Indices that are removed in the summation

    Returns
    -------
    type : bool
        Returns True if BLAS should and can be used, else False

    Notes
    -----
    If the operation is BLAS level 1 or 2 and is not already aligned
    we default back to einsum as the memory movement to copy is more
    costly than the operation itself.
Examples -------- # Standard GEMM operation >>> _can_dot(['ij', 'jk'], 'ik', set('j')) True # Can use the standard BLAS, but requires odd data movement >>> _can_dot(['ijj', 'jk'], 'ik', set('j')) False # DDOT where the memory is not aligned >>> _can_dot(['ijk', 'ikj'], '', set('ijk')) False """ # All `dot` calls remove indices if len(idx_removed) == 0: return False # BLAS can only handle two operands if len(inputs) != 2: return False input_left, input_right = inputs for c in set(input_left + input_right): # can't deal with repeated indices on same input or more than 2 total nl, nr = input_left.count(c), input_right.count(c) if (nl > 1) or (nr > 1) or (nl + nr > 2): return False # can't do implicit summation or dimension collapse e.g. # "ab,bc->c" (implicitly sum over 'a') # "ab,ca->ca" (take diagonal of 'a') if nl + nr - 1 == int(c in result): return False # Build a few temporaries set_left = set(input_left) set_right = set(input_right) keep_left = set_left - idx_removed keep_right = set_right - idx_removed rs = len(idx_removed) # At this point we are a DOT, GEMV, or GEMM operation # Handle inner products # DDOT with aligned data if input_left == input_right: return True # DDOT without aligned data (better to use einsum) if set_left == set_right: return False # Handle the 4 possible (aligned) GEMV or GEMM cases # GEMM or GEMV no transpose if input_left[-rs:] == input_right[:rs]: return True # GEMM or GEMV transpose both if input_left[:rs] == input_right[-rs:]: return True # GEMM or GEMV transpose right if input_left[-rs:] == input_right[-rs:]: return True # GEMM or GEMV transpose left if input_left[:rs] == input_right[:rs]: return True # Einsum is faster than GEMV if we have to copy data if not keep_left or not keep_right: return False # We are a matrix-matrix product, but we need to copy data return True def _parse_einsum_input(operands): """ A reproduction of einsum c side einsum parsing in python. Returns ------- input_strings : str Parsed input strings output_string : str Parsed output string operands : list of array_like The operands to use in the numpy contraction Examples -------- The operand list is simplified to reduce printing: >>> np.random.seed(123) >>> a = np.random.rand(4, 4) >>> b = np.random.rand(4, 4, 4) >>> _parse_einsum_input(('...a,...a->...', a, b)) ('za,xza', 'xz', [a, b]) # may vary >>> _parse_einsum_input((a, [Ellipsis, 0], b, [Ellipsis, 0])) ('za,xza', 'xz', [a, b]) # may vary """ if len(operands) == 0: raise ValueError("No input operands") if isinstance(operands[0], str): subscripts = operands[0].replace(" ", "") operands = [asanyarray(v) for v in operands[1:]] # Ensure all characters are valid for s in subscripts: if s in '.,->': continue if s not in einsum_symbols: raise ValueError("Character %s is not a valid symbol." % s) else: tmp_operands = list(operands) operand_list = [] subscript_list = [] for p in range(len(operands) // 2): operand_list.append(tmp_operands.pop(0)) subscript_list.append(tmp_operands.pop(0)) output_list = tmp_operands[-1] if len(tmp_operands) else None operands = [asanyarray(v) for v in operand_list] subscripts = "" last = len(subscript_list) - 1 for num, sub in enumerate(subscript_list): for s in sub: if s is Ellipsis: subscripts += "..." elif isinstance(s, int): subscripts += einsum_symbols[s] else: raise TypeError("For this input type lists must contain " "either int or Ellipsis") if num != last: subscripts += "," if output_list is not None: subscripts += "->" for s in output_list: if s is Ellipsis: subscripts += "..." 
elif isinstance(s, int): subscripts += einsum_symbols[s] else: raise TypeError("For this input type lists must contain " "either int or Ellipsis") # Check for proper "->" if ("-" in subscripts) or (">" in subscripts): invalid = (subscripts.count("-") > 1) or (subscripts.count(">") > 1) if invalid or (subscripts.count("->") != 1): raise ValueError("Subscripts can only contain one '->'.") # Parse ellipses if "." in subscripts: used = subscripts.replace(".", "").replace(",", "").replace("->", "") unused = list(einsum_symbols_set - set(used)) ellipse_inds = "".join(unused) longest = 0 if "->" in subscripts: input_tmp, output_sub = subscripts.split("->") split_subscripts = input_tmp.split(",") out_sub = True else: split_subscripts = subscripts.split(',') out_sub = False for num, sub in enumerate(split_subscripts): if "." in sub: if (sub.count(".") != 3) or (sub.count("...") != 1): raise ValueError("Invalid Ellipses.") # Take into account numerical values if operands[num].shape == (): ellipse_count = 0 else: ellipse_count = max(operands[num].ndim, 1) ellipse_count -= (len(sub) - 3) if ellipse_count > longest: longest = ellipse_count if ellipse_count < 0: raise ValueError("Ellipses lengths do not match.") elif ellipse_count == 0: split_subscripts[num] = sub.replace('...', '') else: rep_inds = ellipse_inds[-ellipse_count:] split_subscripts[num] = sub.replace('...', rep_inds) subscripts = ",".join(split_subscripts) if longest == 0: out_ellipse = "" else: out_ellipse = ellipse_inds[-longest:] if out_sub: subscripts += "->" + output_sub.replace("...", out_ellipse) else: # Special care for outputless ellipses output_subscript = "" tmp_subscripts = subscripts.replace(",", "") for s in sorted(set(tmp_subscripts)): if s not in (einsum_symbols): raise ValueError("Character %s is not a valid symbol." % s) if tmp_subscripts.count(s) == 1: output_subscript += s normal_inds = ''.join(sorted(set(output_subscript) - set(out_ellipse))) subscripts += "->" + out_ellipse + normal_inds # Build output string if does not exist if "->" in subscripts: input_subscripts, output_subscript = subscripts.split("->") else: input_subscripts = subscripts # Build output subscripts tmp_subscripts = subscripts.replace(",", "") output_subscript = "" for s in sorted(set(tmp_subscripts)): if s not in einsum_symbols: raise ValueError("Character %s is not a valid symbol." % s) if tmp_subscripts.count(s) == 1: output_subscript += s # Make sure output subscripts are in the input for char in output_subscript: if char not in input_subscripts: raise ValueError("Output character %s did not appear in the input" % char) # Make sure number operands is equivalent to the number of terms if len(input_subscripts.split(',')) != len(operands): raise ValueError("Number of einsum subscripts must be equal to the " "number of operands.") return (input_subscripts, output_subscript, operands) def _einsum_path_dispatcher(*operands, optimize=None, einsum_call=None): # NOTE: technically, we should only dispatch on array-like arguments, not # subscripts (given as strings). But separating operands into # arrays/subscripts is a little tricky/slow (given einsum's two supported # signatures), so as a practical shortcut we dispatch on everything. # Strings will be ignored for dispatching since they don't define # __array_function__. 
return operands @array_function_dispatch(_einsum_path_dispatcher, module='numpy') def einsum_path(*operands, optimize='greedy', einsum_call=False): """ einsum_path(subscripts, *operands, optimize='greedy') Evaluates the lowest cost contraction order for an einsum expression by considering the creation of intermediate arrays. Parameters ---------- subscripts : str Specifies the subscripts for summation. *operands : list of array_like These are the arrays for the operation. optimize : {bool, list, tuple, 'greedy', 'optimal'} Choose the type of path. If a tuple is provided, the second argument is assumed to be the maximum intermediate size created. If only a single argument is provided the largest input or output array size is used as a maximum intermediate size. * if a list is given that starts with ``einsum_path``, uses this as the contraction path * if False no optimization is taken * if True defaults to the 'greedy' algorithm * 'optimal' An algorithm that combinatorially explores all possible ways of contracting the listed tensors and choosest the least costly path. Scales exponentially with the number of terms in the contraction. * 'greedy' An algorithm that chooses the best pair contraction at each step. Effectively, this algorithm searches the largest inner, Hadamard, and then outer products at each step. Scales cubically with the number of terms in the contraction. Equivalent to the 'optimal' path for most contractions. Default is 'greedy'. Returns ------- path : list of tuples A list representation of the einsum path. string_repr : str A printable representation of the einsum path. Notes ----- The resulting path indicates which terms of the input contraction should be contracted first, the result of this contraction is then appended to the end of the contraction list. This list can then be iterated over until all intermediate contractions are complete. See Also -------- einsum, linalg.multi_dot Examples -------- We can begin with a chain dot example. In this case, it is optimal to contract the ``b`` and ``c`` tensors first as represented by the first element of the path ``(1, 2)``. The resulting tensor is added to the end of the contraction and the remaining contraction ``(0, 1)`` is then completed. >>> np.random.seed(123) >>> a = np.random.rand(2, 2) >>> b = np.random.rand(2, 5) >>> c = np.random.rand(5, 2) >>> path_info = np.einsum_path('ij,jk,kl->il', a, b, c, optimize='greedy') >>> print(path_info[0]) ['einsum_path', (1, 2), (0, 1)] >>> print(path_info[1]) Complete contraction: ij,jk,kl->il # may vary Naive scaling: 4 Optimized scaling: 3 Naive FLOP count: 1.600e+02 Optimized FLOP count: 5.600e+01 Theoretical speedup: 2.857 Largest intermediate: 4.000e+00 elements ------------------------------------------------------------------------- scaling current remaining ------------------------------------------------------------------------- 3 kl,jk->jl ij,jl->il 3 jl,ij->il il->il A more complex index transformation example. >>> I = np.random.rand(10, 10, 10, 10) >>> C = np.random.rand(10, 10) >>> path_info = np.einsum_path('ea,fb,abcd,gc,hd->efgh', C, C, I, C, C, ... 
optimize='greedy') >>> print(path_info[0]) ['einsum_path', (0, 2), (0, 3), (0, 2), (0, 1)] >>> print(path_info[1]) Complete contraction: ea,fb,abcd,gc,hd->efgh # may vary Naive scaling: 8 Optimized scaling: 5 Naive FLOP count: 8.000e+08 Optimized FLOP count: 8.000e+05 Theoretical speedup: 1000.000 Largest intermediate: 1.000e+04 elements -------------------------------------------------------------------------- scaling current remaining -------------------------------------------------------------------------- 5 abcd,ea->bcde fb,gc,hd,bcde->efgh 5 bcde,fb->cdef gc,hd,cdef->efgh 5 cdef,gc->defg hd,defg->efgh 5 defg,hd->efgh efgh->efgh """ # Figure out what the path really is path_type = optimize if path_type is True: path_type = 'greedy' if path_type is None: path_type = False memory_limit = None # No optimization or a named path algorithm if (path_type is False) or isinstance(path_type, str): pass # Given an explicit path elif len(path_type) and (path_type[0] == 'einsum_path'): pass # Path tuple with memory limit elif ((len(path_type) == 2) and isinstance(path_type[0], str) and isinstance(path_type[1], (int, float))): memory_limit = int(path_type[1]) path_type = path_type[0] else: raise TypeError("Did not understand the path: %s" % str(path_type)) # Hidden option, only einsum should call this einsum_call_arg = einsum_call # Python side parsing input_subscripts, output_subscript, operands = _parse_einsum_input(operands) # Build a few useful list and sets input_list = input_subscripts.split(',') input_sets = [set(x) for x in input_list] output_set = set(output_subscript) indices = set(input_subscripts.replace(',', '')) # Get length of each unique dimension and ensure all dimensions are correct dimension_dict = {} broadcast_indices = [[] for x in range(len(input_list))] for tnum, term in enumerate(input_list): sh = operands[tnum].shape if len(sh) != len(term): raise ValueError("Einstein sum subscript %s does not contain the " "correct number of indices for operand %d." % (input_subscripts[tnum], tnum)) for cnum, char in enumerate(term): dim = sh[cnum] # Build out broadcast indices if dim == 1: broadcast_indices[tnum].append(char) if char in dimension_dict.keys(): # For broadcasting cases we always want the largest dim size if dimension_dict[char] == 1: dimension_dict[char] = dim elif dim not in (1, dimension_dict[char]): raise ValueError("Size of label '%s' for operand %d (%d) " "does not match previous terms (%d)." 
                                     % (char, tnum, dimension_dict[char], dim))
            else:
                dimension_dict[char] = dim

    # Convert broadcast inds to sets
    broadcast_indices = [set(x) for x in broadcast_indices]

    # Compute size of each input array plus the output array
    size_list = [_compute_size_by_dict(term, dimension_dict)
                 for term in input_list + [output_subscript]]
    max_size = max(size_list)

    if memory_limit is None:
        memory_arg = max_size
    else:
        memory_arg = memory_limit

    # Compute naive cost
    # This isn't quite right, need to look into exactly how einsum does this
    inner_product = (sum(len(x) for x in input_sets) - len(indices)) > 0
    naive_cost = _flop_count(indices, inner_product, len(input_list), dimension_dict)

    # Compute the path
    if (path_type is False) or (len(input_list) in [1, 2]) or (indices == output_set):
        # Nothing to be optimized, leave it to einsum
        path = [tuple(range(len(input_list)))]
    elif path_type == "greedy":
        path = _greedy_path(input_sets, output_set, dimension_dict, memory_arg)
    elif path_type == "optimal":
        path = _optimal_path(input_sets, output_set, dimension_dict, memory_arg)
    elif path_type[0] == 'einsum_path':
        path = path_type[1:]
    else:
        raise KeyError("Path name %s not found" % path_type)

    cost_list, scale_list, size_list, contraction_list = [], [], [], []

    # Build contraction tuple (positions, gemm, einsum_str, remaining)
    for cnum, contract_inds in enumerate(path):
        # Make sure we remove inds from right to left
        contract_inds = tuple(sorted(list(contract_inds), reverse=True))

        contract = _find_contraction(contract_inds, input_sets, output_set)
        out_inds, input_sets, idx_removed, idx_contract = contract

        cost = _flop_count(idx_contract, idx_removed, len(contract_inds), dimension_dict)
        cost_list.append(cost)
        scale_list.append(len(idx_contract))
        size_list.append(_compute_size_by_dict(out_inds, dimension_dict))

        bcast = set()
        tmp_inputs = []
        for x in contract_inds:
            tmp_inputs.append(input_list.pop(x))
            bcast |= broadcast_indices.pop(x)

        new_bcast_inds = bcast - idx_removed

        # If we're broadcasting, nix blas
        if not len(idx_removed & bcast):
            do_blas = _can_dot(tmp_inputs, out_inds, idx_removed)
        else:
            do_blas = False

        # Last contraction
        if (cnum - len(path)) == -1:
            idx_result = output_subscript
        else:
            sort_result = [(dimension_dict[ind], ind) for ind in out_inds]
            idx_result = "".join([x[1] for x in sorted(sort_result)])

        input_list.append(idx_result)
        broadcast_indices.append(new_bcast_inds)
        einsum_str = ",".join(tmp_inputs) + "->" + idx_result

        contraction = (contract_inds, idx_removed, einsum_str, input_list[:], do_blas)
        contraction_list.append(contraction)

    opt_cost = sum(cost_list) + 1

    if einsum_call_arg:
        return (operands, contraction_list)

    # Return the path along with a nice string representation
    overall_contraction = input_subscripts + "->" + output_subscript
    header = ("scaling", "current", "remaining")

    speedup = naive_cost / opt_cost
    max_i = max(size_list)

    path_print = "  Complete contraction:  %s\n" % overall_contraction
    path_print += "         Naive scaling:  %d\n" % len(indices)
    path_print += "     Optimized scaling:  %d\n" % max(scale_list)
    path_print += "      Naive FLOP count:  %.3e\n" % naive_cost
    path_print += "  Optimized FLOP count:  %.3e\n" % opt_cost
    path_print += "   Theoretical speedup:  %3.3f\n" % speedup
    path_print += "  Largest intermediate:  %.3e elements\n" % max_i
    path_print += "-" * 74 + "\n"
    path_print += "%6s %24s %40s\n" % header
    path_print += "-" * 74

    for n, contraction in enumerate(contraction_list):
        inds, idx_rm, einsum_str, remaining, blas = contraction
        remaining_str = ",".join(remaining) + "->" + output_subscript
        path_run =
(scale_list[n], einsum_str, remaining_str) path_print += "\n%4d %24s %40s" % path_run path = ['einsum_path'] + path return (path, path_print) def _einsum_dispatcher(*operands, out=None, optimize=None, **kwargs): # Arguably we dispatch on more arguments that we really should; see note in # _einsum_path_dispatcher for why. yield from operands yield out # Rewrite einsum to handle different cases @array_function_dispatch(_einsum_dispatcher, module='numpy') def einsum(*operands, out=None, optimize=False, **kwargs): """ einsum(subscripts, *operands, out=None, dtype=None, order='K', casting='safe', optimize=False) Evaluates the Einstein summation convention on the operands. Using the Einstein summation convention, many common multi-dimensional, linear algebraic array operations can be represented in a simple fashion. In *implicit* mode `einsum` computes these values. In *explicit* mode, `einsum` provides further flexibility to compute other array operations that might not be considered classical Einstein summation operations, by disabling, or forcing summation over specified subscript labels. See the notes and examples for clarification. Parameters ---------- subscripts : str Specifies the subscripts for summation as comma separated list of subscript labels. An implicit (classical Einstein summation) calculation is performed unless the explicit indicator '->' is included as well as subscript labels of the precise output form. operands : list of array_like These are the arrays for the operation. out : ndarray, optional If provided, the calculation is done into this array. dtype : {data-type, None}, optional If provided, forces the calculation to use the data type specified. Note that you may have to also give a more liberal `casting` parameter to allow the conversions. Default is None. order : {'C', 'F', 'A', 'K'}, optional Controls the memory layout of the output. 'C' means it should be C contiguous. 'F' means it should be Fortran contiguous, 'A' means it should be 'F' if the inputs are all 'F', 'C' otherwise. 'K' means it should be as close to the layout as the inputs as is possible, including arbitrarily permuted axes. Default is 'K'. casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}, optional Controls what kind of data casting may occur. Setting this to 'unsafe' is not recommended, as it can adversely affect accumulations. * 'no' means the data types should not be cast at all. * 'equiv' means only byte-order changes are allowed. * 'safe' means only casts which can preserve values are allowed. * 'same_kind' means only safe casts or casts within a kind, like float64 to float32, are allowed. * 'unsafe' means any data conversions may be done. Default is 'safe'. optimize : {False, True, 'greedy', 'optimal'}, optional Controls if intermediate optimization should occur. No optimization will occur if False and True will default to the 'greedy' algorithm. Also accepts an explicit contraction list from the ``np.einsum_path`` function. See ``np.einsum_path`` for more details. Defaults to False. Returns ------- output : ndarray The calculation based on the Einstein summation convention. See Also -------- einsum_path, dot, inner, outer, tensordot, linalg.multi_dot Notes ----- .. versionadded:: 1.6.0 The Einstein summation convention can be used to compute many multi-dimensional, linear algebraic array operations. `einsum` provides a succinct way of representing these. 
A non-exhaustive list of these operations, which can be computed by `einsum`, is shown below along with examples: * Trace of an array, :py:func:`numpy.trace`. * Return a diagonal, :py:func:`numpy.diag`. * Array axis summations, :py:func:`numpy.sum`. * Transpositions and permutations, :py:func:`numpy.transpose`. * Matrix multiplication and dot product, :py:func:`numpy.matmul` :py:func:`numpy.dot`. * Vector inner and outer products, :py:func:`numpy.inner` :py:func:`numpy.outer`. * Broadcasting, element-wise and scalar multiplication, :py:func:`numpy.multiply`. * Tensor contractions, :py:func:`numpy.tensordot`. * Chained array operations, in efficient calculation order, :py:func:`numpy.einsum_path`. The subscripts string is a comma-separated list of subscript labels, where each label refers to a dimension of the corresponding operand. Whenever a label is repeated it is summed, so ``np.einsum('i,i', a, b)`` is equivalent to :py:func:`np.inner(a,b) <numpy.inner>`. If a label appears only once, it is not summed, so ``np.einsum('i', a)`` produces a view of ``a`` with no changes. A further example ``np.einsum('ij,jk', a, b)`` describes traditional matrix multiplication and is equivalent to :py:func:`np.matmul(a,b) <numpy.matmul>`. Repeated subscript labels in one operand take the diagonal. For example, ``np.einsum('ii', a)`` is equivalent to :py:func:`np.trace(a) <numpy.trace>`. In *implicit mode*, the chosen subscripts are important since the axes of the output are reordered alphabetically. This means that ``np.einsum('ij', a)`` doesn't affect a 2D array, while ``np.einsum('ji', a)`` takes its transpose. Additionally, ``np.einsum('ij,jk', a, b)`` returns a matrix multiplication, while, ``np.einsum('ij,jh', a, b)`` returns the transpose of the multiplication since subscript 'h' precedes subscript 'i'. In *explicit mode* the output can be directly controlled by specifying output subscript labels. This requires the identifier '->' as well as the list of output subscript labels. This feature increases the flexibility of the function since summing can be disabled or forced when required. The call ``np.einsum('i->', a)`` is like :py:func:`np.sum(a, axis=-1) <numpy.sum>`, and ``np.einsum('ii->i', a)`` is like :py:func:`np.diag(a) <numpy.diag>`. The difference is that `einsum` does not allow broadcasting by default. Additionally ``np.einsum('ij,jh->ih', a, b)`` directly specifies the order of the output subscript labels and therefore returns matrix multiplication, unlike the example above in implicit mode. To enable and control broadcasting, use an ellipsis. Default NumPy-style broadcasting is done by adding an ellipsis to the left of each term, like ``np.einsum('...ii->...i', a)``. To take the trace along the first and last axes, you can do ``np.einsum('i...i', a)``, or to do a matrix-matrix product with the left-most indices instead of rightmost, one can do ``np.einsum('ij...,jk...->ik...', a, b)``. When there is only one operand, no axes are summed, and no output parameter is provided, a view into the operand is returned instead of a new array. Thus, taking the diagonal as ``np.einsum('ii->i', a)`` produces a view (changed in version 1.10.0). `einsum` also provides an alternative way to provide the subscripts and operands as ``einsum(op0, sublist0, op1, sublist1, ..., [sublistout])``. If the output shape is not provided in this format `einsum` will be calculated in implicit mode, otherwise it will be performed explicitly. 
The examples below have corresponding `einsum` calls with the two parameter methods. .. versionadded:: 1.10.0 Views returned from einsum are now writeable whenever the input array is writeable. For example, ``np.einsum('ijk...->kji...', a)`` will now have the same effect as :py:func:`np.swapaxes(a, 0, 2) <numpy.swapaxes>` and ``np.einsum('ii->i', a)`` will return a writeable view of the diagonal of a 2D array. .. versionadded:: 1.12.0 Added the ``optimize`` argument which will optimize the contraction order of an einsum expression. For a contraction with three or more operands this can greatly increase the computational efficiency at the cost of a larger memory footprint during computation. Typically a 'greedy' algorithm is applied which empirical tests have shown returns the optimal path in the majority of cases. In some cases 'optimal' will return the superlative path through a more expensive, exhaustive search. For iterative calculations it may be advisable to calculate the optimal path once and reuse that path by supplying it as an argument. An example is given below. See :py:func:`numpy.einsum_path` for more details. Examples -------- >>> a = np.arange(25).reshape(5,5) >>> b = np.arange(5) >>> c = np.arange(6).reshape(2,3) Trace of a matrix: >>> np.einsum('ii', a) 60 >>> np.einsum(a, [0,0]) 60 >>> np.trace(a) 60 Extract the diagonal (requires explicit form): >>> np.einsum('ii->i', a) array([ 0, 6, 12, 18, 24]) >>> np.einsum(a, [0,0], [0]) array([ 0, 6, 12, 18, 24]) >>> np.diag(a) array([ 0, 6, 12, 18, 24]) Sum over an axis (requires explicit form): >>> np.einsum('ij->i', a) array([ 10, 35, 60, 85, 110]) >>> np.einsum(a, [0,1], [0]) array([ 10, 35, 60, 85, 110]) >>> np.sum(a, axis=1) array([ 10, 35, 60, 85, 110]) For higher dimensional arrays summing a single axis can be done with ellipsis: >>> np.einsum('...j->...', a) array([ 10, 35, 60, 85, 110]) >>> np.einsum(a, [Ellipsis,1], [Ellipsis]) array([ 10, 35, 60, 85, 110]) Compute a matrix transpose, or reorder any number of axes: >>> np.einsum('ji', c) array([[0, 3], [1, 4], [2, 5]]) >>> np.einsum('ij->ji', c) array([[0, 3], [1, 4], [2, 5]]) >>> np.einsum(c, [1,0]) array([[0, 3], [1, 4], [2, 5]]) >>> np.transpose(c) array([[0, 3], [1, 4], [2, 5]]) Vector inner products: >>> np.einsum('i,i', b, b) 30 >>> np.einsum(b, [0], b, [0]) 30 >>> np.inner(b,b) 30 Matrix vector multiplication: >>> np.einsum('ij,j', a, b) array([ 30, 80, 130, 180, 230]) >>> np.einsum(a, [0,1], b, [1]) array([ 30, 80, 130, 180, 230]) >>> np.dot(a, b) array([ 30, 80, 130, 180, 230]) >>> np.einsum('...j,j', a, b) array([ 30, 80, 130, 180, 230]) Broadcasting and scalar multiplication: >>> np.einsum('..., ...', 3, c) array([[ 0, 3, 6], [ 9, 12, 15]]) >>> np.einsum(',ij', 3, c) array([[ 0, 3, 6], [ 9, 12, 15]]) >>> np.einsum(3, [Ellipsis], c, [Ellipsis]) array([[ 0, 3, 6], [ 9, 12, 15]]) >>> np.multiply(3, c) array([[ 0, 3, 6], [ 9, 12, 15]]) Vector outer product: >>> np.einsum('i,j', np.arange(2)+1, b) array([[0, 1, 2, 3, 4], [0, 2, 4, 6, 8]]) >>> np.einsum(np.arange(2)+1, [0], b, [1]) array([[0, 1, 2, 3, 4], [0, 2, 4, 6, 8]]) >>> np.outer(np.arange(2)+1, b) array([[0, 1, 2, 3, 4], [0, 2, 4, 6, 8]]) Tensor contraction: >>> a = np.arange(60.).reshape(3,4,5) >>> b = np.arange(24.).reshape(4,3,2) >>> np.einsum('ijk,jil->kl', a, b) array([[4400., 4730.], [4532., 4874.], [4664., 5018.], [4796., 5162.], [4928., 5306.]]) >>> np.einsum(a, [0,1,2], b, [1,0,3], [2,3]) array([[4400., 4730.], [4532., 4874.], [4664., 5018.], [4796., 5162.], [4928., 5306.]]) >>> np.tensordot(a,b, 
axes=([1,0],[0,1])) array([[4400., 4730.], [4532., 4874.], [4664., 5018.], [4796., 5162.], [4928., 5306.]]) Writeable returned arrays (since version 1.10.0): >>> a = np.zeros((3, 3)) >>> np.einsum('ii->i', a)[:] = 1 >>> a array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]) Example of ellipsis use: >>> a = np.arange(6).reshape((3,2)) >>> b = np.arange(12).reshape((4,3)) >>> np.einsum('ki,jk->ij', a, b) array([[10, 28, 46, 64], [13, 40, 67, 94]]) >>> np.einsum('ki,...k->i...', a, b) array([[10, 28, 46, 64], [13, 40, 67, 94]]) >>> np.einsum('k...,jk', a, b) array([[10, 28, 46, 64], [13, 40, 67, 94]]) Chained array operations. For more complicated contractions, speed ups might be achieved by repeatedly computing a 'greedy' path or pre-computing the 'optimal' path and repeatedly applying it, using an `einsum_path` insertion (since version 1.12.0). Performance improvements can be particularly significant with larger arrays: >>> a = np.ones(64).reshape(2,4,8) Basic `einsum`: ~1520ms (benchmarked on 3.1GHz Intel i5.) >>> for iteration in range(500): ... _ = np.einsum('ijk,ilm,njm,nlk,abc->',a,a,a,a,a) Sub-optimal `einsum` (due to repeated path calculation time): ~330ms >>> for iteration in range(500): ... _ = np.einsum('ijk,ilm,njm,nlk,abc->',a,a,a,a,a, optimize='optimal') Greedy `einsum` (faster optimal path approximation): ~160ms >>> for iteration in range(500): ... _ = np.einsum('ijk,ilm,njm,nlk,abc->',a,a,a,a,a, optimize='greedy') Optimal `einsum` (best usage pattern in some use cases): ~110ms >>> path = np.einsum_path('ijk,ilm,njm,nlk,abc->',a,a,a,a,a, optimize='optimal')[0] >>> for iteration in range(500): ... _ = np.einsum('ijk,ilm,njm,nlk,abc->',a,a,a,a,a, optimize=path) """ # Special handling if out is specified specified_out = out is not None # If no optimization, run pure einsum if optimize is False: if specified_out: kwargs['out'] = out return c_einsum(*operands, **kwargs) # Check the kwargs to avoid a more cryptic error later, without having to # repeat default values here valid_einsum_kwargs = ['dtype', 'order', 'casting'] unknown_kwargs = [k for (k, v) in kwargs.items() if k not in valid_einsum_kwargs] if len(unknown_kwargs): raise TypeError("Did not understand the following kwargs: %s" % unknown_kwargs) # Build the contraction list and operand operands, contraction_list = einsum_path(*operands, optimize=optimize, einsum_call=True) # Start contraction loop for num, contraction in enumerate(contraction_list): inds, idx_rm, einsum_str, remaining, blas = contraction tmp_operands = [operands.pop(x) for x in inds] # Do we need to deal with the output? handle_out = specified_out and ((num + 1) == len(contraction_list)) # Call tensordot if still possible if blas: # Checks have already been handled input_str, results_index = einsum_str.split('->') input_left, input_right = input_str.split(',') tensor_result = input_left + input_right for s in idx_rm: tensor_result = tensor_result.replace(s, "") # Find indices to contract over left_pos, right_pos = [], [] for s in sorted(idx_rm): left_pos.append(input_left.find(s)) right_pos.append(input_right.find(s)) # Contract! 
new_view = tensordot(*tmp_operands, axes=(tuple(left_pos), tuple(right_pos))) # Build a new view if needed if (tensor_result != results_index) or handle_out: if handle_out: kwargs["out"] = out new_view = c_einsum(tensor_result + '->' + results_index, new_view, **kwargs) # Call einsum else: # If out was specified if handle_out: kwargs["out"] = out # Do the contraction new_view = c_einsum(einsum_str, *tmp_operands, **kwargs) # Append new items and dereference what we can operands.append(new_view) del tmp_operands, new_view if specified_out: return out else: return operands[0]
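End to end, the module above is easiest to see through a small usage sketch (illustrative only, not part of the module, and consistent with the einsum/einsum_path docstrings): compute a contraction path once with einsum_path, then reuse it across repeated einsum calls.

# Illustrative usage sketch, not part of the module: precompute a path and
# feed it back to einsum so the path search runs only once.
import numpy as np

a = np.random.rand(8, 8)
b = np.random.rand(8, 4)
c = np.random.rand(4, 8)

# 'greedy' picks, at each step, the pairwise contraction that removes the
# most intermediate work at the lowest flop cost.
path, path_info = np.einsum_path('ij,jk,kl->il', a, b, c, optimize='greedy')
print(path_info)  # human-readable report of the chosen contraction order

# The returned path (a list starting with 'einsum_path') can be passed
# straight back in on subsequent calls.
result = np.einsum('ij,jk,kl->il', a, b, c, optimize=path)
assert np.allclose(result, a @ b @ c)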
# doctest r''' Test the .npy file format. Set up: >>> import sys >>> from io import BytesIO >>> from numpy.lib import format >>> >>> scalars = [ ... np.uint8, ... np.int8, ... np.uint16, ... np.int16, ... np.uint32, ... np.int32, ... np.uint64, ... np.int64, ... np.float32, ... np.float64, ... np.complex64, ... np.complex128, ... object, ... ] >>> >>> basic_arrays = [] >>> >>> for scalar in scalars: ... for endian in '<>': ... dtype = np.dtype(scalar).newbyteorder(endian) ... basic = np.arange(15).astype(dtype) ... basic_arrays.extend([ ... np.array([], dtype=dtype), ... np.array(10, dtype=dtype), ... basic, ... basic.reshape((3,5)), ... basic.reshape((3,5)).T, ... basic.reshape((3,5))[::-1,::2], ... ]) ... >>> >>> Pdescr = [ ... ('x', 'i4', (2,)), ... ('y', 'f8', (2, 2)), ... ('z', 'u1')] >>> >>> >>> PbufferT = [ ... ([3,2], [[6.,4.],[6.,4.]], 8), ... ([4,3], [[7.,5.],[7.,5.]], 9), ... ] >>> >>> >>> Ndescr = [ ... ('x', 'i4', (2,)), ... ('Info', [ ... ('value', 'c16'), ... ('y2', 'f8'), ... ('Info2', [ ... ('name', 'S2'), ... ('value', 'c16', (2,)), ... ('y3', 'f8', (2,)), ... ('z3', 'u4', (2,))]), ... ('name', 'S2'), ... ('z2', 'b1')]), ... ('color', 'S2'), ... ('info', [ ... ('Name', 'U8'), ... ('Value', 'c16')]), ... ('y', 'f8', (2, 2)), ... ('z', 'u1')] >>> >>> >>> NbufferT = [ ... ([3,2], (6j, 6., ('nn', [6j,4j], [6.,4.], [1,2]), 'NN', True), 'cc', ('NN', 6j), [[6.,4.],[6.,4.]], 8), ... ([4,3], (7j, 7., ('oo', [7j,5j], [7.,5.], [2,1]), 'OO', False), 'dd', ('OO', 7j), [[7.,5.],[7.,5.]], 9), ... ] >>> >>> >>> record_arrays = [ ... np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')), ... np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')), ... np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')), ... np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')), ... ] Test the magic string writing. >>> format.magic(1, 0) '\x93NUMPY\x01\x00' >>> format.magic(0, 0) '\x93NUMPY\x00\x00' >>> format.magic(255, 255) '\x93NUMPY\xff\xff' >>> format.magic(2, 5) '\x93NUMPY\x02\x05' Test the magic string reading. >>> format.read_magic(BytesIO(format.magic(1, 0))) (1, 0) >>> format.read_magic(BytesIO(format.magic(0, 0))) (0, 0) >>> format.read_magic(BytesIO(format.magic(255, 255))) (255, 255) >>> format.read_magic(BytesIO(format.magic(2, 5))) (2, 5) Test the header writing. >>> for arr in basic_arrays + record_arrays: ... f = BytesIO() ... format.write_array_header_1_0(f, arr) # XXX: arr is not a dict, items gets called on it ... print(repr(f.getvalue())) ... 
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|u1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|u1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|i1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '|i1', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i2', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u4', 
'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<u8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>u8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<i8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>i8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<f4', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>f4', 'fortran_order': 
True, 'shape': (5, 3)} \n" "F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<f8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>f8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<c8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>c8', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '<c16', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': '>c16', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': 'O', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (0,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': ()} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (15,)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 5)} \n" "F\x00{'descr': 'O', 'fortran_order': True, 'shape': (5, 3)} \n" "F\x00{'descr': 'O', 'fortran_order': False, 'shape': (3, 3)} \n" "v\x00{'descr': [('x', '<i4', (2,)), ('y', '<f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "\x16\x02{'descr': [('x', '<i4', (2,)),\n ('Info',\n [('value', '<c16'),\n ('y2', '<f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '<c16', (2,)),\n 
('y3', '<f8', (2,)),\n ('z3', '<u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '<U8'), ('Value', '<c16')]),\n ('y', '<f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "v\x00{'descr': [('x', '>i4', (2,)), ('y', '>f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" "\x16\x02{'descr': [('x', '>i4', (2,)),\n ('Info',\n [('value', '>c16'),\n ('y2', '>f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '>c16', (2,)),\n ('y3', '>f8', (2,)),\n ('z3', '>u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '>U8'), ('Value', '>c16')]),\n ('y', '>f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" ''' import sys import os import shutil import tempfile import warnings import pytest from io import BytesIO import numpy as np from numpy.testing import ( assert_, assert_array_equal, assert_raises, assert_raises_regex, assert_warns ) from numpy.lib import format tempdir = None # Module-level setup. def setup_module(): global tempdir tempdir = tempfile.mkdtemp() def teardown_module(): global tempdir if tempdir is not None and os.path.isdir(tempdir): shutil.rmtree(tempdir) tempdir = None # Generate some basic arrays to test with. scalars = [ np.uint8, np.int8, np.uint16, np.int16, np.uint32, np.int32, np.uint64, np.int64, np.float32, np.float64, np.complex64, np.complex128, object, ] basic_arrays = [] for scalar in scalars: for endian in '<>': dtype = np.dtype(scalar).newbyteorder(endian) basic = np.arange(1500).astype(dtype) basic_arrays.extend([ # Empty np.array([], dtype=dtype), # Rank-0 np.array(10, dtype=dtype), # 1-D basic, # 2-D C-contiguous basic.reshape((30, 50)), # 2-D F-contiguous basic.reshape((30, 50)).T, # 2-D non-contiguous basic.reshape((30, 50))[::-1, ::2], ]) # More complicated record arrays. 
# This is the structure of the table used for plain objects: # # +-+-+-+ # |x|y|z| # +-+-+-+ # Structure of a plain array description: Pdescr = [ ('x', 'i4', (2,)), ('y', 'f8', (2, 2)), ('z', 'u1')] # A plain list of tuples with values for testing: PbufferT = [ # x y z ([3, 2], [[6., 4.], [6., 4.]], 8), ([4, 3], [[7., 5.], [7., 5.]], 9), ] # This is the structure of the table used for nested objects (DON'T PANIC!): # # +-+---------------------------------+-----+----------+-+-+ # |x|Info |color|info |y|z| # | +-----+--+----------------+----+--+ +----+-----+ | | # | |value|y2|Info2 |name|z2| |Name|Value| | | # | | | +----+-----+--+--+ | | | | | | | # | | | |name|value|y3|z3| | | | | | | | # +-+-----+--+----+-----+--+--+----+--+-----+----+-----+-+-+ # # The corresponding nested array description: Ndescr = [ ('x', 'i4', (2,)), ('Info', [ ('value', 'c16'), ('y2', 'f8'), ('Info2', [ ('name', 'S2'), ('value', 'c16', (2,)), ('y3', 'f8', (2,)), ('z3', 'u4', (2,))]), ('name', 'S2'), ('z2', 'b1')]), ('color', 'S2'), ('info', [ ('Name', 'U8'), ('Value', 'c16')]), ('y', 'f8', (2, 2)), ('z', 'u1')] NbufferT = [ # x Info color info y z # value y2 Info2 name z2 Name Value # name value y3 z3 ([3, 2], (6j, 6., ('nn', [6j, 4j], [6., 4.], [1, 2]), 'NN', True), 'cc', ('NN', 6j), [[6., 4.], [6., 4.]], 8), ([4, 3], (7j, 7., ('oo', [7j, 5j], [7., 5.], [2, 1]), 'OO', False), 'dd', ('OO', 7j), [[7., 5.], [7., 5.]], 9), ] record_arrays = [ np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')), np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')), np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')), np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')), np.zeros(1, dtype=[('c', ('<f8', (5,)), (2,))]) ] #BytesIO that reads a random number of bytes at a time class BytesIOSRandomSize(BytesIO): def read(self, size=None): import random size = random.randint(1, size) return super(BytesIOSRandomSize, self).read(size) def roundtrip(arr): f = BytesIO() format.write_array(f, arr) f2 = BytesIO(f.getvalue()) arr2 = format.read_array(f2, allow_pickle=True) return arr2 def roundtrip_randsize(arr): f = BytesIO() format.write_array(f, arr) f2 = BytesIOSRandomSize(f.getvalue()) arr2 = format.read_array(f2) return arr2 def roundtrip_truncated(arr): f = BytesIO() format.write_array(f, arr) #BytesIO is one byte short f2 = BytesIO(f.getvalue()[0:-1]) arr2 = format.read_array(f2) return arr2 def assert_equal_(o1, o2): assert_(o1 == o2) def test_roundtrip(): for arr in basic_arrays + record_arrays: arr2 = roundtrip(arr) assert_array_equal(arr, arr2) def test_roundtrip_randsize(): for arr in basic_arrays + record_arrays: if arr.dtype != object: arr2 = roundtrip_randsize(arr) assert_array_equal(arr, arr2) def test_roundtrip_truncated(): for arr in basic_arrays: if arr.dtype != object: assert_raises(ValueError, roundtrip_truncated, arr) def test_long_str(): # check items larger than internal buffer size, gh-4027 long_str_arr = np.ones(1, dtype=np.dtype((str, format.BUFFER_SIZE + 1))) long_str_arr2 = roundtrip(long_str_arr) assert_array_equal(long_str_arr, long_str_arr2) @pytest.mark.slow def test_memmap_roundtrip(): # Fixme: used to crash on windows if not (sys.platform == 'win32' or sys.platform == 'cygwin'): for arr in basic_arrays + record_arrays: if arr.dtype.hasobject: # Skip these since they can't be mmap'ed. continue # Write it out normally and through mmap. 
nfn = os.path.join(tempdir, 'normal.npy') mfn = os.path.join(tempdir, 'memmap.npy') fp = open(nfn, 'wb') try: format.write_array(fp, arr) finally: fp.close() fortran_order = ( arr.flags.f_contiguous and not arr.flags.c_contiguous) ma = format.open_memmap(mfn, mode='w+', dtype=arr.dtype, shape=arr.shape, fortran_order=fortran_order) ma[...] = arr del ma # Check that both of these files' contents are the same. fp = open(nfn, 'rb') normal_bytes = fp.read() fp.close() fp = open(mfn, 'rb') memmap_bytes = fp.read() fp.close() assert_equal_(normal_bytes, memmap_bytes) # Check that reading the file using memmap works. ma = format.open_memmap(nfn, mode='r') del ma def test_compressed_roundtrip(): arr = np.random.rand(200, 200) npz_file = os.path.join(tempdir, 'compressed.npz') np.savez_compressed(npz_file, arr=arr) arr1 = np.load(npz_file)['arr'] assert_array_equal(arr, arr1) # aligned dt1 = np.dtype('i1, i4, i1', align=True) # non-aligned, explicit offsets dt2 = np.dtype({'names': ['a', 'b'], 'formats': ['i4', 'i4'], 'offsets': [1, 6]}) # nested struct-in-struct dt3 = np.dtype({'names': ['c', 'd'], 'formats': ['i4', dt2]}) # field with '' name dt4 = np.dtype({'names': ['a', '', 'b'], 'formats': ['i4']*3}) # titles dt5 = np.dtype({'names': ['a', 'b'], 'formats': ['i4', 'i4'], 'offsets': [1, 6], 'titles': ['aa', 'bb']}) # empty dt6 = np.dtype({'names': [], 'formats': [], 'itemsize': 8}) @pytest.mark.parametrize("dt", [dt1, dt2, dt3, dt4, dt5, dt6]) def test_load_padded_dtype(dt): arr = np.zeros(3, dt) for i in range(3): arr[i] = i + 5 npz_file = os.path.join(tempdir, 'aligned.npz') np.savez(npz_file, arr=arr) arr1 = np.load(npz_file)['arr'] assert_array_equal(arr, arr1) def test_python2_python3_interoperability(): fname = 'win64python2.npy' path = os.path.join(os.path.dirname(__file__), 'data', fname) data = np.load(path) assert_array_equal(data, np.ones(2)) def test_pickle_python2_python3(): # Test that loading object arrays saved on Python 2 works both on # Python 2 and Python 3 and vice versa data_dir = os.path.join(os.path.dirname(__file__), 'data') expected = np.array([None, range, u'\u512a\u826f', b'\xe4\xb8\x8d\xe8\x89\xaf'], dtype=object) for fname in ['py2-objarr.npy', 'py2-objarr.npz', 'py3-objarr.npy', 'py3-objarr.npz']: path = os.path.join(data_dir, fname) for encoding in ['bytes', 'latin1']: data_f = np.load(path, allow_pickle=True, encoding=encoding) if fname.endswith('.npz'): data = data_f['x'] data_f.close() else: data = data_f if encoding == 'latin1' and fname.startswith('py2'): assert_(isinstance(data[3], str)) assert_array_equal(data[:-1], expected[:-1]) # mojibake occurs assert_array_equal(data[-1].encode(encoding), expected[-1]) else: assert_(isinstance(data[3], bytes)) assert_array_equal(data, expected) if fname.startswith('py2'): if fname.endswith('.npz'): data = np.load(path, allow_pickle=True) assert_raises(UnicodeError, data.__getitem__, 'x') data.close() data = np.load(path, allow_pickle=True, fix_imports=False, encoding='latin1') assert_raises(ImportError, data.__getitem__, 'x') data.close() else: assert_raises(UnicodeError, np.load, path, allow_pickle=True) assert_raises(ImportError, np.load, path, allow_pickle=True, fix_imports=False, encoding='latin1') def test_pickle_disallow(): data_dir = os.path.join(os.path.dirname(__file__), 'data') path = os.path.join(data_dir, 'py2-objarr.npy') assert_raises(ValueError, np.load, path, allow_pickle=False, encoding='latin1') path = os.path.join(data_dir, 'py2-objarr.npz') f = np.load(path, allow_pickle=False, encoding='latin1') 
assert_raises(ValueError, f.__getitem__, 'x') path = os.path.join(tempdir, 'pickle-disabled.npy') assert_raises(ValueError, np.save, path, np.array([None], dtype=object), allow_pickle=False) @pytest.mark.parametrize('dt', [ np.dtype(np.dtype([('a', np.int8), ('b', np.int16), ('c', np.int32), ], align=True), (3,)), np.dtype([('x', np.dtype({'names':['a','b'], 'formats':['i1','i1'], 'offsets':[0,4], 'itemsize':8, }, (3,)), (4,), )]), np.dtype([('x', ('<f8', (5,)), (2,), )]), np.dtype([('x', np.dtype(( np.dtype(( np.dtype({'names':['a','b'], 'formats':['i1','i1'], 'offsets':[0,4], 'itemsize':8}), (3,) )), (4,) ))) ]), np.dtype([ ('a', np.dtype(( np.dtype(( np.dtype(( np.dtype([ ('a', int), ('b', np.dtype({'names':['a','b'], 'formats':['i1','i1'], 'offsets':[0,4], 'itemsize':8})), ]), (3,), )), (4,), )), (5,), ))) ]), ]) def test_descr_to_dtype(dt): dt1 = format.descr_to_dtype(dt.descr) assert_equal_(dt1, dt) arr1 = np.zeros(3, dt) arr2 = roundtrip(arr1) assert_array_equal(arr1, arr2) def test_version_2_0(): f = BytesIO() # requires more than 2 byte for header dt = [(("%d" % i) * 100, float) for i in range(500)] d = np.ones(1000, dtype=dt) format.write_array(f, d, version=(2, 0)) with warnings.catch_warnings(record=True) as w: warnings.filterwarnings('always', '', UserWarning) format.write_array(f, d) assert_(w[0].category is UserWarning) # check alignment of data portion f.seek(0) header = f.readline() assert_(len(header) % format.ARRAY_ALIGN == 0) f.seek(0) n = format.read_array(f) assert_array_equal(d, n) # 1.0 requested but data cannot be saved this way assert_raises(ValueError, format.write_array, f, d, (1, 0)) @pytest.mark.slow def test_version_2_0_memmap(): # requires more than 2 byte for header dt = [(("%d" % i) * 100, float) for i in range(500)] d = np.ones(1000, dtype=dt) tf = tempfile.mktemp('', 'mmap', dir=tempdir) # 1.0 requested but data cannot be saved this way assert_raises(ValueError, format.open_memmap, tf, mode='w+', dtype=d.dtype, shape=d.shape, version=(1, 0)) ma = format.open_memmap(tf, mode='w+', dtype=d.dtype, shape=d.shape, version=(2, 0)) ma[...] = d del ma with warnings.catch_warnings(record=True) as w: warnings.filterwarnings('always', '', UserWarning) ma = format.open_memmap(tf, mode='w+', dtype=d.dtype, shape=d.shape, version=None) assert_(w[0].category is UserWarning) ma[...] = d del ma ma = format.open_memmap(tf, mode='r') assert_array_equal(ma, d) def test_write_version(): f = BytesIO() arr = np.arange(1) # These should pass. format.write_array(f, arr, version=(1, 0)) format.write_array(f, arr) format.write_array(f, arr, version=None) format.write_array(f, arr) format.write_array(f, arr, version=(2, 0)) format.write_array(f, arr) # These should all fail. 
bad_versions = [ (1, 1), (0, 0), (0, 1), (2, 2), (255, 255), ] for version in bad_versions: with assert_raises_regex(ValueError, 'we only support format version.*'): format.write_array(f, arr, version=version) bad_version_magic = [ b'\x93NUMPY\x01\x01', b'\x93NUMPY\x00\x00', b'\x93NUMPY\x00\x01', b'\x93NUMPY\x02\x00', b'\x93NUMPY\x02\x02', b'\x93NUMPY\xff\xff', ] malformed_magic = [ b'\x92NUMPY\x01\x00', b'\x00NUMPY\x01\x00', b'\x93numpy\x01\x00', b'\x93MATLB\x01\x00', b'\x93NUMPY\x01', b'\x93NUMPY', b'', ] def test_read_magic(): s1 = BytesIO() s2 = BytesIO() arr = np.ones((3, 6), dtype=float) format.write_array(s1, arr, version=(1, 0)) format.write_array(s2, arr, version=(2, 0)) s1.seek(0) s2.seek(0) version1 = format.read_magic(s1) version2 = format.read_magic(s2) assert_(version1 == (1, 0)) assert_(version2 == (2, 0)) assert_(s1.tell() == format.MAGIC_LEN) assert_(s2.tell() == format.MAGIC_LEN) def test_read_magic_bad_magic(): for magic in malformed_magic: f = BytesIO(magic) assert_raises(ValueError, format.read_array, f) def test_read_version_1_0_bad_magic(): for magic in bad_version_magic + malformed_magic: f = BytesIO(magic) assert_raises(ValueError, format.read_array, f) def test_bad_magic_args(): assert_raises(ValueError, format.magic, -1, 1) assert_raises(ValueError, format.magic, 256, 1) assert_raises(ValueError, format.magic, 1, -1) assert_raises(ValueError, format.magic, 1, 256) def test_large_header(): s = BytesIO() d = {'a': 1, 'b': 2} format.write_array_header_1_0(s, d) s = BytesIO() d = {'a': 1, 'b': 2, 'c': 'x'*256*256} assert_raises(ValueError, format.write_array_header_1_0, s, d) def test_read_array_header_1_0(): s = BytesIO() arr = np.ones((3, 6), dtype=float) format.write_array(s, arr, version=(1, 0)) s.seek(format.MAGIC_LEN) shape, fortran, dtype = format.read_array_header_1_0(s) assert_(s.tell() % format.ARRAY_ALIGN == 0) assert_((shape, fortran, dtype) == ((3, 6), False, float)) def test_read_array_header_2_0(): s = BytesIO() arr = np.ones((3, 6), dtype=float) format.write_array(s, arr, version=(2, 0)) s.seek(format.MAGIC_LEN) shape, fortran, dtype = format.read_array_header_2_0(s) assert_(s.tell() % format.ARRAY_ALIGN == 0) assert_((shape, fortran, dtype) == ((3, 6), False, float)) def test_bad_header(): # header of length less than 2 should fail s = BytesIO() assert_raises(ValueError, format.read_array_header_1_0, s) s = BytesIO(b'1') assert_raises(ValueError, format.read_array_header_1_0, s) # header shorter than indicated size should fail s = BytesIO(b'\x01\x00') assert_raises(ValueError, format.read_array_header_1_0, s) # headers without the exact keys required should fail d = {"shape": (1, 2), "descr": "x"} s = BytesIO() format.write_array_header_1_0(s, d) assert_raises(ValueError, format.read_array_header_1_0, s) d = {"shape": (1, 2), "fortran_order": False, "descr": "x", "extrakey": -1} s = BytesIO() format.write_array_header_1_0(s, d) assert_raises(ValueError, format.read_array_header_1_0, s) def test_large_file_support(): if (sys.platform == 'win32' or sys.platform == 'cygwin'): pytest.skip("Unknown if Windows has sparse filesystems") # try creating a large sparse file tf_name = os.path.join(tempdir, 'sparse_file') try: # seek past end would work too, but linux truncate somewhat # increases the chances that we have a sparse filesystem and can # avoid actually writing 5GB import subprocess as sp sp.check_call(["truncate", "-s", "5368709120", tf_name]) except Exception: pytest.skip("Could not create 5GB large file") # write a small array to the end with 
open(tf_name, "wb") as f: f.seek(5368709120) d = np.arange(5) np.save(f, d) # read it back with open(tf_name, "rb") as f: f.seek(5368709120) r = np.load(f) assert_array_equal(r, d) @pytest.mark.skipif(np.dtype(np.intp).itemsize < 8, reason="test requires 64-bit system") @pytest.mark.slow def test_large_archive(): # Regression test for product of saving arrays with dimensions of array # having a product that doesn't fit in int32. See gh-7598 for details. try: a = np.empty((2**30, 2), dtype=np.uint8) except MemoryError: pytest.skip("Could not create large file") fname = os.path.join(tempdir, "large_archive") with open(fname, "wb") as f: np.savez(f, arr=a) with open(fname, "rb") as f: new_a = np.load(f)["arr"] assert_(a.shape == new_a.shape) def test_empty_npz(): # Test for gh-9989 fname = os.path.join(tempdir, "nothing.npz") np.savez(fname) np.load(fname) def test_unicode_field_names(): # gh-7391 arr = np.array([ (1, 3), (1, 2), (1, 3), (1, 2) ], dtype=[ ('int', int), (u'\N{CJK UNIFIED IDEOGRAPH-6574}\N{CJK UNIFIED IDEOGRAPH-5F62}', int) ]) fname = os.path.join(tempdir, "unicode.npy") with open(fname, 'wb') as f: format.write_array(f, arr, version=(3, 0)) with open(fname, 'rb') as f: arr2 = format.read_array(f) assert_array_equal(arr, arr2) # notifies the user that 3.0 is selected with open(fname, 'wb') as f: with assert_warns(UserWarning): format.write_array(f, arr, version=None) @pytest.mark.parametrize('dt, fail', [ (np.dtype({'names': ['a', 'b'], 'formats': [float, np.dtype('S3', metadata={'some': 'stuff'})]}), True), (np.dtype(int, metadata={'some': 'stuff'}), False), (np.dtype([('subarray', (int, (2,)))], metadata={'some': 'stuff'}), False), # recursive: metadata on the field of a dtype (np.dtype({'names': ['a', 'b'], 'formats': [ float, np.dtype({'names': ['c'], 'formats': [np.dtype(int, metadata={})]}) ]}), False) ]) def test_metadata_dtype(dt, fail): # gh-14142 arr = np.ones(10, dtype=dt) buf = BytesIO() with assert_warns(UserWarning): np.save(buf, arr) buf.seek(0) if fail: with assert_raises(ValueError): np.load(buf) else: arr2 = np.load(buf) # BUG: assert_array_equal does not check metadata from numpy.lib.format import _has_metadata assert_array_equal(arr, arr2) assert _has_metadata(arr.dtype) assert not _has_metadata(arr2.dtype)
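The tests above revolve around the .npy header format that numpy.lib.format writes and parses. As a quick orientation (a minimal sketch, not part of the test suite), here is the same round trip using only public helpers from numpy.lib.format:

from io import BytesIO

import numpy as np
from numpy.lib import format

# Write a small array, then read its magic string and 1.0 header back.
buf = BytesIO()
arr = np.arange(6, dtype='<f8').reshape(2, 3)
format.write_array(buf, arr, version=(1, 0))

buf.seek(0)
assert format.read_magic(buf) == (1, 0)
shape, fortran_order, dtype = format.read_array_header_1_0(buf)
assert (shape, fortran_order, dtype) == ((2, 3), False, np.dtype('<f8'))

# The payload that follows the header round-trips to the same array.
assert np.array_equal(format.read_array(BytesIO(buf.getvalue())), arr)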
simongibbons/numpy
numpy/lib/tests/test_format.py
numpy/core/einsumfunc.py
""" python generate_sparsetools.py Generate manual wrappers for C++ sparsetools code. Type codes used: 'i': integer scalar 'I': integer array 'T': data array 'B': boolean array 'V': std::vector<integer>* 'W': std::vector<data>* '*': indicates that the next argument is an output argument 'v': void 'l': 64-bit integer scalar See sparsetools.cxx for more details. """ import optparse import os from distutils.dep_util import newer # # List of all routines and their argument types. # # The first code indicates the return value, the rest the arguments. # # bsr.h BSR_ROUTINES = """ bsr_diagonal v iiiiiIIT*T bsr_tocsr v iiiiIIT*I*I*T bsr_scale_rows v iiiiII*TT bsr_scale_columns v iiiiII*TT bsr_sort_indices v iiii*I*I*T bsr_transpose v iiiiIIT*I*I*T bsr_matmat_pass2 v iiiiiIITIIT*I*I*T bsr_matvec v iiiiIITT*T bsr_matvecs v iiiiiIITT*T bsr_elmul_bsr v iiiiIITIIT*I*I*T bsr_eldiv_bsr v iiiiIITIIT*I*I*T bsr_plus_bsr v iiiiIITIIT*I*I*T bsr_minus_bsr v iiiiIITIIT*I*I*T bsr_maximum_bsr v iiiiIITIIT*I*I*T bsr_minimum_bsr v iiiiIITIIT*I*I*T bsr_ne_bsr v iiiiIITIIT*I*I*B bsr_lt_bsr v iiiiIITIIT*I*I*B bsr_gt_bsr v iiiiIITIIT*I*I*B bsr_le_bsr v iiiiIITIIT*I*I*B bsr_ge_bsr v iiiiIITIIT*I*I*B """ # csc.h CSC_ROUTINES = """ csc_diagonal v iiiIIT*T csc_tocsr v iiIIT*I*I*T csc_matmat_pass1 v iiIIII*I csc_matmat_pass2 v iiIITIIT*I*I*T csc_matvec v iiIITT*T csc_matvecs v iiiIITT*T csc_elmul_csc v iiIITIIT*I*I*T csc_eldiv_csc v iiIITIIT*I*I*T csc_plus_csc v iiIITIIT*I*I*T csc_minus_csc v iiIITIIT*I*I*T csc_maximum_csc v iiIITIIT*I*I*T csc_minimum_csc v iiIITIIT*I*I*T csc_ne_csc v iiIITIIT*I*I*B csc_lt_csc v iiIITIIT*I*I*B csc_gt_csc v iiIITIIT*I*I*B csc_le_csc v iiIITIIT*I*I*B csc_ge_csc v iiIITIIT*I*I*B """ # csr.h CSR_ROUTINES = """ csr_matmat_pass1 v iiIIII*I csr_matmat_pass2 v iiIITIIT*I*I*T csr_diagonal v iiiIIT*T csr_tocsc v iiIIT*I*I*T csr_tobsr v iiiiIIT*I*I*T csr_todense v iiIIT*T csr_matvec v iiIITT*T csr_matvecs v iiiIITT*T csr_elmul_csr v iiIITIIT*I*I*T csr_eldiv_csr v iiIITIIT*I*I*T csr_plus_csr v iiIITIIT*I*I*T csr_minus_csr v iiIITIIT*I*I*T csr_maximum_csr v iiIITIIT*I*I*T csr_minimum_csr v iiIITIIT*I*I*T csr_ne_csr v iiIITIIT*I*I*B csr_lt_csr v iiIITIIT*I*I*B csr_gt_csr v iiIITIIT*I*I*B csr_le_csr v iiIITIIT*I*I*B csr_ge_csr v iiIITIIT*I*I*B csr_scale_rows v iiII*TT csr_scale_columns v iiII*TT csr_sort_indices v iI*I*T csr_eliminate_zeros v ii*I*I*T csr_sum_duplicates v ii*I*I*T get_csr_submatrix v iiIITiiii*V*V*W csr_row_index v iIIIT*I*T csr_row_slice v iiiIIT*I*T csr_column_index1 v iIiiII*I*I csr_column_index2 v IIiIT*I*T csr_sample_values v iiIITiII*T csr_count_blocks i iiiiII csr_sample_offsets i iiIIiII*I expandptr v iI*I test_throw_error i csr_has_sorted_indices i iII csr_has_canonical_format i iII """ # coo.h, dia.h, csgraph.h OTHER_ROUTINES = """ coo_tocsr v iiiIIT*I*I*T coo_todense v iilIIT*Ti coo_matvec v lIITT*T dia_matvec v iiiiITT*T cs_graph_components i iII*I """ # List of compilation units COMPILATION_UNITS = [ ('bsr', BSR_ROUTINES), ('csr', CSR_ROUTINES), ('csc', CSC_ROUTINES), ('other', OTHER_ROUTINES), ] # # List of the supported index typenums and the corresponding C++ types # I_TYPES = [ ('NPY_INT32', 'npy_int32'), ('NPY_INT64', 'npy_int64'), ] # # List of the supported data typenums and the corresponding C++ types # T_TYPES = [ ('NPY_BOOL', 'npy_bool_wrapper'), ('NPY_BYTE', 'npy_byte'), ('NPY_UBYTE', 'npy_ubyte'), ('NPY_SHORT', 'npy_short'), ('NPY_USHORT', 'npy_ushort'), ('NPY_INT', 'npy_int'), ('NPY_UINT', 'npy_uint'), ('NPY_LONG', 'npy_long'), ('NPY_ULONG', 'npy_ulong'), 
('NPY_LONGLONG', 'npy_longlong'), ('NPY_ULONGLONG', 'npy_ulonglong'), ('NPY_FLOAT', 'npy_float'), ('NPY_DOUBLE', 'npy_double'), ('NPY_LONGDOUBLE', 'npy_longdouble'), ('NPY_CFLOAT', 'npy_cfloat_wrapper'), ('NPY_CDOUBLE', 'npy_cdouble_wrapper'), ('NPY_CLONGDOUBLE', 'npy_clongdouble_wrapper'), ] # # Code templates # THUNK_TEMPLATE = """ static PY_LONG_LONG %(name)s_thunk(int I_typenum, int T_typenum, void **a) { %(thunk_content)s } """ METHOD_TEMPLATE = """ NPY_VISIBILITY_HIDDEN PyObject * %(name)s_method(PyObject *self, PyObject *args) { return call_thunk('%(ret_spec)s', "%(arg_spec)s", %(name)s_thunk, args); } """ GET_THUNK_CASE_TEMPLATE = """ static int get_thunk_case(int I_typenum, int T_typenum) { %(content)s; return -1; } """ # # Code generation # def get_thunk_type_set(): """ Get a list containing cartesian product of data types, plus a getter routine. Returns ------- i_types : list [(j, I_typenum, None, I_type, None), ...] Pairing of index type numbers and the corresponding C++ types, and an unique index `j`. This is for routines that are parameterized only by I but not by T. it_types : list [(j, I_typenum, T_typenum, I_type, T_type), ...] Same as `i_types`, but for routines parameterized both by T and I. getter_code : str C++ code for a function that takes I_typenum, T_typenum and returns the unique index corresponding to the lists, or -1 if no match was found. """ it_types = [] i_types = [] j = 0 getter_code = " if (0) {}" for I_typenum, I_type in I_TYPES: piece = """ else if (I_typenum == %(I_typenum)s) { if (T_typenum == -1) { return %(j)s; }""" getter_code += piece % dict(I_typenum=I_typenum, j=j) i_types.append((j, I_typenum, None, I_type, None)) j += 1 for T_typenum, T_type in T_TYPES: piece = """ else if (T_typenum == %(T_typenum)s) { return %(j)s; }""" getter_code += piece % dict(T_typenum=T_typenum, j=j) it_types.append((j, I_typenum, T_typenum, I_type, T_type)) j += 1 getter_code += """ }""" return i_types, it_types, GET_THUNK_CASE_TEMPLATE % dict(content=getter_code) def parse_routine(name, args, types): """ Generate thunk and method code for a given routine. Parameters ---------- name : str Name of the C++ routine args : str Argument list specification (in format explained above) types : list List of types to instantiate, as returned `get_thunk_type_set` """ ret_spec = args[0] arg_spec = args[1:] def get_arglist(I_type, T_type): """ Generate argument list for calling the C++ function """ args = [] next_is_writeable = False j = 0 for t in arg_spec: const = '' if next_is_writeable else 'const ' next_is_writeable = False if t == '*': next_is_writeable = True continue elif t == 'i': args.append("*(%s*)a[%d]" % (const + I_type, j)) elif t == 'I': args.append("(%s*)a[%d]" % (const + I_type, j)) elif t == 'T': args.append("(%s*)a[%d]" % (const + T_type, j)) elif t == 'B': args.append("(npy_bool_wrapper*)a[%d]" % (j,)) elif t == 'V': if const: raise ValueError("'V' argument must be an output arg") args.append("(std::vector<%s>*)a[%d]" % (I_type, j,)) elif t == 'W': if const: raise ValueError("'W' argument must be an output arg") args.append("(std::vector<%s>*)a[%d]" % (T_type, j,)) elif t == 'l': args.append("*(%snpy_int64*)a[%d]" % (const, j)) else: raise ValueError("Invalid spec character %r" % (t,)) j += 1 return ", ".join(args) # Generate thunk code: a giant switch statement with different # type combinations inside. 
thunk_content = """int j = get_thunk_case(I_typenum, T_typenum); switch (j) {""" for j, I_typenum, T_typenum, I_type, T_type in types: arglist = get_arglist(I_type, T_type) if T_type is None: dispatch = "%s" % (I_type,) else: dispatch = "%s,%s" % (I_type, T_type) if 'B' in arg_spec: dispatch += ",npy_bool_wrapper" piece = """ case %(j)s:""" if ret_spec == 'v': piece += """ (void)%(name)s<%(dispatch)s>(%(arglist)s); return 0;""" else: piece += """ return %(name)s<%(dispatch)s>(%(arglist)s);""" thunk_content += piece % dict(j=j, I_type=I_type, T_type=T_type, I_typenum=I_typenum, T_typenum=T_typenum, arglist=arglist, name=name, dispatch=dispatch) thunk_content += """ default: throw std::runtime_error("internal error: invalid argument typenums"); }""" thunk_code = THUNK_TEMPLATE % dict(name=name, thunk_content=thunk_content) # Generate method code method_code = METHOD_TEMPLATE % dict(name=name, ret_spec=ret_spec, arg_spec=arg_spec) return thunk_code, method_code def main(): p = optparse.OptionParser(usage=(__doc__ or '').strip()) p.add_option("--no-force", action="store_false", dest="force", default=True) options, args = p.parse_args() names = [] i_types, it_types, getter_code = get_thunk_type_set() # Generate *_impl.h for each compilation unit for unit_name, routines in COMPILATION_UNITS: thunks = [] methods = [] # Generate thunks and methods for all routines for line in routines.splitlines(): line = line.strip() if not line or line.startswith('#'): continue try: name, args = line.split(None, 1) except ValueError: raise ValueError("Malformed line: %r" % (line,)) args = "".join(args.split()) if 't' in args or 'T' in args: thunk, method = parse_routine(name, args, it_types) else: thunk, method = parse_routine(name, args, i_types) if name in names: raise ValueError("Duplicate routine %r" % (name,)) names.append(name) thunks.append(thunk) methods.append(method) # Produce output dst = os.path.join(os.path.dirname(__file__), 'sparsetools', unit_name + '_impl.h') if newer(__file__, dst) or options.force: print("[generate_sparsetools] generating %r" % (dst,)) with open(dst, 'w') as f: write_autogen_blurb(f) f.write(getter_code) for thunk in thunks: f.write(thunk) for method in methods: f.write(method) else: print("[generate_sparsetools] %r already up-to-date" % (dst,)) # Generate code for method struct method_defs = "" for name in names: method_defs += "NPY_VISIBILITY_HIDDEN PyObject *%s_method(PyObject *, PyObject *);\n" % (name,) method_struct = """\nstatic struct PyMethodDef sparsetools_methods[] = {""" for name in names: method_struct += """ {"%(name)s", (PyCFunction)%(name)s_method, METH_VARARGS, NULL},""" % dict(name=name) method_struct += """ {NULL, NULL, 0, NULL} };""" # Produce sparsetools_impl.h dst = os.path.join(os.path.dirname(__file__), 'sparsetools', 'sparsetools_impl.h') if newer(__file__, dst) or options.force: print("[generate_sparsetools] generating %r" % (dst,)) with open(dst, 'w') as f: write_autogen_blurb(f) f.write(method_defs) f.write(method_struct) else: print("[generate_sparsetools] %r already up-to-date" % (dst,)) def write_autogen_blurb(stream): stream.write("""\ /* This file is autogenerated by generate_sparsetools.py * Do not edit manually or check into VCS. */ """) if __name__ == "__main__": main()
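As a hedged illustration of the signature mini-language documented at the top of this generator (standalone code, not something the generator emits), the snippet below splits one routine line the same way main() does and walks the argument codes, treating '*' as a marker that the next argument is an output:

spec = "csr_matvec v iiIITT*T"
name, codes = spec.split(None, 1)
codes = "".join(codes.split())
ret_spec, arg_spec = codes[0], codes[1:]

# Pair each argument code with whether it is an output argument.
outputs = []
next_is_output = False
for c in arg_spec:
    if c == '*':
        next_is_output = True
        continue
    outputs.append((c, next_is_output))
    next_is_output = False

assert name == "csr_matvec" and ret_spec == "v"
assert outputs == [('i', False), ('i', False), ('I', False),
                   ('I', False), ('T', False), ('T', False), ('T', True)]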
from __future__ import division, print_function, absolute_import import sys import threading import numpy as np from numpy import array, finfo, arange, eye, all, unique, ones, dot import numpy.random as random from numpy.testing import ( assert_array_almost_equal, assert_almost_equal, assert_equal, assert_array_equal, assert_, assert_allclose, assert_warns) import pytest from pytest import raises as assert_raises import scipy.linalg from scipy.linalg import norm, inv from scipy.sparse import (spdiags, SparseEfficiencyWarning, csc_matrix, csr_matrix, identity, isspmatrix, dok_matrix, lil_matrix, bsr_matrix) from scipy.sparse.linalg import SuperLU from scipy.sparse.linalg.dsolve import (spsolve, use_solver, splu, spilu, MatrixRankWarning, _superlu, spsolve_triangular, factorized) from scipy._lib._numpy_compat import suppress_warnings sup_sparse_efficiency = suppress_warnings() sup_sparse_efficiency.filter(SparseEfficiencyWarning) # scikits.umfpack is not a SciPy dependency but it is optionally used in # dsolve, so check whether it's available try: import scikits.umfpack as umfpack has_umfpack = True except ImportError: has_umfpack = False def toarray(a): if isspmatrix(a): return a.toarray() else: return a class TestFactorized(object): def setup_method(self): n = 5 d = arange(n) + 1 self.n = n self.A = spdiags((d, 2*d, d[::-1]), (-3, 0, 5), n, n).tocsc() random.seed(1234) def _check_singular(self): A = csc_matrix((5,5), dtype='d') b = ones(5) assert_array_almost_equal(0. * b, factorized(A)(b)) def _check_non_singular(self): # Make a diagonal dominant, to make sure it is not singular n = 5 a = csc_matrix(random.rand(n, n)) b = ones(n) expected = splu(a).solve(b) assert_array_almost_equal(factorized(a)(b), expected) def test_singular_without_umfpack(self): use_solver(useUmfpack=False) with assert_raises(RuntimeError, match="Factor is exactly singular"): self._check_singular() @pytest.mark.skipif(not has_umfpack, reason="umfpack not available") def test_singular_with_umfpack(self): use_solver(useUmfpack=True) with suppress_warnings() as sup: sup.filter(RuntimeWarning, "divide by zero encountered in double_scalars") assert_warns(umfpack.UmfpackWarning, self._check_singular) def test_non_singular_without_umfpack(self): use_solver(useUmfpack=False) self._check_non_singular() @pytest.mark.skipif(not has_umfpack, reason="umfpack not available") def test_non_singular_with_umfpack(self): use_solver(useUmfpack=True) self._check_non_singular() def test_cannot_factorize_nonsquare_matrix_without_umfpack(self): use_solver(useUmfpack=False) msg = "can only factor square matrices" with assert_raises(ValueError, match=msg): factorized(self.A[:, :4]) @pytest.mark.skipif(not has_umfpack, reason="umfpack not available") def test_factorizes_nonsquare_matrix_with_umfpack(self): use_solver(useUmfpack=True) # does not raise factorized(self.A[:,:4]) def test_call_with_incorrectly_sized_matrix_without_umfpack(self): use_solver(useUmfpack=False) solve = factorized(self.A) b = random.rand(4) B = random.rand(4, 3) BB = random.rand(self.n, 3, 9) with assert_raises(ValueError, match="is of incompatible size"): solve(b) with assert_raises(ValueError, match="is of incompatible size"): solve(B) with assert_raises(ValueError, match="object too deep for desired array"): solve(BB) @pytest.mark.skipif(not has_umfpack, reason="umfpack not available") def test_call_with_incorrectly_sized_matrix_with_umfpack(self): use_solver(useUmfpack=True) solve = factorized(self.A) b = random.rand(4) B = random.rand(4, 3) BB = random.rand(self.n, 
3, 9) # does not raise solve(b) msg = "object too deep for desired array" with assert_raises(ValueError, match=msg): solve(B) with assert_raises(ValueError, match=msg): solve(BB) def test_call_with_cast_to_complex_without_umfpack(self): use_solver(useUmfpack=False) solve = factorized(self.A) b = random.rand(4) for t in [np.complex64, np.complex128]: with assert_raises(TypeError, match="Cannot cast array data"): solve(b.astype(t)) @pytest.mark.skipif(not has_umfpack, reason="umfpack not available") def test_call_with_cast_to_complex_with_umfpack(self): use_solver(useUmfpack=True) solve = factorized(self.A) b = random.rand(4) for t in [np.complex64, np.complex128]: assert_warns(np.ComplexWarning, solve, b.astype(t)) @pytest.mark.skipif(not has_umfpack, reason="umfpack not available") def test_assume_sorted_indices_flag(self): # a sparse matrix with unsorted indices unsorted_inds = np.array([2, 0, 1, 0]) data = np.array([10, 16, 5, 0.4]) indptr = np.array([0, 1, 2, 4]) A = csc_matrix((data, unsorted_inds, indptr), (3, 3)) b = ones(3) # should raise when incorrectly assuming indices are sorted use_solver(useUmfpack=True, assumeSortedIndices=True) with assert_raises(RuntimeError, match="UMFPACK_ERROR_invalid_matrix"): factorized(A) # should sort indices and succeed when not assuming indices are sorted use_solver(useUmfpack=True, assumeSortedIndices=False) expected = splu(A.copy()).solve(b) assert_equal(A.has_sorted_indices, 0) assert_array_almost_equal(factorized(A)(b), expected) assert_equal(A.has_sorted_indices, 1) class TestLinsolve(object): def setup_method(self): use_solver(useUmfpack=False) def test_singular(self): A = csc_matrix((5,5), dtype='d') b = array([1, 2, 3, 4, 5],dtype='d') with suppress_warnings() as sup: sup.filter(MatrixRankWarning, "Matrix is exactly singular") x = spsolve(A, b) assert_(not np.isfinite(x).any()) def test_singular_gh_3312(self): # "Bad" test case that leads SuperLU to call LAPACK with invalid # arguments. Check that it fails moderately gracefully. ij = np.array([(17, 0), (17, 6), (17, 12), (10, 13)], dtype=np.int32) v = np.array([0.284213, 0.94933781, 0.15767017, 0.38797296]) A = csc_matrix((v, ij.T), shape=(20, 20)) b = np.arange(20) try: # should either raise a runtimeerror or return value # appropriate for singular input x = spsolve(A, b) assert_(not np.isfinite(x).any()) except RuntimeError: pass def test_twodiags(self): A = spdiags([[1, 2, 3, 4, 5], [6, 5, 8, 9, 10]], [0, 1], 5, 5) b = array([1, 2, 3, 4, 5]) # condition number of A cond_A = norm(A.todense(),2) * norm(inv(A.todense()),2) for t in ['f','d','F','D']: eps = finfo(t).eps # floating point epsilon b = b.astype(t) for format in ['csc','csr']: Asp = A.astype(t).asformat(format) x = spsolve(Asp,b) assert_(norm(b - Asp*x) < 10 * cond_A * eps) def test_bvector_smoketest(self): Adense = array([[0., 1., 1.], [1., 0., 1.], [0., 0., 1.]]) As = csc_matrix(Adense) random.seed(1234) x = random.randn(3) b = As*x x2 = spsolve(As, b) assert_array_almost_equal(x, x2) def test_bmatrix_smoketest(self): Adense = array([[0., 1., 1.], [1., 0., 1.], [0., 0., 1.]]) As = csc_matrix(Adense) random.seed(1234) x = random.randn(3, 4) Bdense = As.dot(x) Bs = csc_matrix(Bdense) x2 = spsolve(As, Bs) assert_array_almost_equal(x, x2.todense()) @sup_sparse_efficiency def test_non_square(self): # A is not square. A = ones((3, 4)) b = ones((4, 1)) assert_raises(ValueError, spsolve, A, b) # A2 and b2 have incompatible shapes. 
A2 = csc_matrix(eye(3)) b2 = array([1.0, 2.0]) assert_raises(ValueError, spsolve, A2, b2) @sup_sparse_efficiency def test_example_comparison(self): row = array([0,0,1,2,2,2]) col = array([0,2,2,0,1,2]) data = array([1,2,3,-4,5,6]) sM = csr_matrix((data,(row,col)), shape=(3,3), dtype=float) M = sM.todense() row = array([0,0,1,1,0,0]) col = array([0,2,1,1,0,0]) data = array([1,1,1,1,1,1]) sN = csr_matrix((data, (row,col)), shape=(3,3), dtype=float) N = sN.todense() sX = spsolve(sM, sN) X = scipy.linalg.solve(M, N) assert_array_almost_equal(X, sX.todense()) @sup_sparse_efficiency @pytest.mark.skipif(not has_umfpack, reason="umfpack not available") def test_shape_compatibility(self): use_solver(useUmfpack=True) A = csc_matrix([[1., 0], [0, 2]]) bs = [ [1, 6], array([1, 6]), [[1], [6]], array([[1], [6]]), csc_matrix([[1], [6]]), csr_matrix([[1], [6]]), dok_matrix([[1], [6]]), bsr_matrix([[1], [6]]), array([[1., 2., 3.], [6., 8., 10.]]), csc_matrix([[1., 2., 3.], [6., 8., 10.]]), csr_matrix([[1., 2., 3.], [6., 8., 10.]]), dok_matrix([[1., 2., 3.], [6., 8., 10.]]), bsr_matrix([[1., 2., 3.], [6., 8., 10.]]), ] for b in bs: x = np.linalg.solve(A.toarray(), toarray(b)) for spmattype in [csc_matrix, csr_matrix, dok_matrix, lil_matrix]: x1 = spsolve(spmattype(A), b, use_umfpack=True) x2 = spsolve(spmattype(A), b, use_umfpack=False) # check solution if x.ndim == 2 and x.shape[1] == 1: # interprets also these as "vectors" x = x.ravel() assert_array_almost_equal(toarray(x1), x, err_msg=repr((b, spmattype, 1))) assert_array_almost_equal(toarray(x2), x, err_msg=repr((b, spmattype, 2))) # dense vs. sparse output ("vectors" are always dense) if isspmatrix(b) and x.ndim > 1: assert_(isspmatrix(x1), repr((b, spmattype, 1))) assert_(isspmatrix(x2), repr((b, spmattype, 2))) else: assert_(isinstance(x1, np.ndarray), repr((b, spmattype, 1))) assert_(isinstance(x2, np.ndarray), repr((b, spmattype, 2))) # check output shape if x.ndim == 1: # "vector" assert_equal(x1.shape, (A.shape[1],)) assert_equal(x2.shape, (A.shape[1],)) else: # "matrix" assert_equal(x1.shape, x.shape) assert_equal(x2.shape, x.shape) A = csc_matrix((3, 3)) b = csc_matrix((1, 3)) assert_raises(ValueError, spsolve, A, b) @sup_sparse_efficiency def test_ndarray_support(self): A = array([[1., 2.], [2., 0.]]) x = array([[1., 1.], [0.5, -0.5]]) b = array([[2., 0.], [2., 2.]]) assert_array_almost_equal(x, spsolve(A, b)) def test_gssv_badinput(self): N = 10 d = arange(N) + 1.0 A = spdiags((d, 2*d, d[::-1]), (-3, 0, 5), N, N) for spmatrix in (csc_matrix, csr_matrix): A = spmatrix(A) b = np.arange(N) def not_c_contig(x): return x.repeat(2)[::2] def not_1dim(x): return x[:,None] def bad_type(x): return x.astype(bool) def too_short(x): return x[:-1] badops = [not_c_contig, not_1dim, bad_type, too_short] for badop in badops: msg = "%r %r" % (spmatrix, badop) # Not C-contiguous assert_raises((ValueError, TypeError), _superlu.gssv, N, A.nnz, badop(A.data), A.indices, A.indptr, b, int(spmatrix == csc_matrix), err_msg=msg) assert_raises((ValueError, TypeError), _superlu.gssv, N, A.nnz, A.data, badop(A.indices), A.indptr, b, int(spmatrix == csc_matrix), err_msg=msg) assert_raises((ValueError, TypeError), _superlu.gssv, N, A.nnz, A.data, A.indices, badop(A.indptr), b, int(spmatrix == csc_matrix), err_msg=msg) def test_sparsity_preservation(self): ident = csc_matrix([ [1, 0, 0], [0, 1, 0], [0, 0, 1]]) b = csc_matrix([ [0, 1], [1, 0], [0, 0]]) x = spsolve(ident, b) assert_equal(ident.nnz, 3) assert_equal(b.nnz, 2) assert_equal(x.nnz, 2) assert_allclose(x.A, b.A, 
atol=1e-12, rtol=1e-12) def test_dtype_cast(self): A_real = scipy.sparse.csr_matrix([[1, 2, 0], [0, 0, 3], [4, 0, 5]]) A_complex = scipy.sparse.csr_matrix([[1, 2, 0], [0, 0, 3], [4, 0, 5 + 1j]]) b_real = np.array([1,1,1]) b_complex = np.array([1,1,1]) + 1j*np.array([1,1,1]) x = spsolve(A_real, b_real) assert_(np.issubdtype(x.dtype, np.floating)) x = spsolve(A_real, b_complex) assert_(np.issubdtype(x.dtype, np.complexfloating)) x = spsolve(A_complex, b_real) assert_(np.issubdtype(x.dtype, np.complexfloating)) x = spsolve(A_complex, b_complex) assert_(np.issubdtype(x.dtype, np.complexfloating)) class TestSplu(object): def setup_method(self): use_solver(useUmfpack=False) n = 40 d = arange(n) + 1 self.n = n self.A = spdiags((d, 2*d, d[::-1]), (-3, 0, 5), n, n) random.seed(1234) def _smoketest(self, spxlu, check, dtype): if np.issubdtype(dtype, np.complexfloating): A = self.A + 1j*self.A.T else: A = self.A A = A.astype(dtype) lu = spxlu(A) rng = random.RandomState(1234) # Input shapes for k in [None, 1, 2, self.n, self.n+2]: msg = "k=%r" % (k,) if k is None: b = rng.rand(self.n) else: b = rng.rand(self.n, k) if np.issubdtype(dtype, np.complexfloating): b = b + 1j*rng.rand(*b.shape) b = b.astype(dtype) x = lu.solve(b) check(A, b, x, msg) x = lu.solve(b, 'T') check(A.T, b, x, msg) x = lu.solve(b, 'H') check(A.T.conj(), b, x, msg) @sup_sparse_efficiency def test_splu_smoketest(self): self._internal_test_splu_smoketest() def _internal_test_splu_smoketest(self): # Check that splu works at all def check(A, b, x, msg=""): eps = np.finfo(A.dtype).eps r = A * x assert_(abs(r - b).max() < 1e3*eps, msg) self._smoketest(splu, check, np.float32) self._smoketest(splu, check, np.float64) self._smoketest(splu, check, np.complex64) self._smoketest(splu, check, np.complex128) @sup_sparse_efficiency def test_spilu_smoketest(self): self._internal_test_spilu_smoketest() def _internal_test_spilu_smoketest(self): errors = [] def check(A, b, x, msg=""): r = A * x err = abs(r - b).max() assert_(err < 1e-2, msg) if b.dtype in (np.float64, np.complex128): errors.append(err) self._smoketest(spilu, check, np.float32) self._smoketest(spilu, check, np.float64) self._smoketest(spilu, check, np.complex64) self._smoketest(spilu, check, np.complex128) assert_(max(errors) > 1e-5) @sup_sparse_efficiency def test_spilu_drop_rule(self): # Test passing in the drop_rule argument to spilu. A = identity(2) rules = [ b'basic,area'.decode('ascii'), # unicode b'basic,area', # ascii [b'basic', b'area'.decode('ascii')] ] for rule in rules: # Argument should be accepted assert_(isinstance(spilu(A, drop_rule=rule), SuperLU)) def test_splu_nnz0(self): A = csc_matrix((5,5), dtype='d') assert_raises(RuntimeError, splu, A) def test_spilu_nnz0(self): A = csc_matrix((5,5), dtype='d') assert_raises(RuntimeError, spilu, A) def test_splu_basic(self): # Test basic splu functionality. n = 30 rng = random.RandomState(12) a = rng.rand(n, n) a[a < 0.95] = 0 # First test with a singular matrix a[:, 0] = 0 a_ = csc_matrix(a) # Matrix is exactly singular assert_raises(RuntimeError, splu, a_) # Make a diagonal dominant, to make sure it is not singular a += 4*eye(n) a_ = csc_matrix(a) lu = splu(a_) b = ones(n) x = lu.solve(b) assert_almost_equal(dot(a, x), b) def test_splu_perm(self): # Test the permutation vectors exposed by splu. n = 30 a = random.random((n, n)) a[a < 0.95] = 0 # Make a diagonal dominant, to make sure it is not singular a += 4*eye(n) a_ = csc_matrix(a) lu = splu(a_) # Check that the permutation indices do belong to [0, n-1]. 
for perm in (lu.perm_r, lu.perm_c): assert_(all(perm > -1)) assert_(all(perm < n)) assert_equal(len(unique(perm)), len(perm)) # Now make a symmetric, and test that the two permutation vectors are # the same # Note: a += a.T relies on undefined behavior. a = a + a.T a_ = csc_matrix(a) lu = splu(a_) assert_array_equal(lu.perm_r, lu.perm_c) @pytest.mark.skipif(not hasattr(sys, 'getrefcount'), reason="no sys.getrefcount") def test_lu_refcount(self): # Test that we are keeping track of the reference count with splu. n = 30 a = random.random((n, n)) a[a < 0.95] = 0 # Make a diagonal dominant, to make sure it is not singular a += 4*eye(n) a_ = csc_matrix(a) lu = splu(a_) # And now test that we don't have a refcount bug rc = sys.getrefcount(lu) for attr in ('perm_r', 'perm_c'): perm = getattr(lu, attr) assert_equal(sys.getrefcount(lu), rc + 1) del perm assert_equal(sys.getrefcount(lu), rc) def test_bad_inputs(self): A = self.A.tocsc() assert_raises(ValueError, splu, A[:,:4]) assert_raises(ValueError, spilu, A[:,:4]) for lu in [splu(A), spilu(A)]: b = random.rand(42) B = random.rand(42, 3) BB = random.rand(self.n, 3, 9) assert_raises(ValueError, lu.solve, b) assert_raises(ValueError, lu.solve, B) assert_raises(ValueError, lu.solve, BB) assert_raises(TypeError, lu.solve, b.astype(np.complex64)) assert_raises(TypeError, lu.solve, b.astype(np.complex128)) @sup_sparse_efficiency def test_superlu_dlamch_i386_nan(self): # SuperLU 4.3 calls some functions returning floats without # declaring them. On i386@linux call convention, this fails to # clear floating point registers after call. As a result, NaN # can appear in the next floating point operation made. # # Here's a test case that triggered the issue. n = 8 d = np.arange(n) + 1 A = spdiags((d, 2*d, d[::-1]), (-3, 0, 5), n, n) A = A.astype(np.float32) spilu(A) A = A + 1j*A B = A.A assert_(not np.isnan(B).any()) @sup_sparse_efficiency def test_lu_attr(self): def check(dtype, complex_2=False): A = self.A.astype(dtype) if complex_2: A = A + 1j*A.T n = A.shape[0] lu = splu(A) # Check that the decomposition is as advertized Pc = np.zeros((n, n)) Pc[np.arange(n), lu.perm_c] = 1 Pr = np.zeros((n, n)) Pr[lu.perm_r, np.arange(n)] = 1 Ad = A.toarray() lhs = Pr.dot(Ad).dot(Pc) rhs = (lu.L * lu.U).toarray() eps = np.finfo(dtype).eps assert_allclose(lhs, rhs, atol=100*eps) check(np.float32) check(np.float64) check(np.complex64) check(np.complex128) check(np.complex64, True) check(np.complex128, True) @pytest.mark.slow @sup_sparse_efficiency def test_threads_parallel(self): oks = [] def worker(): try: self.test_splu_basic() self._internal_test_splu_smoketest() self._internal_test_spilu_smoketest() oks.append(True) except Exception: pass threads = [threading.Thread(target=worker) for k in range(20)] for t in threads: t.start() for t in threads: t.join() assert_equal(len(oks), 20) class TestSpsolveTriangular(object): def setup_method(self): use_solver(useUmfpack=False) def test_singular(self): n = 5 A = csr_matrix((n, n)) b = np.arange(n) for lower in (True, False): assert_raises(scipy.linalg.LinAlgError, spsolve_triangular, A, b, lower=lower) @sup_sparse_efficiency def test_bad_shape(self): # A is not square. A = np.zeros((3, 4)) b = ones((4, 1)) assert_raises(ValueError, spsolve_triangular, A, b) # A2 and b2 have incompatible shapes. 
A2 = csr_matrix(eye(3)) b2 = array([1.0, 2.0]) assert_raises(ValueError, spsolve_triangular, A2, b2) @sup_sparse_efficiency def test_input_types(self): A = array([[1., 0.], [1., 2.]]) b = array([[2., 0.], [2., 2.]]) for matrix_type in (array, csc_matrix, csr_matrix): x = spsolve_triangular(matrix_type(A), b, lower=True) assert_array_almost_equal(A.dot(x), b) @pytest.mark.slow @sup_sparse_efficiency def test_random(self): def random_triangle_matrix(n, lower=True): A = scipy.sparse.random(n, n, density=0.1, format='coo') if lower: A = scipy.sparse.tril(A) else: A = scipy.sparse.triu(A) A = A.tocsr(copy=False) for i in range(n): A[i, i] = np.random.rand() + 1 return A np.random.seed(1234) for lower in (True, False): for n in (10, 10**2, 10**3): A = random_triangle_matrix(n, lower=lower) for m in (1, 10): for b in (np.random.rand(n, m), np.random.randint(-9, 9, (n, m)), np.random.randint(-9, 9, (n, m)) + np.random.randint(-9, 9, (n, m)) * 1j): x = spsolve_triangular(A, b, lower=lower) assert_array_almost_equal(A.dot(x), b)
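For orientation, a minimal usage sketch of the central API under test here: solve a small sparse system with spsolve and check the residual, which is essentially what the smoke tests above assert at larger scale.

import numpy as np
from scipy.sparse import csc_matrix
from scipy.sparse.linalg import spsolve

# A nonsingular 3x3 system in CSC form.
A = csc_matrix([[3., 0., 1.],
                [0., 4., 0.],
                [1., 0., 5.]])
b = np.array([1., 2., 3.])
x = spsolve(A, b)
assert np.allclose(A.dot(x), b)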
lhilt/scipy
scipy/sparse/linalg/dsolve/tests/test_linsolve.py
scipy/sparse/generate_sparsetools.py
import json from wal_e.storage.base import BackupInfo class WABSBackupInfo(BackupInfo): def load_detail(self, conn): if self._details_loaded: return uri = "{scheme}://{bucket}/{path}".format( scheme=self.layout.scheme, bucket=self.layout.store_name(), path=self.layout.basebackup_sentinel(self)) from wal_e.blobstore import wabs data = wabs.uri_get_file(None, uri, conn=conn).decode('utf-8') data = json.loads(data) for (k, v) in list(data.items()): setattr(self, k, v) self._details_loaded = True
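load_detail above follows a simple hydration pattern: fetch the basebackup sentinel as JSON and copy each key onto the object. A self-contained sketch of just that pattern, with illustrative Fake* names that are not part of wal-e:

import json


class FakeBackupDetail(object):
    _details_loaded = False

    def load_from_blob(self, raw):
        # Copy every key of the JSON sentinel onto the instance.
        for k, v in json.loads(raw.decode('utf-8')).items():
            setattr(self, k, v)
        self._details_loaded = True


info = FakeBackupDetail()
info.load_from_blob(b'{"expanded_size_bytes": 1024, "spec": {"version": 1}}')
assert info.expanded_size_bytes == 1024 and info._details_loaded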
import pytest from wal_e import exception from wal_e import worker class FakeTarPartition(object): """Implements enough protocol to test concurrency semantics.""" def __init__(self, num_members, explosive=False): self._explosive = explosive self.num_members = num_members def __len__(self): return self.num_members class FakeUploader(object): """A no-op uploader that makes affordance for fault injection.""" def __call__(self, tpart): if tpart._explosive: raise tpart._explosive return tpart class Explosion(Exception): """Marker type of injected faults.""" pass def make_pool(max_concurrency, max_members): """Set up a pool with a FakeUploader""" return worker.TarUploadPool(FakeUploader(), max_concurrency, max_members) def test_simple(): """Simple case of uploading one partition.""" pool = make_pool(1, 1) pool.put(FakeTarPartition(1)) pool.join() def test_not_enough_resources(): """Detect if a too-large segment can never complete.""" pool = make_pool(1, 1) with pytest.raises(exception.UserCritical): pool.put(FakeTarPartition(2)) pool.join() def test_simple_concurrency(): """Try a pool that cannot execute all submitted jobs at once.""" pool = make_pool(1, 1) for i in range(3): pool.put(FakeTarPartition(1)) pool.join() def test_fault_midstream(): """Test if a previous upload fault is detected in calling .put. This case is seen while pipelining many uploads in excess of the maximum concurrency. NB: This test is critical as to prevent failed uploads from failing to notify a caller that the entire backup is incomplete. """ pool = make_pool(1, 1) # Set up upload doomed to fail. tpart = FakeTarPartition(1, explosive=Explosion('Boom')) pool.put(tpart) # Try to receive the error through adding another upload. tpart = FakeTarPartition(1) with pytest.raises(Explosion): pool.put(tpart) def test_fault_join(): """Test if a fault is detected when .join is used. This case is seen at the end of a series of uploads. NB: This test is critical as to prevent failed uploads from failing to notify a caller that the entire backup is incomplete. """ pool = make_pool(1, 1) # Set up upload doomed to fail. tpart = FakeTarPartition(1, explosive=Explosion('Boom')) pool.put(tpart) # Try to receive the error while finishing up. with pytest.raises(Explosion): pool.join() def test_put_after_join(): """New jobs cannot be submitted after a .join This is mostly a re-check to detect programming errors. """ pool = make_pool(1, 1) pool.join() with pytest.raises(exception.UserCritical): pool.put(FakeTarPartition(1)) def test_pool_concurrent_success(): pool = make_pool(4, 4) for i in range(30): pool.put(FakeTarPartition(1)) pool.join() def test_pool_concurrent_failure(): pool = make_pool(4, 4) parts = [FakeTarPartition(1) for i in range(30)] exc = Explosion('boom') parts[27]._explosive = exc with pytest.raises(Explosion) as e: for part in parts: pool.put(part) pool.join() assert e.value is exc
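The two fault tests above pin down a deferred-error contract: a worker failure must surface on the next put() or at join(). Below is a minimal synchronous model of that contract, independent of wal-e's actual pool implementation:

class TinyPool(object):
    """Records the first job failure and re-raises it on later calls."""

    def __init__(self, fn):
        self._fn = fn
        self._error = None

    def put(self, job):
        if self._error is not None:
            raise self._error
        try:
            self._fn(job)
        except Exception as e:
            # Defer the failure; the caller learns of it on the next call.
            self._error = e

    def join(self):
        if self._error is not None:
            raise self._error


def _work(job):
    if job == 'boom':
        raise RuntimeError('boom')


pool = TinyPool(_work)
pool.put('boom')   # failure is recorded, not raised here
try:
    pool.put('ok')
    assert False, 'expected the earlier failure to surface'
except RuntimeError:
    pass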
tenstartups/wal-e
tests/test_tar_upload_pool.py
wal_e/storage/wabs_storage.py
from citrination_client.models.columns.base import BaseColumn
from citrination_client.base.errors import CitrinationClientError


class RealColumn(BaseColumn):
    """
    A real column configuration for a data view. Parameterized with the
    basic column options and an upper and lower bound on the acceptable
    values for the column.
    """

    TYPE = "Real"

    def __init__(self, name, role, group_by_key=False, units=None,
                 lower_bound=None, upper_bound=None):
        """
        Constructor.

        :param name: The name of the column
        :type name: str
        :param role: The role the column will play in machine learning:
            "Input", "Output", "Latent Variable", "Ignore"
        :type role: str
        :param group_by_key: Whether or not this column should be used for
            grouping during cross validation
        :type group_by_key: bool
        :param units: Optionally, the units for the column
        :type units: str
        :param lower_bound: The lower bound for valid values for this column
        :type lower_bound: float
        :param upper_bound: The upper bound for valid values for this column
        :type upper_bound: float
        """
        super(RealColumn, self).__init__(name=name,
                                         role=role,
                                         group_by_key=group_by_key,
                                         units=units)

        # Default bounds to None so assignment goes through the validating
        # setter methods below
        self._lower_bound = None
        self._upper_bound = None

        self.lower_bound = lower_bound
        self.upper_bound = upper_bound

    def build_options(self):
        # Ensure that infinity values are serialized
        # into "Infinity" or "-Infinity", as the Citrination
        # server expects
        if self.upper_bound == float("inf"):
            upper_bound = "Infinity"
        else:
            upper_bound = self.upper_bound

        if self.lower_bound == float("-inf"):
            lower_bound = "-Infinity"
        else:
            lower_bound = self.lower_bound

        return {
            "lower_bound": lower_bound,
            "upper_bound": upper_bound
        }

    @property
    def lower_bound(self):
        return self._lower_bound

    @lower_bound.setter
    def lower_bound(self, value):
        self._lower_bound = self._cast_and_validate_float(value, "lower_bound")
        self._validate_bounds()

    @lower_bound.deleter
    def lower_bound(self):
        self._lower_bound = None

    @property
    def upper_bound(self):
        return self._upper_bound

    @upper_bound.setter
    def upper_bound(self, value):
        self._upper_bound = self._cast_and_validate_float(value, "upper_bound")
        self._validate_bounds()

    @upper_bound.deleter
    def upper_bound(self):
        self._upper_bound = None

    def _both_bounds_present(self):
        return self.lower_bound is not None and self.upper_bound is not None

    def _validate_bounds(self):
        if self._both_bounds_present() and self._lower_bound > self._upper_bound:
            raise CitrinationClientError(
                "When constructing a RealColumn, lower_bound must be less "
                "than upper_bound")

    def _cast_and_validate_float(self, value, attr_name):
        # Bounds are optional (the constructor defaults them to None and the
        # deleters reset them to None), so pass None through uncast.
        if value is None:
            return None
        try:
            return float(value)
        except (TypeError, ValueError):
            # float() raises TypeError for non-numeric, non-string inputs;
            # catching only ValueError let those escape uncaught.
            raise CitrinationClientError(
                "For a RealColumn, {} must be castable as a float".format(attr_name))
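A short usage sketch of the class above (assuming RealColumn and CitrinationClientError are in scope as defined): build_options serializes infinite bounds into the "Infinity"/"-Infinity" strings the server expects, and inverted bounds are rejected at assignment time.

col = RealColumn(name="Property Band gap", role="Output", units="eV",
                 lower_bound=float("-inf"), upper_bound=float("inf"))
assert col.build_options() == {"lower_bound": "-Infinity",
                               "upper_bound": "Infinity"}

try:
    RealColumn(name="x", role="Input", lower_bound=2.0, upper_bound=1.0)
    assert False, "expected a CitrinationClientError for inverted bounds"
except CitrinationClientError:
    pass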
import pytest

from citrination_client.models.columns import *
from citrination_client.base.errors import *


class TestBaseColumn(object):

    @classmethod
    def setup_class(cls):
        cls.name = "Property Band gap"
        cls.role = "Input"
        cls.group_by_key = False
        cls.units = "eV"

    def test_base_column_validates_role(self):
        # pytest.raises states the intent directly and fails the test itself
        # if no exception is raised, replacing the try/assert False/except
        # pattern; behavior is unchanged.
        with pytest.raises(CitrinationClientError):
            BaseColumn(name=self.name, role="asdf")
CitrineInformatics/python-citrination-client
citrination_client/models/columns/tests/test_base.py
citrination_client/models/columns/real.py
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """External Account Credentials. This module provides credentials that exchange workload identity pool external credentials for Google access tokens. This facilitates accessing Google Cloud Platform resources from on-prem and non-Google Cloud platforms (e.g. AWS, Microsoft Azure, OIDC identity providers), using native credentials retrieved from the current environment without the need to copy, save and manage long-lived service account credentials. Specifically, this is intended to use access tokens acquired using the GCP STS token exchange endpoint following the `OAuth 2.0 Token Exchange`_ spec. .. _OAuth 2.0 Token Exchange: https://tools.ietf.org/html/rfc8693 """ import abc import copy import datetime import json import re import six from google.auth import _helpers from google.auth import credentials from google.auth import exceptions from google.auth import impersonated_credentials from google.oauth2 import sts from google.oauth2 import utils # External account JSON type identifier. _EXTERNAL_ACCOUNT_JSON_TYPE = "external_account" # The token exchange grant_type used for exchanging credentials. _STS_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:token-exchange" # The token exchange requested_token_type. This is always an access_token. _STS_REQUESTED_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token" # Cloud resource manager URL used to retrieve project information. _CLOUD_RESOURCE_MANAGER = "https://cloudresourcemanager.googleapis.com/v1/projects/" @six.add_metaclass(abc.ABCMeta) class Credentials(credentials.Scoped, credentials.CredentialsWithQuotaProject): """Base class for all external account credentials. This is used to instantiate Credentials for exchanging external account credentials for Google access token and authorizing requests to Google APIs. The base class implements the common logic for exchanging external account credentials for Google access tokens. """ def __init__( self, audience, subject_token_type, token_url, credential_source, service_account_impersonation_url=None, client_id=None, client_secret=None, quota_project_id=None, scopes=None, default_scopes=None, ): """Instantiates an external account credentials object. Args: audience (str): The STS audience field. subject_token_type (str): The subject token type. token_url (str): The STS endpoint URL. credential_source (Mapping): The credential source dictionary. service_account_impersonation_url (Optional[str]): The optional service account impersonation generateAccessToken URL. client_id (Optional[str]): The optional client ID. client_secret (Optional[str]): The optional client secret. quota_project_id (Optional[str]): The optional quota project ID. scopes (Optional[Sequence[str]]): Optional scopes to request during the authorization grant. default_scopes (Optional[Sequence[str]]): Default scopes passed by a Google client library. Use 'scopes' for user-defined scopes. 
        Raises:
            google.auth.exceptions.RefreshError: If the generateAccessToken
                endpoint returned an error.
        """
        super(Credentials, self).__init__()
        self._audience = audience
        self._subject_token_type = subject_token_type
        self._token_url = token_url
        self._credential_source = credential_source
        self._service_account_impersonation_url = service_account_impersonation_url
        self._client_id = client_id
        self._client_secret = client_secret
        self._quota_project_id = quota_project_id
        self._scopes = scopes
        self._default_scopes = default_scopes

        if self._client_id:
            self._client_auth = utils.ClientAuthentication(
                utils.ClientAuthType.basic, self._client_id, self._client_secret
            )
        else:
            self._client_auth = None
        self._sts_client = sts.Client(self._token_url, self._client_auth)

        if self._service_account_impersonation_url:
            self._impersonated_credentials = self._initialize_impersonated_credentials()
        else:
            self._impersonated_credentials = None
        self._project_id = None

    @property
    def info(self):
        """Generates the dictionary representation of the current credentials.

        Returns:
            Mapping: The dictionary representation of the credentials. This is the
                reverse of "from_info" defined on the subclasses of this class. It is
                useful for serializing the current credentials so it can be
                deserialized later.
        """
        config_info = {
            "type": _EXTERNAL_ACCOUNT_JSON_TYPE,
            "audience": self._audience,
            "subject_token_type": self._subject_token_type,
            "token_url": self._token_url,
            "service_account_impersonation_url": self._service_account_impersonation_url,
            "credential_source": copy.deepcopy(self._credential_source),
            "quota_project_id": self._quota_project_id,
            "client_id": self._client_id,
            "client_secret": self._client_secret,
        }
        return {key: value for key, value in config_info.items() if value is not None}

    @property
    def service_account_email(self):
        """Returns the service account email if service account impersonation is used.

        Returns:
            Optional[str]: The service account email if impersonation is used. Otherwise
                None is returned.
        """
        if self._service_account_impersonation_url:
            # Parse email from URL. The format looks as follows:
            # https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken
            url = self._service_account_impersonation_url
            start_index = url.rfind("/")
            end_index = url.find(":generateAccessToken")
            if start_index != -1 and end_index != -1 and start_index < end_index:
                start_index = start_index + 1
                return url[start_index:end_index]
        return None

    @property
    def is_user(self):
        """Returns whether the credentials represent a user (True) or workload (False).

        Workloads behave similarly to service accounts. Currently workloads will use
        service account impersonation but will eventually not require impersonation.
        As a result, this property is more reliable than the service account email
        property in determining if the credentials represent a user or workload.

        Returns:
            bool: True if the credentials represent a user. False if they represent a
                workload.
        """
        # If service account impersonation is used, the credentials will always
        # represent a service account.
        if self._service_account_impersonation_url:
            return False

        # Workforce pools representing users have the following audience format:
        # //iam.googleapis.com/locations/$location/workforcePools/$poolId/providers/$providerId
        p = re.compile(r"//iam\.googleapis\.com/locations/[^/]+/workforcePools/")
        if p.match(self._audience):
            return True
        return False

    @property
    def requires_scopes(self):
        """Checks if the credentials require scopes.
Returns: bool: True if there are no scopes set otherwise False. """ return not self._scopes and not self._default_scopes @property def project_number(self): """Optional[str]: The project number corresponding to the workload identity pool.""" # STS audience pattern: # //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... components = self._audience.split("/") try: project_index = components.index("projects") if project_index + 1 < len(components): return components[project_index + 1] or None except ValueError: return None @_helpers.copy_docstring(credentials.Scoped) def with_scopes(self, scopes, default_scopes=None): return self.__class__( audience=self._audience, subject_token_type=self._subject_token_type, token_url=self._token_url, credential_source=self._credential_source, service_account_impersonation_url=self._service_account_impersonation_url, client_id=self._client_id, client_secret=self._client_secret, quota_project_id=self._quota_project_id, scopes=scopes, default_scopes=default_scopes, ) @abc.abstractmethod def retrieve_subject_token(self, request): """Retrieves the subject token using the credential_source object. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. Returns: str: The retrieved subject token. """ # pylint: disable=missing-raises-doc # (pylint doesn't recognize that this is abstract) raise NotImplementedError("retrieve_subject_token must be implemented") def get_project_id(self, request): """Retrieves the project ID corresponding to the workload identity pool. When not determinable, None is returned. This is introduced to support the current pattern of using the Auth library: credentials, project_id = google.auth.default() The resource may not have permission (resourcemanager.projects.get) to call this API or the required scopes may not be selected: https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes Args: request (google.auth.transport.Request): A callable used to make HTTP requests. Returns: Optional[str]: The project ID corresponding to the workload identity pool if determinable. """ if self._project_id: # If already retrieved, return the cached project ID value. return self._project_id scopes = self._scopes if self._scopes is not None else self._default_scopes # Scopes are required in order to retrieve a valid access token. if self.project_number and scopes: headers = {} url = _CLOUD_RESOURCE_MANAGER + self.project_number self.before_request(request, "GET", url, headers) response = request(url=url, method="GET", headers=headers) response_body = ( response.data.decode("utf-8") if hasattr(response.data, "decode") else response.data ) response_data = json.loads(response_body) if response.status == 200: # Cache result as this field is immutable. 
                self._project_id = response_data.get("projectId")
                return self._project_id

        return None

    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        scopes = self._scopes if self._scopes is not None else self._default_scopes
        if self._impersonated_credentials:
            self._impersonated_credentials.refresh(request)
            self.token = self._impersonated_credentials.token
            self.expiry = self._impersonated_credentials.expiry
        else:
            now = _helpers.utcnow()
            response_data = self._sts_client.exchange_token(
                request=request,
                grant_type=_STS_GRANT_TYPE,
                subject_token=self.retrieve_subject_token(request),
                subject_token_type=self._subject_token_type,
                audience=self._audience,
                scopes=scopes,
                requested_token_type=_STS_REQUESTED_TOKEN_TYPE,
            )
            self.token = response_data.get("access_token")
            lifetime = datetime.timedelta(seconds=response_data.get("expires_in"))
            self.expiry = now + lifetime

    @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):
        # Return copy of instance with the provided quota project ID.
        return self.__class__(
            audience=self._audience,
            subject_token_type=self._subject_token_type,
            token_url=self._token_url,
            credential_source=self._credential_source,
            service_account_impersonation_url=self._service_account_impersonation_url,
            client_id=self._client_id,
            client_secret=self._client_secret,
            quota_project_id=quota_project_id,
            scopes=self._scopes,
            default_scopes=self._default_scopes,
        )

    def _initialize_impersonated_credentials(self):
        """Generates impersonated credentials.

        For more details, see `projects.serviceAccounts.generateAccessToken`_.

        .. _projects.serviceAccounts.generateAccessToken:
            https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateAccessToken

        Returns:
            impersonated_credentials.Credentials: The impersonated credentials object.

        Raises:
            google.auth.exceptions.RefreshError: If the generateAccessToken
                endpoint returned an error.
        """
        # Return copy of instance with no service account impersonation.
        source_credentials = self.__class__(
            audience=self._audience,
            subject_token_type=self._subject_token_type,
            token_url=self._token_url,
            credential_source=self._credential_source,
            service_account_impersonation_url=None,
            client_id=self._client_id,
            client_secret=self._client_secret,
            quota_project_id=self._quota_project_id,
            scopes=self._scopes,
            default_scopes=self._default_scopes,
        )

        # Determine target_principal.
        target_principal = self.service_account_email
        if not target_principal:
            raise exceptions.RefreshError(
                "Unable to determine target principal from service account impersonation URL."
            )
        scopes = self._scopes if self._scopes is not None else self._default_scopes
        # Initialize and return impersonated credentials.
        return impersonated_credentials.Credentials(
            source_credentials=source_credentials,
            target_principal=target_principal,
            target_scopes=scopes,
            quota_project_id=self._quota_project_id,
            iam_endpoint_override=self._service_account_impersonation_url,
        )
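# --- Illustrative sketch (not part of the library): a minimal concrete
# subclass of the abstract Credentials base above. The only abstract hook
# is retrieve_subject_token(); here we assume the external subject token is
# a plain OIDC JWT sitting in a local file. The audience, URLs and file
# path below are hypothetical placeholders, not real configuration.


class FileSourcedCredentials(Credentials):
    """Reads the subject token from the file named in credential_source."""

    def retrieve_subject_token(self, request):
        # credential_source is the mapping passed to __init__; we assume it
        # carries a "file" key pointing at the externally minted token.
        with open(self._credential_source["file"]) as token_file:
            return token_file.read().strip()


example_credentials = FileSourcedCredentials(
    audience=("//iam.googleapis.com/projects/123456/locations/global/"
              "workloadIdentityPools/my-pool/providers/my-provider"),
    subject_token_type="urn:ietf:params:oauth:token-type:jwt",
    token_url="https://sts.googleapis.com/v1/token",
    credential_source={"file": "/path/to/oidc_token.jwt"},
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)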
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import json import mock import pytest import six from six.moves import http_client from six.moves import urllib from google.auth import _helpers from google.auth import _jwt_async as jwt from google.auth import exceptions from google.oauth2 import _client as sync_client from google.oauth2 import _client_async as _client from tests.oauth2 import test__client as test_client def make_request(response_data, status=http_client.OK): response = mock.AsyncMock(spec=["transport.Response"]) response.status = status data = json.dumps(response_data).encode("utf-8") response.data = mock.AsyncMock(spec=["__call__", "read"]) response.data.read = mock.AsyncMock(spec=["__call__"], return_value=data) response.content = mock.AsyncMock(spec=["__call__"], return_value=data) request = mock.AsyncMock(spec=["transport.Request"]) request.return_value = response return request @pytest.mark.asyncio async def test__token_endpoint_request(): request = make_request({"test": "response"}) result = await _client._token_endpoint_request( request, "http://example.com", {"test": "params"} ) # Check request call request.assert_called_with( method="POST", url="http://example.com", headers={"Content-Type": "application/x-www-form-urlencoded"}, body="test=params".encode("utf-8"), ) # Check result assert result == {"test": "response"} @pytest.mark.asyncio async def test__token_endpoint_request_json(): request = make_request({"test": "response"}) access_token = "access_token" result = await _client._token_endpoint_request( request, "http://example.com", {"test": "params"}, access_token=access_token, use_json=True, ) # Check request call request.assert_called_with( method="POST", url="http://example.com", headers={ "Content-Type": "application/json", "Authorization": "Bearer access_token", }, body=b'{"test": "params"}', ) # Check result assert result == {"test": "response"} @pytest.mark.asyncio async def test__token_endpoint_request_error(): request = make_request({}, status=http_client.BAD_REQUEST) with pytest.raises(exceptions.RefreshError): await _client._token_endpoint_request(request, "http://example.com", {}) @pytest.mark.asyncio async def test__token_endpoint_request_internal_failure_error(): request = make_request( {"error_description": "internal_failure"}, status=http_client.BAD_REQUEST ) with pytest.raises(exceptions.RefreshError): await _client._token_endpoint_request( request, "http://example.com", {"error_description": "internal_failure"} ) request = make_request( {"error": "internal_failure"}, status=http_client.BAD_REQUEST ) with pytest.raises(exceptions.RefreshError): await _client._token_endpoint_request( request, "http://example.com", {"error": "internal_failure"} ) def verify_request_params(request, params): request_body = request.call_args[1]["body"].decode("utf-8") request_params = urllib.parse.parse_qs(request_body) for key, value in six.iteritems(params): assert request_params[key][0] == value 
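# --- Illustrative sketch (not one of the original tests): make_request()
# above yields an AsyncMock transport whose response body is pre-encoded
# JSON, so any coroutine hitting the token endpoint can be exercised
# without real HTTP; verify_request_params() then inspects the urlencoded
# body the mock received. The grant_type value here is arbitrary.
@pytest.mark.asyncio
async def test__token_endpoint_request_sketch():
    request = make_request({"access_token": "tok"})

    result = await _client._token_endpoint_request(
        request, "http://example.com", {"grant_type": "password"}
    )

    verify_request_params(request, {"grant_type": "password"})
    assert result == {"access_token": "tok"}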
@mock.patch("google.auth._helpers.utcnow", return_value=datetime.datetime.min) @pytest.mark.asyncio async def test_jwt_grant(utcnow): request = make_request( {"access_token": "token", "expires_in": 500, "extra": "data"} ) token, expiry, extra_data = await _client.jwt_grant( request, "http://example.com", "assertion_value" ) # Check request call verify_request_params( request, {"grant_type": sync_client._JWT_GRANT_TYPE, "assertion": "assertion_value"}, ) # Check result assert token == "token" assert expiry == utcnow() + datetime.timedelta(seconds=500) assert extra_data["extra"] == "data" @pytest.mark.asyncio async def test_jwt_grant_no_access_token(): request = make_request( { # No access token. "expires_in": 500, "extra": "data", } ) with pytest.raises(exceptions.RefreshError): await _client.jwt_grant(request, "http://example.com", "assertion_value") @pytest.mark.asyncio async def test_id_token_jwt_grant(): now = _helpers.utcnow() id_token_expiry = _helpers.datetime_to_secs(now) id_token = jwt.encode(test_client.SIGNER, {"exp": id_token_expiry}).decode("utf-8") request = make_request({"id_token": id_token, "extra": "data"}) token, expiry, extra_data = await _client.id_token_jwt_grant( request, "http://example.com", "assertion_value" ) # Check request call verify_request_params( request, {"grant_type": sync_client._JWT_GRANT_TYPE, "assertion": "assertion_value"}, ) # Check result assert token == id_token # JWT does not store microseconds now = now.replace(microsecond=0) assert expiry == now assert extra_data["extra"] == "data" @pytest.mark.asyncio async def test_id_token_jwt_grant_no_access_token(): request = make_request( { # No access token. "expires_in": 500, "extra": "data", } ) with pytest.raises(exceptions.RefreshError): await _client.id_token_jwt_grant( request, "http://example.com", "assertion_value" ) @mock.patch("google.auth._helpers.utcnow", return_value=datetime.datetime.min) @pytest.mark.asyncio async def test_refresh_grant(unused_utcnow): request = make_request( { "access_token": "token", "refresh_token": "new_refresh_token", "expires_in": 500, "extra": "data", } ) token, refresh_token, expiry, extra_data = await _client.refresh_grant( request, "http://example.com", "refresh_token", "client_id", "client_secret", rapt_token="rapt_token", ) # Check request call verify_request_params( request, { "grant_type": sync_client._REFRESH_GRANT_TYPE, "refresh_token": "refresh_token", "client_id": "client_id", "client_secret": "client_secret", "rapt": "rapt_token", }, ) # Check result assert token == "token" assert refresh_token == "new_refresh_token" assert expiry == datetime.datetime.min + datetime.timedelta(seconds=500) assert extra_data["extra"] == "data" @mock.patch("google.auth._helpers.utcnow", return_value=datetime.datetime.min) @pytest.mark.asyncio async def test_refresh_grant_with_scopes(unused_utcnow): request = make_request( { "access_token": "token", "refresh_token": "new_refresh_token", "expires_in": 500, "extra": "data", "scope": test_client.SCOPES_AS_STRING, } ) token, refresh_token, expiry, extra_data = await _client.refresh_grant( request, "http://example.com", "refresh_token", "client_id", "client_secret", test_client.SCOPES_AS_LIST, ) # Check request call. verify_request_params( request, { "grant_type": sync_client._REFRESH_GRANT_TYPE, "refresh_token": "refresh_token", "client_id": "client_id", "client_secret": "client_secret", "scope": test_client.SCOPES_AS_STRING, }, ) # Check result. 
assert token == "token" assert refresh_token == "new_refresh_token" assert expiry == datetime.datetime.min + datetime.timedelta(seconds=500) assert extra_data["extra"] == "data" @pytest.mark.asyncio async def test_refresh_grant_no_access_token(): request = make_request( { # No access token. "refresh_token": "new_refresh_token", "expires_in": 500, "extra": "data", } ) with pytest.raises(exceptions.RefreshError): await _client.refresh_grant( request, "http://example.com", "refresh_token", "client_id", "client_secret" )
googleapis/google-auth-library-python
tests_async/oauth2/test__client_async.py
google/auth/external_account.py
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright © 2009- The Spyder Development Team # # Licensed under the terms of the MIT License # (see LICENSE.txt for details) # ----------------------------------------------------------------------------- """Provides QtQuickWidgets classes and functions.""" # Local imports from . import PYQT5, PYSIDE2, PythonQtError if PYQT5: from PyQt5.QtQuickWidgets import * elif PYSIDE2: from PySide2.QtQuickWidgets import * else: raise PythonQtError('No Qt bindings could be found')
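# --- Illustrative usage sketch (not part of qtpy): with the shim above,
# QQuickWidget is imported the same way regardless of which binding is
# installed. Assumes a Qt binding and a local "view.qml" file (both
# hypothetical here).
if __name__ == "__main__":
    import sys

    from qtpy.QtCore import QUrl
    from qtpy.QtQuickWidgets import QQuickWidget
    from qtpy.QtWidgets import QApplication

    app = QApplication(sys.argv)
    widget = QQuickWidget()
    widget.setSource(QUrl.fromLocalFile("view.qml"))
    widget.show()
    sys.exit(app.exec_())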
from __future__ import absolute_import import pytest from qtpy import PYSIDE2 @pytest.mark.skipif(not PYSIDE2, reason="Only available by default in PySide2") def test_qtcharts(): """Test the qtpy.QtCharts namespace""" from qtpy import QtCharts assert QtCharts.QtCharts.QChart is not None
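# --- Illustrative sketch (not part of the suite): the same guard pattern
# could smoke-test the QtQuickWidgets shim defined in this repo, which
# raises PythonQtError when no binding is available.
@pytest.mark.skipif(not PYSIDE2, reason="Only available by default in PySide2")
def test_qtquickwidgets_sketch():
    from qtpy import QtQuickWidgets

    assert QtQuickWidgets.QQuickWidget is not None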
goanpeca/qtpy
qtpy/tests/test_qtcharts.py
qtpy/QtQuickWidgets.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
A collection of functions for checking various XML-related
strings for standards compliance.
"""
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

from ...extern.six.moves import range, urllib

import re


def check_id(ID):
    """
    Returns `True` if *ID* is a valid XML ID.
    """
    return re.match(r"^[A-Za-z_][A-Za-z0-9_\.\-]*$", ID) is not None


def fix_id(ID):
    """
    Given an arbitrary string, create one that can be used as an xml id.

    This is rather simplistic at the moment, since it just replaces
    non-valid characters with underscores.
    """
    if re.match(r"^[A-Za-z_][A-Za-z0-9_\.\-]*$", ID):
        return ID
    if len(ID):
        corrected = ID
        if not len(corrected) or re.match('^[^A-Za-z_]$', corrected[0]):
            corrected = '_' + corrected
        corrected = (re.sub(r"[^A-Za-z_]", '_', corrected[0]) +
                     re.sub(r"[^A-Za-z0-9_\.\-]", "_", corrected[1:]))
        return corrected
    return ''


_token_regex = r"(?![\r\n\t ])[^\r\n\t]*(?![\r\n\t ])"


def check_token(token):
    """
    Returns `True` if *token* is a valid XML token, as defined by XML
    Schema Part 2.
    """
    return (token == '' or
            re.match(
                r"[^\r\n\t ]?([^\r\n\t ]| [^\r\n\t ])*[^\r\n\t ]?$",
                token) is not None)


def check_mime_content_type(content_type):
    """
    Returns `True` if *content_type* is a valid MIME content type
    (syntactically at least), as defined by RFC 2045.
    """
    ctrls = ''.join(chr(x) for x in range(0, 0x20))
    token_regex = '[^()<>@,;:\\\"/[\\]?= {}\x7f]+'.format(ctrls)
    return re.match(
        r'(?P<type>{})/(?P<subtype>{})$'.format(token_regex, token_regex),
        content_type) is not None


def check_anyuri(uri):
    """
    Returns `True` if *uri* is a valid URI as defined in RFC 2396.
    """
    if (re.match(
            (r"(([a-zA-Z][0-9a-zA-Z+\-\.]*:)?/{0,2}[0-9a-zA-Z;" +
             r"/?:@&=+$\.\-_!~*'()%]+)?(#[0-9a-zA-Z;/?:@&=+$\.\-_!~*'()%]+)?"),
            uri) is None):
        return False
    try:
        urllib.parse.urlparse(uri)
    except Exception:
        return False
    return True
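# --- Illustrative usage sketch (not part of the module): fix_id() always
# produces something check_id() accepts, which is the intended round-trip.
if __name__ == "__main__":
    assert check_id("Velocity_1")
    assert not check_id("1st-column")            # IDs may not start with a digit
    assert fix_id("1st-column") == "_1st-column"
    assert check_id(fix_id("1st-column"))
    assert check_token("a simple token")
    assert check_mime_content_type("text/xml")
    assert check_anyuri("http://example.com/#frag")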
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import pytest from ... import units as u from ..builtin_frames import ICRS, Galactic, Galactocentric from .. import builtin_frames as bf from ...tests.helper import quantity_allclose from ..errors import ConvertError from .. import representation as r def test_api(): # transform observed Barycentric velocities to full-space Galactocentric gc_frame = Galactocentric() icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, distance=101*u.pc, pm_ra_cosdec=21*u.mas/u.yr, pm_dec=-71*u.mas/u.yr, radial_velocity=71*u.km/u.s) icrs.transform_to(gc_frame) # transform a set of ICRS proper motions to Galactic icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, pm_ra_cosdec=21*u.mas/u.yr, pm_dec=-71*u.mas/u.yr) icrs.transform_to(Galactic) # transform a Barycentric RV to a GSR RV icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, distance=1.*u.pc, pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr, radial_velocity=71*u.km/u.s) icrs.transform_to(Galactocentric) all_kwargs = [ dict(ra=37.4*u.deg, dec=-55.8*u.deg), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc), dict(ra=37.4*u.deg, dec=-55.8*u.deg, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, radial_velocity=105.7*u.km/u.s), dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s) ] @pytest.mark.parametrize('kwargs', all_kwargs) def test_all_arg_options(kwargs): # Above is a list of all possible valid combinations of arguments. 
    # Here we do a simple thing and just verify that passing them in, we have
    # access to the relevant attributes from the resulting object
    icrs = ICRS(**kwargs)
    gal = icrs.transform_to(Galactic)
    repr_gal = repr(gal)

    for k in kwargs:
        getattr(icrs, k)

    if 'pm_ra_cosdec' in kwargs:  # should have both
        assert 'pm_l_cosb' in repr_gal
        assert 'pm_b' in repr_gal
        assert 'mas / yr' in repr_gal

        if 'radial_velocity' not in kwargs:
            assert 'radial_velocity' not in repr_gal

    if 'radial_velocity' in kwargs:
        assert 'radial_velocity' in repr_gal
        assert 'km / s' in repr_gal

        if 'pm_ra_cosdec' not in kwargs:
            assert 'pm_l_cosb' not in repr_gal
            assert 'pm_b' not in repr_gal


@pytest.mark.parametrize('cls,lon,lat', [
    [bf.ICRS, 'ra', 'dec'], [bf.FK4, 'ra', 'dec'],
    [bf.FK4NoETerms, 'ra', 'dec'], [bf.FK5, 'ra', 'dec'],
    [bf.GCRS, 'ra', 'dec'], [bf.HCRS, 'ra', 'dec'],
    [bf.LSR, 'ra', 'dec'], [bf.CIRS, 'ra', 'dec'],
    [bf.Galactic, 'l', 'b'], [bf.AltAz, 'az', 'alt'],
    [bf.Supergalactic, 'sgl', 'sgb'], [bf.GalacticLSR, 'l', 'b'],
    [bf.HeliocentricTrueEcliptic, 'lon', 'lat'],
    [bf.GeocentricTrueEcliptic, 'lon', 'lat'],
    [bf.BarycentricTrueEcliptic, 'lon', 'lat'],
    [bf.PrecessedGeocentric, 'ra', 'dec']
])
def test_expected_arg_names(cls, lon, lat):
    kwargs = {lon: 37.4*u.deg, lat: -55.8*u.deg, 'distance': 150*u.pc,
              'pm_{0}_cos{1}'.format(lon, lat): -21.2*u.mas/u.yr,
              'pm_{0}'.format(lat): 17.1*u.mas/u.yr,
              'radial_velocity': 105.7*u.km/u.s}
    frame = cls(**kwargs)


# these data are extracted from the vizier copy of XHIP:
# http://vizier.u-strasbg.fr/viz-bin/VizieR-3?-source=+V/137A/XHIP
_xhip_head = """
------ ------------ ------------ -------- -------- ------------ ------------ ------- -------- -------- ------- ------ ------ ------
       R            D            pmRA     pmDE                               Di      pmGLon   pmGLat   RV      U      V      W
   HIP AJ2000 (deg) EJ2000 (deg) (mas/yr) (mas/yr) GLon (deg)   GLat (deg)   st (pc) (mas/yr) (mas/yr) (km/s)  (km/s) (km/s) (km/s)
------ ------------ ------------ -------- -------- ------------ ------------ ------- -------- -------- ------- ------ ------ ------
"""[1:-1]

_xhip_data = """
    19 000.05331690 +38.30408633    -3.17   -15.37 112.00026470 -23.47789171  247.12    -6.40   -14.33    6.30    7.3    2.0  -17.9
    20 000.06295067 +23.52928427    36.11   -22.48 108.02779304 -37.85659811   95.90    29.35   -30.78   37.80  -19.3   16.1  -34.2
    21 000.06623581 +08.00723430    61.48    -0.23 101.69697120 -52.74179515  183.68    58.06   -20.23  -11.72  -45.2  -30.9   -1.3
 24917 080.09698238 -33.39874984    -4.30    13.40 236.92324669 -32.58047131  107.38   -14.03    -1.15   36.10  -22.4  -21.3  -19.9
 59207 182.13915108 +65.34963517    18.17     5.49 130.04157185  51.18258601   56.00   -18.98    -0.49    5.70    1.5    6.1    4.4
 87992 269.60730667 +36.87462906   -89.58    72.46  62.98053142  25.90148234  129.60    45.64   105.79   -4.00  -39.5  -15.8   56.7
115110 349.72322473 -28.74087144    48.86    -9.25  23.00447250 -69.52799804  116.87    -8.37   -49.02   15.00  -16.8  -12.2  -23.6
"""[1:-1]


# in principle we could parse the above as a table, but doing it "manually"
# makes this test less tied to Table working correctly
@pytest.mark.parametrize('hip,ra,dec,pmra,pmdec,glon,glat,dist,pmglon,pmglat,rv,U,V,W',
                         [[float(val) for val in row.split()]
                          for row in _xhip_data.split('\n')])
def test_xhip_galactic(hip, ra, dec, pmra, pmdec, glon, glat, dist,
                       pmglon, pmglat, rv, U, V, W):
    i = ICRS(ra*u.deg, dec*u.deg, dist*u.pc,
             pm_ra_cosdec=pmra*u.marcsec/u.yr, pm_dec=pmdec*u.marcsec/u.yr,
             radial_velocity=rv*u.km/u.s)
    g = i.transform_to(Galactic)

    # precision is limited by 2-decimal digit string representation of pms
    assert quantity_allclose(g.pm_l_cosb, pmglon*u.marcsec/u.yr,
                             atol=.01*u.marcsec/u.yr)
    assert \
quantity_allclose(g.pm_b, pmglat*u.marcsec/u.yr, atol=.01*u.marcsec/u.yr) # make sure UVW also makes sense uvwg = g.cartesian.differentials['s'] # precision is limited by 1-decimal digit string representation of vels assert quantity_allclose(uvwg.d_x, U*u.km/u.s, atol=.1*u.km/u.s) assert quantity_allclose(uvwg.d_y, V*u.km/u.s, atol=.1*u.km/u.s) assert quantity_allclose(uvwg.d_z, W*u.km/u.s, atol=.1*u.km/u.s) @pytest.mark.parametrize('kwargs,expect_success', [ [dict(ra=37.4*u.deg, dec=-55.8*u.deg), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc), True], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, radial_velocity=105.7*u.km/u.s), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s), True] ]) def test_frame_affinetransform(kwargs, expect_success): """There are already tests in test_transformations.py that check that an AffineTransform fails without full-space data, but this just checks that things work as expected at the frame level as well. """ icrs = ICRS(**kwargs) if expect_success: gc = icrs.transform_to(Galactocentric) else: with pytest.raises(ConvertError): icrs.transform_to(Galactocentric) def test_differential_cls_arg(): """ Test passing in an explicit differential class to the initializer or changing the differential class via set_representation_cls """ from ..builtin_frames import ICRS icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr, differential_cls=r.UnitSphericalDifferential) assert icrs.pm_ra == 10*u.mas/u.yr icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr, differential_cls={'s': r.UnitSphericalDifferential}) assert icrs.pm_ra == 10*u.mas/u.yr icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra_cosdec=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr) icrs.set_representation_cls(s=r.UnitSphericalDifferential) assert quantity_allclose(icrs.pm_ra, 20*u.mas/u.yr) # incompatible representation and differential with pytest.raises(TypeError): ICRS(ra=1*u.deg, dec=60*u.deg, v_x=1*u.km/u.s, v_y=-2*u.km/u.s, v_z=-2*u.km/u.s, differential_cls=r.CartesianDifferential) # specify both icrs = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, v_x=1*u.km/u.s, v_y=2*u.km/u.s, v_z=3*u.km/u.s, representation=r.CartesianRepresentation, differential_cls=r.CartesianDifferential) assert icrs.x == 1*u.pc assert icrs.y == 2*u.pc assert icrs.z == 3*u.pc assert icrs.v_x == 1*u.km/u.s assert icrs.v_y == 2*u.km/u.s assert icrs.v_z == 3*u.km/u.s def test_slicing_preserves_differential(): icrs = ICRS(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s) icrs2 = icrs.reshape(1,1)[:1,0] for name in icrs.representation_component_names.keys(): assert getattr(icrs, name) == getattr(icrs2, name)[0] for name in icrs.get_representation_component_names('s').keys(): assert getattr(icrs, name) == getattr(icrs2, name)[0]
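# --- Illustrative sketch (not part of the suite): a full-space ICRS
# coordinate should survive a round trip through Galactic, velocity
# components included. Default quantity_allclose tolerances are loose
# relative to the machine-precision error of the rotation.
def test_galactic_roundtrip_sketch():
    icrs = ICRS(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc,
                pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr,
                radial_velocity=105.7*u.km/u.s)
    back = icrs.transform_to(Galactic).transform_to(ICRS)
    assert quantity_allclose(back.pm_ra_cosdec, icrs.pm_ra_cosdec)
    assert quantity_allclose(back.pm_dec, icrs.pm_dec)
    assert quantity_allclose(back.radial_velocity, icrs.radial_velocity)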
AustereCuriosity/astropy
astropy/coordinates/tests/test_frames_with_velocity.py
astropy/utils/xml/check.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The astropy.utils.iers package provides access to the tables provided by the International Earth Rotation and Reference Systems Service, in particular allowing interpolation of published UT1-UTC values for given times. These are used in `astropy.time` to provide UT1 values. The polar motions are also used for determining earth orientation for celestial-to-terrestrial coordinate transformations (in `astropy.coordinates`). """ from __future__ import (absolute_import, division, print_function, unicode_literals) from warnings import warn try: from urlparse import urlparse except ImportError: from urllib.parse import urlparse import numpy as np from ... import config as _config from ... import units as u from ...table import Table, QTable from ...utils.data import get_pkg_data_filename, clear_download_cache from ... import utils from ...utils.exceptions import AstropyWarning from ...tests import disable_internet __all__ = ['Conf', 'conf', 'IERS', 'IERS_B', 'IERS_A', 'IERS_Auto', 'FROM_IERS_B', 'FROM_IERS_A', 'FROM_IERS_A_PREDICTION', 'TIME_BEFORE_IERS_RANGE', 'TIME_BEYOND_IERS_RANGE', 'IERS_A_FILE', 'IERS_A_URL', 'IERS_A_README', 'IERS_B_FILE', 'IERS_B_URL', 'IERS_B_README', 'IERSRangeError', 'IERSStaleWarning'] # IERS-A default file name, URL, and ReadMe with content description IERS_A_FILE = 'finals2000A.all' IERS_A_URL = 'http://maia.usno.navy.mil/ser7/finals2000A.all' IERS_A_README = get_pkg_data_filename('data/ReadMe.finals2000A') # IERS-B default file name, URL, and ReadMe with content description IERS_B_FILE = get_pkg_data_filename('data/eopc04_IAU2000.62-now') IERS_B_URL = 'http://hpiers.obspm.fr/iers/eop/eopc04/eopc04_IAU2000.62-now' IERS_B_README = get_pkg_data_filename('data/ReadMe.eopc04_IAU2000') # Status/source values returned by IERS.ut1_utc FROM_IERS_B = 0 FROM_IERS_A = 1 FROM_IERS_A_PREDICTION = 2 TIME_BEFORE_IERS_RANGE = -1 TIME_BEYOND_IERS_RANGE = -2 MJD_ZERO = 2400000.5 INTERPOLATE_ERROR = """\ interpolating from IERS_Auto using predictive values that are more than {} days old. Normally you should not see this error because this class automatically downloads the latest IERS-A table. Perhaps you are offline? If you understand what you are doing then this error can be suppressed by setting the auto_max_age configuration variable to ``None``: from astropy.utils.iers import conf conf.auto_max_age = None """ def download_file(*args, **kwargs): """ Overload astropy.utils.data.download_file within iers module to use a custom (longer) wait time. This just passes through ``*args`` and ``**kwargs`` after temporarily setting the download_file remote timeout to the local ``iers.conf.remote_timeout`` value. """ with utils.data.conf.set_temp('remote_timeout', conf.remote_timeout): return utils.data.download_file(*args, **kwargs) class IERSStaleWarning(AstropyWarning): pass class Conf(_config.ConfigNamespace): """ Configuration parameters for `astropy.utils.iers`. """ auto_download = _config.ConfigItem( True, 'Enable auto-downloading of the latest IERS data. If set to False ' 'then the local IERS-B file will be used by default. Default is True.') auto_max_age = _config.ConfigItem( 30.0, 'Maximum age (days) of predictive data before auto-downloading. Default is 30.') iers_auto_url = _config.ConfigItem( IERS_A_URL, 'URL for auto-downloading IERS file data.') remote_timeout = _config.ConfigItem( 10.0, 'Remote timeout downloading IERS file data (seconds).') conf = Conf() # If internet is off for testing then do not download. 
# This makes most tests fall back to using the built-in IERS-B table.
conf.auto_download &= not disable_internet.INTERNET_OFF


class IERSRangeError(IndexError):
    """
    Any error for when dates are outside of the valid range for IERS
    """


class IERS(QTable):
    """Generic IERS table class, defining interpolation functions.

    Sub-classed from `astropy.table.QTable`.  The table should hold columns
    'MJD', 'UT1_UTC', and 'PM_x'/'PM_y'.
    """

    iers_table = None

    @classmethod
    def open(cls, file=None, cache=False, **kwargs):
        """Open an IERS table, reading it from a file if not loaded before.

        Parameters
        ----------
        file : str or None
            full local or network path to the ascii file holding IERS data,
            for passing on to the ``read`` class methods (further optional
            arguments that are available for some IERS subclasses can be
            added).  If None, use the default location from the ``read``
            class method.
        cache : bool
            Whether to use cache. Defaults to False, since IERS files
            are regularly updated.

        Returns
        -------
        An IERS table class instance

        Notes
        -----
        On the first call in a session, the table will be memoized (in the
        ``iers_table`` class attribute), and further calls to ``open`` will
        return this stored table if ``file=None`` (the default).

        If a table needs to be re-read from disk, pass on an explicit file
        location or use the (sub-class) close method and re-open.

        If the location is a network location it is first downloaded via
        download_file.

        For the IERS class itself, an IERS_B sub-class instance is opened.
        """
        if file is not None or cls.iers_table is None:
            if file is not None:
                if urlparse(file).netloc:
                    kwargs.update(file=download_file(file, cache=cache))
                else:
                    kwargs.update(file=file)
            cls.iers_table = cls.read(**kwargs)
        return cls.iers_table

    @classmethod
    def close(cls):
        """Remove the IERS table from the class.

        This allows the table to be re-read from disk during one's session
        (e.g., if one finds it is out of date and has updated the file).
        """
        cls.iers_table = None

    def mjd_utc(self, jd1, jd2=0.):
        """Turn a time to MJD, returning integer and fractional parts.

        Parameters
        ----------
        jd1 : float, array, or Time
            first part of two-part JD, or Time object
        jd2 : float or array, optional
            second part of two-part JD.
            Default is 0., ignored if jd1 is `~astropy.time.Time`.

        Returns
        -------
        mjd : float or array
            integer part of MJD
        utc : float or array
            fractional part of MJD
        """
        try:  # see if this is a Time object
            jd1, jd2 = jd1.utc.jd1, jd1.utc.jd2
        except Exception:
            pass
        mjd = np.floor(jd1 - MJD_ZERO + jd2)
        utc = jd1 - (MJD_ZERO+mjd) + jd2
        return mjd, utc

    def ut1_utc(self, jd1, jd2=0., return_status=False):
        """Interpolate UT1-UTC corrections in IERS Table for given dates.

        Parameters
        ----------
        jd1 : float, float array, or Time object
            first part of two-part JD, or Time object
        jd2 : float or float array, optional
            second part of two-part JD.
            Default is 0., ignored if jd1 is `~astropy.time.Time`.
        return_status : bool
            Whether to return status values.  If False (default), raise
            ``IERSRangeError`` if any time is out of the range covered by
            the IERS table.
Returns ------- ut1_utc : float or float array UT1-UTC, interpolated in IERS Table status : int or int array Status values (if ``return_status``=``True``):: ``iers.FROM_IERS_B`` ``iers.FROM_IERS_A`` ``iers.FROM_IERS_A_PREDICTION`` ``iers.TIME_BEFORE_IERS_RANGE`` ``iers.TIME_BEYOND_IERS_RANGE`` """ return self._interpolate(jd1, jd2, ['UT1_UTC'], self.ut1_utc_source if return_status else None) def pm_xy(self, jd1, jd2=0., return_status=False): """Interpolate polar motions from IERS Table for given dates. Parameters ---------- jd1 : float, float array, or Time object first part of two-part JD, or Time object jd2 : float or float array, optional second part of two-part JD. Default is 0., ignored if jd1 is `~astropy.time.Time`. return_status : bool Whether to return status values. If False (default), raise ``IERSRangeError`` if any time is out of the range covered by the IERS table. Returns ------- PM_x : Quantity with angle units x component of polar motion for the requested times PM_y : Quantity with angle units y component of polar motion for the requested times status : int or int array Status values (if ``return_status``=``True``):: ``iers.FROM_IERS_B`` ``iers.FROM_IERS_A`` ``iers.FROM_IERS_A_PREDICTION`` ``iers.TIME_BEFORE_IERS_RANGE`` ``iers.TIME_BEYOND_IERS_RANGE`` """ return self._interpolate(jd1, jd2, ['PM_x', 'PM_y'], self.pm_source if return_status else None) def _check_interpolate_indices(self, indices_orig, indices_clipped, max_input_mjd): """ Check that the indices from interpolation match those after clipping to the valid table range. This method gets overridden in the IERS_Auto class because it has different requirements. """ if np.any(indices_orig != indices_clipped): raise IERSRangeError('(some) times are outside of range covered ' 'by IERS table.') def _interpolate(self, jd1, jd2, columns, source=None): mjd, utc = self.mjd_utc(jd1, jd2) # enforce array is_scalar = not hasattr(mjd, '__array__') or mjd.ndim == 0 if is_scalar: mjd = np.array([mjd]) utc = np.array([utc]) self._refresh_table_as_needed(mjd) # For typical format, will always find a match (since MJD are integer) # hence, important to define which side we will be; this ensures # self['MJD'][i-1]<=mjd<self['MJD'][i] i = np.searchsorted(self['MJD'].value, mjd, side='right') # Get index to MJD at or just below given mjd, clipping to ensure we # stay in range of table (status will be set below for those outside) i1 = np.clip(i, 1, len(self) - 1) i0 = i1 - 1 mjd_0, mjd_1 = self['MJD'][i0].value, self['MJD'][i1].value results = [] for column in columns: val_0, val_1 = self[column][i0], self[column][i1] d_val = val_1 - val_0 if column == 'UT1_UTC': # Check & correct for possible leap second (correcting diff., # not 1st point, since jump can only happen right at 2nd point) d_val -= d_val.round() # Linearly interpolate (which is what TEMPO does for UT1-UTC, but # may want to follow IERS gazette #13 for more precise # interpolation and correction for tidal effects; # http://maia.usno.navy.mil/iers-gaz13) val = val_0 + (mjd - mjd_0 + utc) / (mjd_1 - mjd_0) * d_val # Do not extrapolate outside range, instead just propagate last values. val[i == 0] = self[column][0] val[i == len(self)] = self[column][-1] if is_scalar: val = val[0] results.append(val) if source: # Set status to source, using the routine passed in. 
            status = source(i1)
            # Check for out of range
            status[i == 0] = TIME_BEFORE_IERS_RANGE
            status[i == len(self)] = TIME_BEYOND_IERS_RANGE
            if is_scalar:
                status = status[0]
            results.append(status)

            return results
        else:
            self._check_interpolate_indices(i1, i, np.max(mjd))
            return results[0] if len(results) == 1 else results

    def _refresh_table_as_needed(self, mjd):
        """
        Potentially update the IERS table in place depending on the requested
        time values in ``mjd`` and the time span of the table.

        The base behavior is not to update the table.  ``IERS_Auto`` overrides
        this method.
        """
        pass

    def ut1_utc_source(self, i):
        """Source for UT1-UTC.  To be overridden by subclass."""
        return np.zeros_like(i)

    def pm_source(self, i):
        """Source for polar motion.  To be overridden by subclass."""
        return np.zeros_like(i)

    @property
    def time_now(self):
        """
        Property to provide the current time, but also allow for explicitly
        setting the _time_now attribute for testing purposes.
        """
        from astropy.time import Time
        try:
            return self._time_now
        except Exception:
            return Time.now()


class IERS_A(IERS):
    """IERS Table class targeted to IERS A, provided by USNO.

    These include rapid turnaround and predicted times.
    See http://maia.usno.navy.mil/

    Notes
    -----
    The IERS A file is not part of astropy.  It can be downloaded from
    ``iers.IERS_A_URL``.  See ``iers.__doc__`` for instructions on how to use
    it in ``Time``, etc.
    """

    iers_table = None

    @classmethod
    def _combine_a_b_columns(cls, iers_a):
        """
        Return a new table with appropriate combination of IERS_A and B columns.
        """
        # IERS A has some rows at the end that hold nothing but dates & MJD
        # presumably to be filled later.  Exclude those a priori -- there
        # should at least be a predicted UT1-UTC and PM!
        table = iers_a[~iers_a['UT1_UTC_A'].mask &
                       ~iers_a['PolPMFlag_A'].mask]

        # This does nothing for IERS_A, but allows IERS_Auto to ensure the
        # IERS B values in the table are consistent with the true ones.
        table = cls._substitute_iers_b(table)

        # Run np.where on the data from the table columns, since in numpy 1.9
        # it otherwise returns an only partially initialized column.
        table['UT1_UTC'] = np.where(table['UT1_UTC_B'].mask,
                                    table['UT1_UTC_A'].data,
                                    table['UT1_UTC_B'].data)
        # Ensure the unit is correct, for later column conversion to Quantity.
        table['UT1_UTC'].unit = table['UT1_UTC_A'].unit
        table['UT1Flag'] = np.where(table['UT1_UTC_B'].mask,
                                    table['UT1Flag_A'].data,
                                    'B')
        # Repeat for polar motions.
        table['PM_x'] = np.where(table['PM_X_B'].mask,
                                 table['PM_x_A'].data,
                                 table['PM_X_B'].data)
        table['PM_x'].unit = table['PM_x_A'].unit
        table['PM_y'] = np.where(table['PM_Y_B'].mask,
                                 table['PM_y_A'].data,
                                 table['PM_Y_B'].data)
        table['PM_y'].unit = table['PM_y_A'].unit
        table['PolPMFlag'] = np.where(table['PM_X_B'].mask,
                                      table['PolPMFlag_A'].data,
                                      'B')

        # Get the table index for the first row that has predictive values
        # PolPMFlag_A  IERS (I) or Prediction (P) flag for
        #              Bull. A polar motion values
        # UT1Flag_A    IERS (I) or Prediction (P) flag for
        #              Bull. A UT1-UTC values
        is_predictive = (table['UT1Flag_A'] == 'P') | (table['PolPMFlag_A'] == 'P')
        table.meta['predictive_index'] = np.min(np.flatnonzero(is_predictive))
        table.meta['predictive_mjd'] = table['MJD'][table.meta['predictive_index']]

        return table

    @classmethod
    def _substitute_iers_b(cls, table):
        # See documentation in IERS_Auto.
        return table

    @classmethod
    def read(cls, file=None, readme=None):
        """Read IERS-A table from a finals2000a.* file provided by USNO.

        Parameters
        ----------
        file : str
            full path to ascii file holding IERS-A data.
            Defaults to ``iers.IERS_A_FILE``.
readme : str full path to ascii file holding CDS-style readme. Defaults to package version, ``iers.IERS_A_README``. Returns ------- ``IERS_A`` class instance """ if file is None: file = IERS_A_FILE if readme is None: readme = IERS_A_README # Read in as a regular Table, including possible masked columns. # Columns will be filled and converted to Quantity in cls.__init__. iers_a = Table.read(file, format='cds', readme=readme) # Combine the A and B data for UT1-UTC and PM columns table = cls._combine_a_b_columns(iers_a) table.meta['data_path'] = file table.meta['readme_path'] = readme # Fill any masked values, and convert to a QTable. return cls(table.filled()) def ut1_utc_source(self, i): """Set UT1-UTC source flag for entries in IERS table""" ut1flag = self['UT1Flag'][i] source = np.ones_like(i) * FROM_IERS_B source[ut1flag == 'I'] = FROM_IERS_A source[ut1flag == 'P'] = FROM_IERS_A_PREDICTION return source def pm_source(self, i): """Set polar motion source flag for entries in IERS table""" pmflag = self['PolPMFlag'][i] source = np.ones_like(i) * FROM_IERS_B source[pmflag == 'I'] = FROM_IERS_A source[pmflag == 'P'] = FROM_IERS_A_PREDICTION return source class IERS_B(IERS): """IERS Table class targeted to IERS B, provided by IERS itself. These are final values; see http://www.iers.org/ Notes ----- If the package IERS B file (```iers.IERS_B_FILE``) is out of date, a new version can be downloaded from ``iers.IERS_B_URL``. """ iers_table = None @classmethod def read(cls, file=None, readme=None, data_start=14): """Read IERS-B table from a eopc04_iau2000.* file provided by IERS. Parameters ---------- file : str full path to ascii file holding IERS-B data. Defaults to package version, ``iers.IERS_B_FILE``. readme : str full path to ascii file holding CDS-style readme. Defaults to package version, ``iers.IERS_B_README``. data_start : int starting row. Default is 14, appropriate for standard IERS files. Returns ------- ``IERS_B`` class instance """ if file is None: file = IERS_B_FILE if readme is None: readme = IERS_B_README # Read in as a regular Table, including possible masked columns. # Columns will be filled and converted to Quantity in cls.__init__. iers_b = Table.read(file, format='cds', readme=readme, data_start=data_start) return cls(iers_b.filled()) def ut1_utc_source(self, i): """Set UT1-UTC source flag for entries in IERS table""" return np.ones_like(i) * FROM_IERS_B def pm_source(self, i): """Set PM source flag for entries in IERS table""" return np.ones_like(i) * FROM_IERS_B class IERS_Auto(IERS_A): """ Provide most-recent IERS data and automatically handle downloading of updated values as necessary. """ iers_table = None @classmethod def open(cls): """If the configuration setting ``astropy.utils.iers.conf.auto_download`` is set to True (default), then open a recent version of the IERS-A table with predictions for UT1-UTC and polar motion out to approximately one year from now. If the available version of this file is older than ``astropy.utils.iers.conf.auto_max_age`` days old (or non-existent) then it will be downloaded over the network and cached. If the configuration setting ``astropy.utils.iers.conf.auto_download`` is set to False then ``astropy.utils.iers.IERS()`` is returned. This is normally the IERS-B table that is supplied with astropy. On the first call in a session, the table will be memoized (in the ``iers_table`` class attribute), and further calls to ``open`` will return this stored table. 
        Returns
        -------
        `~astropy.table.QTable` instance with IERS (Earth rotation) data
        columns
        """
        if not conf.auto_download:
            cls.iers_table = IERS.open()
            return cls.iers_table

        if cls.iers_table is not None:
            # If the URL has changed, we need to redownload the file, so we
            # should ignore the internally cached version.
            if cls.iers_table.meta.get('data_url') == conf.iers_auto_url:
                return cls.iers_table

        try:
            filename = download_file(conf.iers_auto_url, cache=True)
        except Exception as err:
            # Issue a warning here, perhaps user is offline.  An exception
            # will be raised downstream when actually trying to interpolate
            # predictive values.
            warn(AstropyWarning('failed to download {}, using local IERS-B: {}'
                                .format(conf.iers_auto_url, str(err))))
            cls.iers_table = IERS.open()
            return cls.iers_table

        cls.iers_table = cls.read(file=filename)
        cls.iers_table.meta['data_url'] = str(conf.iers_auto_url)

        return cls.iers_table

    def _check_interpolate_indices(self, indices_orig, indices_clipped,
                                   max_input_mjd):
        """Check that the indices from interpolation match those after
        clipping to the valid table range.  The IERS_Auto class is exempted
        as long as it has sufficiently recent available data so the clipped
        interpolation is always within the confidence bounds of current Earth
        rotation knowledge.
        """
        predictive_mjd = self.meta['predictive_mjd']

        # See explanation in _refresh_table_as_needed for these conditions
        auto_max_age = (conf.auto_max_age if conf.auto_max_age is not None
                        else np.finfo(np.float).max)
        if (max_input_mjd > predictive_mjd and
                self.time_now.mjd - predictive_mjd > auto_max_age):
            raise ValueError(INTERPOLATE_ERROR)

    def _refresh_table_as_needed(self, mjd):
        """Potentially update the IERS table in place depending on the
        requested time values in ``mjd`` and the time span of the table.

        For IERS_Auto the behavior is that the table is refreshed from the
        IERS server if both the following apply:

        - Any of the requested IERS values are predictive.  The IERS-A table
          contains predictive data out for a year after the available
          definitive values.
        - The first predictive values are at least ``conf.auto_max_age`` days
          old.  In other words the IERS-A table was created by IERS long
          enough ago that it can be considered stale for predictions.
        """
        max_input_mjd = np.max(mjd)
        now_mjd = self.time_now.mjd

        # IERS-A table contains predictive data out for a year after
        # the available definitive values.
        fpi = self.meta['predictive_index']
        predictive_mjd = self.meta['predictive_mjd']

        # Update table in place if necessary
        auto_max_age = (conf.auto_max_age if conf.auto_max_age is not None
                        else np.finfo(np.float).max)

        # If auto_max_age is smaller than the IERS update time then repeated
        # downloads may occur without getting updated values (giving an
        # IERSStaleWarning).
        if auto_max_age < 10:
            raise ValueError('IERS auto_max_age configuration value must be '
                             'larger than 10 days')

        if (max_input_mjd > predictive_mjd and
                now_mjd - predictive_mjd > auto_max_age):

            # Get the latest version
            try:
                clear_download_cache(conf.iers_auto_url)
                filename = download_file(conf.iers_auto_url, cache=True)
            except Exception as err:
                # Issue a warning here, perhaps user is offline.  An exception
                # will be raised downstream when actually trying to
                # interpolate predictive values.
                warn(AstropyWarning('failed to download {}: {}.\nA coordinate or time-related '
                                    'calculation might be compromised or fail because the dates are '
                                    'not covered by the available IERS file.  See the '
                                    '"IERS data access" section of the astropy documentation '
                                    'for additional information on working offline.'
                                    .format(conf.iers_auto_url, str(err))))
                return

            new_table = self.__class__.read(file=filename)

            # New table has new values?
            if new_table['MJD'][-1] > self['MJD'][-1]:
                # Replace current values from the first predictive index
                # through the end of the current table.  This replacement is
                # much faster than just deleting all rows and then using
                # add_row for the whole duration.
                new_fpi = np.searchsorted(new_table['MJD'].value,
                                          predictive_mjd, side='right')
                n_replace = len(self) - fpi
                self[fpi:] = new_table[new_fpi:new_fpi + n_replace]

                # Sanity check for continuity
                if new_table['MJD'][new_fpi + n_replace] - self['MJD'][-1] != 1.0 * u.d:
                    raise ValueError('unexpected gap in MJD when refreshing IERS table')

                # Now add new rows in place
                for row in new_table[new_fpi + n_replace:]:
                    self.add_row(row)

                self.meta.update(new_table.meta)
            else:
                warn(IERSStaleWarning(
                    'IERS_Auto predictive values are older than {} days but downloading '
                    'the latest table did not find newer values'.format(conf.auto_max_age)))

    @classmethod
    def _substitute_iers_b(cls, table):
        """Substitute IERS B values with those from a real IERS B table.

        IERS-A has IERS-B values included, but for reasons unknown these
        do not match the latest IERS-B values (see comments in #4436).
        Here, we use the bundled astropy IERS-B table to overwrite the values
        in the downloaded IERS-A table.
        """
        iers_b = IERS_B.open()
        # Substitute IERS-B values for existing B values in IERS-A table
        mjd_b = table['MJD'][~table['UT1_UTC_B'].mask]
        i0 = np.searchsorted(iers_b['MJD'].value, mjd_b[0], side='left')
        i1 = np.searchsorted(iers_b['MJD'].value, mjd_b[-1], side='right')
        iers_b = iers_b[i0:i1]
        n_iers_b = len(iers_b)
        # If there is overlap then replace IERS-A values from available IERS-B
        if n_iers_b > 0:
            # Sanity check that we are overwriting the correct values
            if not np.allclose(table['MJD'][:n_iers_b], iers_b['MJD'].value):
                raise ValueError('unexpected mismatch when copying '
                                 'IERS-B values into IERS-A table.')
            # Finally do the overwrite
            table['UT1_UTC_B'][:n_iers_b] = iers_b['UT1_UTC'].value
            table['PM_X_B'][:n_iers_b] = iers_b['PM_x'].value
            table['PM_Y_B'][:n_iers_b] = iers_b['PM_y'].value

        return table


# by default for IERS class, read IERS-B table
IERS.read = IERS_B.read
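# --- Illustrative usage sketch (not part of the module): interpolate
# UT1-UTC and polar motion for J2000.0 from the bundled IERS-B table.
# This works offline because IERS_B.read() defaults to the packaged file.
if __name__ == "__main__":
    iers_b = IERS_B.open()
    print("UT1-UTC at J2000.0:", iers_b.ut1_utc(2451545.0))

    ut1_utc, status = iers_b.ut1_utc(2451545.0, return_status=True)
    assert status == FROM_IERS_B

    pm_x, pm_y = iers_b.pm_xy(2451545.0)
    print("Polar motion at J2000.0:", pm_x, pm_y)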
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import pytest from ... import units as u from ..builtin_frames import ICRS, Galactic, Galactocentric from .. import builtin_frames as bf from ...tests.helper import quantity_allclose from ..errors import ConvertError from .. import representation as r def test_api(): # transform observed Barycentric velocities to full-space Galactocentric gc_frame = Galactocentric() icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, distance=101*u.pc, pm_ra_cosdec=21*u.mas/u.yr, pm_dec=-71*u.mas/u.yr, radial_velocity=71*u.km/u.s) icrs.transform_to(gc_frame) # transform a set of ICRS proper motions to Galactic icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, pm_ra_cosdec=21*u.mas/u.yr, pm_dec=-71*u.mas/u.yr) icrs.transform_to(Galactic) # transform a Barycentric RV to a GSR RV icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, distance=1.*u.pc, pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr, radial_velocity=71*u.km/u.s) icrs.transform_to(Galactocentric) all_kwargs = [ dict(ra=37.4*u.deg, dec=-55.8*u.deg), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc), dict(ra=37.4*u.deg, dec=-55.8*u.deg, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, radial_velocity=105.7*u.km/u.s), dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s) ] @pytest.mark.parametrize('kwargs', all_kwargs) def test_all_arg_options(kwargs): # Above is a list of all possible valid combinations of arguments. 
    # Here we do a simple thing and just verify that passing them in, we have
    # access to the relevant attributes from the resulting object
    icrs = ICRS(**kwargs)
    gal = icrs.transform_to(Galactic)
    repr_gal = repr(gal)

    for k in kwargs:
        getattr(icrs, k)

    if 'pm_ra_cosdec' in kwargs:  # should have both
        assert 'pm_l_cosb' in repr_gal
        assert 'pm_b' in repr_gal
        assert 'mas / yr' in repr_gal

        if 'radial_velocity' not in kwargs:
            assert 'radial_velocity' not in repr_gal

    if 'radial_velocity' in kwargs:
        assert 'radial_velocity' in repr_gal
        assert 'km / s' in repr_gal

        if 'pm_ra_cosdec' not in kwargs:
            assert 'pm_l_cosb' not in repr_gal
            assert 'pm_b' not in repr_gal


@pytest.mark.parametrize('cls,lon,lat', [
    [bf.ICRS, 'ra', 'dec'], [bf.FK4, 'ra', 'dec'],
    [bf.FK4NoETerms, 'ra', 'dec'], [bf.FK5, 'ra', 'dec'],
    [bf.GCRS, 'ra', 'dec'], [bf.HCRS, 'ra', 'dec'],
    [bf.LSR, 'ra', 'dec'], [bf.CIRS, 'ra', 'dec'],
    [bf.Galactic, 'l', 'b'], [bf.AltAz, 'az', 'alt'],
    [bf.Supergalactic, 'sgl', 'sgb'], [bf.GalacticLSR, 'l', 'b'],
    [bf.HeliocentricTrueEcliptic, 'lon', 'lat'],
    [bf.GeocentricTrueEcliptic, 'lon', 'lat'],
    [bf.BarycentricTrueEcliptic, 'lon', 'lat'],
    [bf.PrecessedGeocentric, 'ra', 'dec']
])
def test_expected_arg_names(cls, lon, lat):
    kwargs = {lon: 37.4*u.deg, lat: -55.8*u.deg, 'distance': 150*u.pc,
              'pm_{0}_cos{1}'.format(lon, lat): -21.2*u.mas/u.yr,
              'pm_{0}'.format(lat): 17.1*u.mas/u.yr,
              'radial_velocity': 105.7*u.km/u.s}
    frame = cls(**kwargs)


# these data are extracted from the vizier copy of XHIP:
# http://vizier.u-strasbg.fr/viz-bin/VizieR-3?-source=+V/137A/XHIP
_xhip_head = """
------ ------------ ------------ -------- -------- ------------ ------------ ------- -------- -------- ------- ------ ------ ------
       R            D            pmRA     pmDE                               Di      pmGLon   pmGLat   RV      U      V      W
   HIP AJ2000 (deg) EJ2000 (deg) (mas/yr) (mas/yr) GLon (deg)   GLat (deg)   st (pc) (mas/yr) (mas/yr) (km/s)  (km/s) (km/s) (km/s)
------ ------------ ------------ -------- -------- ------------ ------------ ------- -------- -------- ------- ------ ------ ------
"""[1:-1]

_xhip_data = """
    19 000.05331690 +38.30408633    -3.17   -15.37 112.00026470 -23.47789171  247.12    -6.40   -14.33    6.30    7.3    2.0  -17.9
    20 000.06295067 +23.52928427    36.11   -22.48 108.02779304 -37.85659811   95.90    29.35   -30.78   37.80  -19.3   16.1  -34.2
    21 000.06623581 +08.00723430    61.48    -0.23 101.69697120 -52.74179515  183.68    58.06   -20.23  -11.72  -45.2  -30.9   -1.3
 24917 080.09698238 -33.39874984    -4.30    13.40 236.92324669 -32.58047131  107.38   -14.03    -1.15   36.10  -22.4  -21.3  -19.9
 59207 182.13915108 +65.34963517    18.17     5.49 130.04157185  51.18258601   56.00   -18.98    -0.49    5.70    1.5    6.1    4.4
 87992 269.60730667 +36.87462906   -89.58    72.46  62.98053142  25.90148234  129.60    45.64   105.79   -4.00  -39.5  -15.8   56.7
115110 349.72322473 -28.74087144    48.86    -9.25  23.00447250 -69.52799804  116.87    -8.37   -49.02   15.00  -16.8  -12.2  -23.6
"""[1:-1]


# in principle we could parse the above as a table, but doing it "manually"
# makes this test less tied to Table working correctly
@pytest.mark.parametrize('hip,ra,dec,pmra,pmdec,glon,glat,dist,pmglon,pmglat,rv,U,V,W',
                         [[float(val) for val in row.split()]
                          for row in _xhip_data.split('\n')])
def test_xhip_galactic(hip, ra, dec, pmra, pmdec, glon, glat, dist,
                       pmglon, pmglat, rv, U, V, W):
    i = ICRS(ra*u.deg, dec*u.deg, dist*u.pc,
             pm_ra_cosdec=pmra*u.marcsec/u.yr, pm_dec=pmdec*u.marcsec/u.yr,
             radial_velocity=rv*u.km/u.s)
    g = i.transform_to(Galactic)

    # precision is limited by 2-decimal digit string representation of pms
    assert quantity_allclose(g.pm_l_cosb, pmglon*u.marcsec/u.yr,
                             atol=.01*u.marcsec/u.yr)
    assert \
quantity_allclose(g.pm_b, pmglat*u.marcsec/u.yr, atol=.01*u.marcsec/u.yr) # make sure UVW also makes sense uvwg = g.cartesian.differentials['s'] # precision is limited by 1-decimal digit string representation of vels assert quantity_allclose(uvwg.d_x, U*u.km/u.s, atol=.1*u.km/u.s) assert quantity_allclose(uvwg.d_y, V*u.km/u.s, atol=.1*u.km/u.s) assert quantity_allclose(uvwg.d_z, W*u.km/u.s, atol=.1*u.km/u.s) @pytest.mark.parametrize('kwargs,expect_success', [ [dict(ra=37.4*u.deg, dec=-55.8*u.deg), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc), True], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, radial_velocity=105.7*u.km/u.s), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s), True] ]) def test_frame_affinetransform(kwargs, expect_success): """There are already tests in test_transformations.py that check that an AffineTransform fails without full-space data, but this just checks that things work as expected at the frame level as well. """ icrs = ICRS(**kwargs) if expect_success: gc = icrs.transform_to(Galactocentric) else: with pytest.raises(ConvertError): icrs.transform_to(Galactocentric) def test_differential_cls_arg(): """ Test passing in an explicit differential class to the initializer or changing the differential class via set_representation_cls """ from ..builtin_frames import ICRS icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr, differential_cls=r.UnitSphericalDifferential) assert icrs.pm_ra == 10*u.mas/u.yr icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr, differential_cls={'s': r.UnitSphericalDifferential}) assert icrs.pm_ra == 10*u.mas/u.yr icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra_cosdec=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr) icrs.set_representation_cls(s=r.UnitSphericalDifferential) assert quantity_allclose(icrs.pm_ra, 20*u.mas/u.yr) # incompatible representation and differential with pytest.raises(TypeError): ICRS(ra=1*u.deg, dec=60*u.deg, v_x=1*u.km/u.s, v_y=-2*u.km/u.s, v_z=-2*u.km/u.s, differential_cls=r.CartesianDifferential) # specify both icrs = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, v_x=1*u.km/u.s, v_y=2*u.km/u.s, v_z=3*u.km/u.s, representation=r.CartesianRepresentation, differential_cls=r.CartesianDifferential) assert icrs.x == 1*u.pc assert icrs.y == 2*u.pc assert icrs.z == 3*u.pc assert icrs.v_x == 1*u.km/u.s assert icrs.v_y == 2*u.km/u.s assert icrs.v_z == 3*u.km/u.s def test_slicing_preserves_differential(): icrs = ICRS(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s) icrs2 = icrs.reshape(1,1)[:1,0] for name in icrs.representation_component_names.keys(): assert getattr(icrs, name) == getattr(icrs2, name)[0] for name in icrs.get_representation_component_names('s').keys(): assert getattr(icrs, name) == getattr(icrs2, name)[0]
AustereCuriosity/astropy
astropy/coordinates/tests/test_frames_with_velocity.py
astropy/utils/iers/iers.py
"""A Collection of useful miscellaneous functions. misc.py: Collection of useful miscellaneous functions. :Author: Hannes Breytenbach (hannes@saao.ac.za) """ from __future__ import absolute_import, division, print_function import collections import itertools import operator from ...extern.six.moves import zip, map, filter def first_true_index(iterable, pred=None, default=None): """find the first index position for the which the callable pred returns True""" if pred is None: func = operator.itemgetter(1) else: func = lambda x: pred(x[1]) ii = next(filter(func, enumerate(iterable)), default) # either index-item pair or default return ii[0] if ii else default def first_false_index(iterable, pred=None, default=None): """find the first index position for the which the callable pred returns False""" if pred is None: func = operator.not_ else: func = lambda x: not pred(x) return first_true_index(iterable, func, default) def sortmore(*args, **kw): """ Sorts any number of lists according to: optionally given item sorting key function(s) and/or a global sorting key function. Parameters ---------- One or more lists Keywords -------- globalkey : None revert to sorting by key function globalkey : callable Sort by evaluated value for all items in the lists (call signature of this function needs to be such that it accepts an argument tuple of items from each list. eg.: globalkey = lambda *l: sum(l) will order all the lists by the sum of the items from each list if key: None sorting done by value of first input list (in this case the objects in the first iterable need the comparison methods __lt__ etc...) if key: callable sorting done by value of key(item) for items in first iterable if key: tuple sorting done by value of (key(item_0), ..., key(item_n)) for items in the first n iterables (where n is the length of the key tuple) i.e. the first callable is the primary sorting criterion, and the rest act as tie-breakers. 
Returns ------- Sorted lists Examples -------- Capture sorting indeces: l = list('CharacterS') In [1]: sortmore( l, range(len(l)) ) Out[1]: (['C', 'S', 'a', 'a', 'c', 'e', 'h', 'r', 'r', 't'], [0, 9, 2, 4, 5, 7, 1, 3, 8, 6]) In [2]: sortmore( l, range(len(l)), key=str.lower ) Out[2]: (['a', 'a', 'C', 'c', 'e', 'h', 'r', 'r', 'S', 't'], [2, 4, 0, 5, 7, 1, 3, 8, 9, 6]) """ first = list(args[0]) if not len(first): return args globalkey = kw.get('globalkey') key = kw.get('key') if key is None: if globalkey: # if global sort function given and no local (secondary) key given, ==> no tiebreakers key = lambda x: 0 else: key = lambda x: x # if no global sort and no local sort keys given, sort by item values if globalkey is None: globalkey = lambda *x: 0 if not isinstance(globalkey, collections.Callable): raise ValueError('globalkey needs to be callable') if isinstance(key, collections.Callable): k = lambda x: (globalkey(*x), key(x[0])) elif isinstance(key, tuple): key = (k if k else lambda x: 0 for k in key) k = lambda x: (globalkey(*x),) + tuple(f(z) for (f, z) in zip(key, x)) else: raise KeyError( "kw arg 'key' should be None, callable, or a sequence of callables, not {}" .format(type(key))) res = sorted(list(zip(*args)), key=k) if 'order' in kw: if kw['order'].startswith(('descend', 'reverse')): res = reversed(res) return tuple(map(list, zip(*res))) def groupmore(func=None, *its): """Extends the itertools.groupby functionality to arbitrary number of iterators.""" if not func: func = lambda x: x its = sortmore(*its, key=func) nfunc = lambda x: func(x[0]) zipper = itertools.groupby(zip(*its), nfunc) unzipper = ((key, zip(*groups)) for key, groups in zipper) return unzipper
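A quick sketch of how groupmore ties into sortmore, using hypothetical data (note the module above predates Python 3.10, where its isinstance(..., collections.Callable) checks would need collections.abc.Callable):

# assumes an astropy version that still ships astropy/io/ascii/misc.py:
from astropy.io.ascii.misc import groupmore

# group two parallel lists by a key computed on the first one
names = ['ant', 'apple', 'bat', 'cow', 'bee']
scores = [1, 2, 3, 4, 5]

for key, (grp_names, grp_scores) in groupmore(lambda s: s[0], names, scores):
    print(key, grp_names, grp_scores)
# a ('ant', 'apple') (1, 2)
# b ('bat', 'bee') (3, 5)
# c ('cow',) (4,)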
repo_name: AustereCuriosity/astropy
test_path: astropy/coordinates/tests/test_frames_with_velocity.py
code_path: astropy/io/ascii/misc.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import print_function, division, absolute_import """ This file defines the classes used to represent a 'coordinate', which includes axes, ticks, tick labels, and grid lines. """ import numpy as np from matplotlib.ticker import Formatter from matplotlib.transforms import Affine2D, ScaledTranslation from matplotlib.patches import PathPatch from matplotlib import rcParams from ... import units as u from ...extern import six from .formatter_locator import AngleFormatterLocator, ScalarFormatterLocator from .ticks import Ticks from .ticklabels import TickLabels from .axislabels import AxisLabels from .grid_paths import get_lon_lat_path, get_gridline_path __all__ = ['CoordinateHelper'] def wrap_angle_at(values, coord_wrap): # On ARM processors, np.mod emits warnings if there are NaN values in the # array, although this doesn't seem to happen on other processors. with np.errstate(invalid='ignore'): return np.mod(values - coord_wrap, 360.) - (360. - coord_wrap) class CoordinateHelper(object): """ Helper class to control one of the coordinates in the :class:`~astropy.visualization.wcsaxes.WCSAxes`. Parameters ---------- parent_axes : :class:`~astropy.visualization.wcsaxes.WCSAxes` The axes the coordinate helper belongs to. parent_map : :class:`~astropy.visualization.wcsaxes.CoordinatesMap` The :class:`~astropy.visualization.wcsaxes.CoordinatesMap` object this coordinate belongs to. transform : `~matplotlib.transforms.Transform` The transform corresponding to this coordinate system. coord_index : int The index of this coordinate in the :class:`~astropy.visualization.wcsaxes.CoordinatesMap`. coord_type : {'longitude', 'latitude', 'scalar'} The type of this coordinate, which is used to determine the wrapping and boundary behavior of coordinates. Longitudes wrap at ``coord_wrap``, latitudes have to be in the range -90 to 90, and scalars are unbounded and do not wrap. coord_unit : `~astropy.units.Unit` The unit that this coordinate is in given the output of transform. coord_wrap : float The angle at which the longitude wraps (defaults to 360) frame : `~astropy.visualization.wcsaxes.frame.BaseFrame` The frame of the :class:`~astropy.visualization.wcsaxes.WCSAxes`. """ def __init__(self, parent_axes=None, parent_map=None, transform=None, coord_index=None, coord_type='scalar', coord_unit=None, coord_wrap=None, frame=None): # Keep a reference to the parent axes and the transform self.parent_axes = parent_axes self.parent_map = parent_map self.transform = transform self.coord_index = coord_index self.coord_unit = coord_unit self.frame = frame self.set_coord_type(coord_type, coord_wrap) # Initialize ticks self.dpi_transform = Affine2D() self.offset_transform = ScaledTranslation(0, 0, self.dpi_transform) self.ticks = Ticks(transform=parent_axes.transData + self.offset_transform) # Initialize tick labels self.ticklabels = TickLabels(self.frame, transform=None, # display coordinates figure=parent_axes.get_figure()) self.ticks.display_minor_ticks(False) self.minor_frequency = 5 # Initialize axis labels self.axislabels = AxisLabels(self.frame, transform=None, # display coordinates figure=parent_axes.get_figure()) # Initialize container for the grid lines self.grid_lines = [] # Initialize grid style. Take defaults from matplotlib.rcParams. # Based on matplotlib.axis.YTick._get_gridline. # # Matplotlib's gridlines use Line2D, but ours use PathPatch. # Patches take a slightly different format of linestyle argument. 
lines_to_patches_linestyle = {'-': 'solid', '--': 'dashed', '-.': 'dashdot', ':': 'dotted', 'none': 'none', 'None': 'none', ' ': 'none', '': 'none'} self.grid_lines_kwargs = {'visible': False, 'facecolor': 'none', 'edgecolor': rcParams['grid.color'], 'linestyle': lines_to_patches_linestyle[rcParams['grid.linestyle']], 'linewidth': rcParams['grid.linewidth'], 'alpha': rcParams.get('grid.alpha', 1.0), 'transform': self.parent_axes.transData} def grid(self, draw_grid=True, grid_type='lines', **kwargs): """ Plot grid lines for this coordinate. Standard matplotlib appearance options (color, alpha, etc.) can be passed as keyword arguments. Parameters ---------- draw_grid : bool Whether to show the gridlines grid_type : { 'lines' | 'contours' } Whether to plot the contours by determining the grid lines in world coordinates and then plotting them in world coordinates (``'lines'``) or by determining the world coordinates at many positions in the image and then drawing contours (``'contours'``). The first is recommended for 2-d images, while for 3-d (or higher dimensional) cubes, the ``'contours'`` option is recommended. """ if grid_type in ('lines', 'contours'): self._grid_type = grid_type else: raise ValueError("grid_type should be 'lines' or 'contours'") if 'color' in kwargs: kwargs['edgecolor'] = kwargs.pop('color') self.grid_lines_kwargs.update(kwargs) if self.grid_lines_kwargs['visible']: if not draw_grid: self.grid_lines_kwargs['visible'] = False else: self.grid_lines_kwargs['visible'] = True def set_coord_type(self, coord_type, coord_wrap=None): """ Set the coordinate type for the axis. Parameters ---------- coord_type : str One of 'longitude', 'latitude' or 'scalar' coord_wrap : float, optional The value to wrap at for angular coordinates """ self.coord_type = coord_type if coord_type == 'longitude' and coord_wrap is None: self.coord_wrap = 360 elif coord_type != 'longitude' and coord_wrap is not None: raise NotImplementedError('coord_wrap is not yet supported ' 'for non-longitude coordinates') else: self.coord_wrap = coord_wrap # Initialize tick formatter/locator if coord_type == 'scalar': self._coord_unit_scale = None self._formatter_locator = ScalarFormatterLocator(unit=self.coord_unit) elif coord_type in ['longitude', 'latitude']: if self.coord_unit is u.deg: self._coord_unit_scale = None else: self._coord_unit_scale = self.coord_unit.to(u.deg) self._formatter_locator = AngleFormatterLocator() else: raise ValueError("coord_type should be one of 'scalar', 'longitude', or 'latitude'") def set_major_formatter(self, formatter): """ Set the formatter to use for the major tick labels. Parameters ---------- formatter : str or Formatter The format or formatter to use. """ if isinstance(formatter, Formatter): raise NotImplementedError() # figure out how to swap out formatter elif isinstance(formatter, six.string_types): self._formatter_locator.format = formatter else: raise TypeError("formatter should be a string or a Formatter " "instance") def format_coord(self, value): """ Given the value of a coordinate, will format it according to the format of the formatter_locator. 
""" if not hasattr(self, "_fl_spacing"): return "" # _update_ticks has not been called yet fl = self._formatter_locator if isinstance(fl, AngleFormatterLocator): # Convert to degrees if needed if self._coord_unit_scale is not None: value *= self._coord_unit_scale if self.coord_type == 'longitude': value = wrap_angle_at(value, self.coord_wrap) value = value * u.degree value = value.to_value(fl._unit) spacing = self._fl_spacing string = fl.formatter(values=[value] * fl._unit, spacing=spacing) return string[0] def set_separator(self, separator): """ Set the separator to use for the angle major tick labels. Parameters ---------- separator : The separator between numbers in sexagesimal representation. Can be either a string or a tuple. """ if not (self._formatter_locator.__class__ == AngleFormatterLocator): raise TypeError("Separator can only be specified for angle coordinates") if isinstance(separator, six.string_types) or isinstance(separator, tuple): self._formatter_locator.sep = separator else: raise TypeError("separator should be a string or a tuple") def set_format_unit(self, unit): """ Set the unit for the major tick labels. Parameters ---------- unit : class:`~astropy.units.Unit` The unit to which the tick labels should be converted to. """ if not issubclass(unit.__class__, u.UnitBase): raise TypeError("unit should be an astropy UnitBase subclass") self._formatter_locator.format_unit = unit def set_ticks(self, values=None, spacing=None, number=None, size=None, width=None, color=None, alpha=None, exclude_overlapping=False): """ Set the location and properties of the ticks. At most one of the options from ``values``, ``spacing``, or ``number`` can be specified. Parameters ---------- values : iterable, optional The coordinate values at which to show the ticks. spacing : float, optional The spacing between ticks. number : float, optional The approximate number of ticks shown. size : float, optional The length of the ticks in points color : str or tuple A valid Matplotlib color for the ticks exclude_overlapping : bool, optional Whether to exclude tick labels that overlap over each other. """ if sum([values is None, spacing is None, number is None]) < 2: raise ValueError("At most one of values, spacing, or number should " "be specified") if values is not None: self._formatter_locator.values = values elif spacing is not None: self._formatter_locator.spacing = spacing elif number is not None: self._formatter_locator.number = number if size is not None: self.ticks.set_ticksize(size) if width is not None: self.ticks.set_linewidth(width) if color is not None: self.ticks.set_color(color) if alpha is not None: self.ticks.set_alpha(alpha) self.ticklabels.set_exclude_overlapping(exclude_overlapping) def set_ticks_position(self, position): """ Set where ticks should appear Parameters ---------- position : str The axes on which the ticks for this coordinate should appear. Should be a string containing zero or more of ``'b'``, ``'t'``, ``'l'``, ``'r'``. For example, ``'lb'`` will lead the ticks to be shown on the left and bottom axis. """ self.ticks.set_visible_axes(position) def set_ticks_visible(self, visible): """ Set whether ticks are visible or not. Parameters ---------- visible : bool The visibility of ticks. Setting as ``False`` will hide ticks along this coordinate. """ self.ticks.set_visible(visible) def set_ticklabel(self, **kwargs): """ Set the visual properties for the tick labels. Parameters ---------- kwargs Keyword arguments are passed to :class:`matplotlib.text.Text`. 
These can include keywords to set the ``color``, ``size``, ``weight``, and other text properties. """ self.ticklabels.set(**kwargs) def set_ticklabel_position(self, position): """ Set where tick labels should appear Parameters ---------- position : str The axes on which the tick labels for this coordinate should appear. Should be a string containing zero or more of ``'b'``, ``'t'``, ``'l'``, ``'r'``. For example, ``'lb'`` will lead the tick labels to be shown on the left and bottom axis. """ self.ticklabels.set_visible_axes(position) def set_ticklabel_visible(self, visible): """ Set whether the tick labels are visible or not. Parameters ---------- visible : bool The visibility of ticks. Setting as ``False`` will hide this coordinate's tick labels. """ self.ticklabels.set_visible(visible) def set_axislabel(self, text, minpad=1, **kwargs): """ Set the text and optionally visual properties for the axis label. Parameters ---------- text : str The axis label text. minpad : float, optional The padding for the label in terms of axis label font size. kwargs Keywords are passed to :class:`matplotlib.text.Text`. These can include keywords to set the ``color``, ``size``, ``weight``, and other text properties. """ self.axislabels.set_text(text) self.axislabels.set_minpad(minpad) self.axislabels.set(**kwargs) def get_axislabel(self): """ Get the text for the axis label Returns ------- label : str The axis label """ return self.axislabels.get_text() def set_axislabel_position(self, position): """ Set where axis labels should appear Parameters ---------- position : str The axes on which the axis label for this coordinate should appear. Should be a string containing zero or more of ``'b'``, ``'t'``, ``'l'``, ``'r'``. For example, ``'lb'`` will lead the axis label to be shown on the left and bottom axis. """ self.axislabels.set_visible_axes(position) @property def locator(self): return self._formatter_locator.locator @property def formatter(self): return self._formatter_locator.formatter def _draw_grid(self, renderer): renderer.open_group('grid lines') self._update_ticks() if self.grid_lines_kwargs['visible']: if self._grid_type == 'lines': self._update_grid_lines() else: self._update_grid_contour() if self._grid_type == 'lines': frame_patch = self.frame.patch for path in self.grid_lines: p = PathPatch(path, **self.grid_lines_kwargs) p.set_clip_path(frame_patch) p.draw(renderer) elif self._grid is not None: for line in self._grid.collections: line.set(**self.grid_lines_kwargs) line.draw(renderer) renderer.close_group('grid lines') def _draw_ticks(self, renderer, bboxes, ticklabels_bbox): renderer.open_group('ticks') self.ticks.draw(renderer) self.ticklabels.draw(renderer, bboxes=bboxes, ticklabels_bbox=ticklabels_bbox) renderer.close_group('ticks') def _draw_axislabels(self, renderer, bboxes, ticklabels_bbox, visible_ticks): renderer.open_group('axis labels') self.axislabels.draw(renderer, bboxes=bboxes, ticklabels_bbox_list=ticklabels_bbox, visible_ticks=visible_ticks) renderer.close_group('axis labels') def _update_ticks(self): # TODO: this method should be optimized for speed # Here we determine the location and rotation of all the ticks. For # each axis, we can check the intersections for the specific # coordinate and once we have the tick positions, we can use the WCS # to determine the rotations. 
# Find the range of coordinates in all directions coord_range = self.parent_map.get_coord_range() # First find the ticks we want to show tick_world_coordinates, self._fl_spacing = self.locator(*coord_range[self.coord_index]) if self.ticks.get_display_minor_ticks(): minor_ticks_w_coordinates = self._formatter_locator.minor_locator(self._fl_spacing, self.get_minor_frequency(), *coord_range[self.coord_index]) # We want to allow non-standard rectangular frames, so we just rely on # the parent axes to tell us what the bounding frame is. from . import conf frame = self.frame.sample(conf.frame_boundary_samples) self.ticks.clear() self.ticklabels.clear() self.lblinfo = [] self.lbl_world = [] # Look up parent axes' transform from data to figure coordinates. # # See: # http://matplotlib.org/users/transforms_tutorial.html#the-transformation-pipeline transData = self.parent_axes.transData invertedTransLimits = transData.inverted() for axis, spine in six.iteritems(frame): # Determine tick rotation in display coordinates and compare to # the normal angle in display coordinates. pixel0 = spine.data world0 = spine.world[:, self.coord_index] world0 = self.transform.transform(pixel0)[:, self.coord_index] axes0 = transData.transform(pixel0) # Advance 2 pixels in figure coordinates pixel1 = axes0.copy() pixel1[:, 0] += 2.0 pixel1 = invertedTransLimits.transform(pixel1) world1 = self.transform.transform(pixel1)[:, self.coord_index] # Advance 2 pixels in figure coordinates pixel2 = axes0.copy() pixel2[:, 1] += 2.0 if self.frame.origin == 'lower' else -2.0 pixel2 = invertedTransLimits.transform(pixel2) world2 = self.transform.transform(pixel2)[:, self.coord_index] dx = (world1 - world0) dy = (world2 - world0) # Rotate by 90 degrees dx, dy = -dy, dx if self._coord_unit_scale is not None: dx *= self._coord_unit_scale dy *= self._coord_unit_scale if self.coord_type == 'longitude': # Here we wrap at 180 not self.coord_wrap since we want to # always ensure abs(dx) < 180 and abs(dy) < 180 dx = wrap_angle_at(dx, 180.) dy = wrap_angle_at(dy, 180.) tick_angle = np.degrees(np.arctan2(dy, dx)) normal_angle_full = np.hstack([spine.normal_angle, spine.normal_angle[-1]]) with np.errstate(invalid='ignore'): reset = (((normal_angle_full - tick_angle) % 360 > 90.) & ((tick_angle - normal_angle_full) % 360 > 90.)) tick_angle[reset] -= 180. # We find for each interval the starting and ending coordinate, # ensuring that we take wrapping into account correctly for # longitudes. w1 = spine.world[:-1, self.coord_index] w2 = spine.world[1:, self.coord_index] if self._coord_unit_scale is not None: w1 = w1 * self._coord_unit_scale w2 = w2 * self._coord_unit_scale if self.coord_type == 'longitude': w1 = wrap_angle_at(w1, self.coord_wrap) w2 = wrap_angle_at(w2, self.coord_wrap) with np.errstate(invalid='ignore'): w1[w2 - w1 > 180.] += 360 w2[w1 - w2 > 180.] += 360 # For longitudes, we need to check ticks as well as ticks + 360, # since the above can produce pairs such as 359 to 361 or 0.5 to # 1.5, both of which would match a tick at 0.75. Otherwise we just # check the ticks determined above. 
self._compute_ticks(tick_world_coordinates, spine, axis, w1, w2, tick_angle) if self.ticks.get_display_minor_ticks(): self._compute_ticks(minor_ticks_w_coordinates, spine, axis, w1, w2, tick_angle, ticks='minor') # format tick labels, add to scene text = self.formatter(self.lbl_world * tick_world_coordinates.unit, spacing=self._fl_spacing) for kwargs, txt in zip(self.lblinfo, text): self.ticklabels.add(text=txt, **kwargs) def _compute_ticks(self, tick_world_coordinates, spine, axis, w1, w2, tick_angle, ticks='major'): tick_world_coordinates_values = tick_world_coordinates.value if self.coord_type == 'longitude': tick_world_coordinates_values = np.hstack([tick_world_coordinates_values, tick_world_coordinates_values + 360]) for t in tick_world_coordinates_values: # Find steps where a tick is present. We have to check # separately for the case where the tick falls exactly on the # frame points, otherwise we'll get two matches, one for w1 and # one for w2. with np.errstate(invalid='ignore'): intersections = np.hstack([np.nonzero((t - w1) == 0)[0], np.nonzero(((t - w1) * (t - w2)) < 0)[0]]) # But we also need to check for intersection with the last w2 if t - w2[-1] == 0: intersections = np.append(intersections, len(w2) - 1) # Loop over ticks, and find exact pixel coordinates by linear # interpolation for imin in intersections: imax = imin + 1 if np.allclose(w1[imin], w2[imin], rtol=1.e-13, atol=1.e-13): continue # tick is exactly aligned with frame else: frac = (t - w1[imin]) / (w2[imin] - w1[imin]) x_data_i = spine.data[imin, 0] + frac * (spine.data[imax, 0] - spine.data[imin, 0]) y_data_i = spine.data[imin, 1] + frac * (spine.data[imax, 1] - spine.data[imin, 1]) x_pix_i = spine.pixel[imin, 0] + frac * (spine.pixel[imax, 0] - spine.pixel[imin, 0]) y_pix_i = spine.pixel[imin, 1] + frac * (spine.pixel[imax, 1] - spine.pixel[imin, 1]) delta_angle = tick_angle[imax] - tick_angle[imin] if delta_angle > 180.: delta_angle -= 360. elif delta_angle < -180.: delta_angle += 360. angle_i = tick_angle[imin] + frac * delta_angle if self.coord_type == 'longitude': world = wrap_angle_at(t, self.coord_wrap) else: world = t if ticks == 'major': self.ticks.add(axis=axis, pixel=(x_data_i, y_data_i), world=world, angle=angle_i, axis_displacement=imin + frac) # store information to pass to ticklabels.add # it's faster to format many ticklabels at once outside # of the loop self.lblinfo.append(dict(axis=axis, pixel=(x_pix_i, y_pix_i), world=world, angle=spine.normal_angle[imin], axis_displacement=imin + frac)) self.lbl_world.append(world) else: self.ticks.add_minor(minor_axis=axis, minor_pixel=(x_data_i, y_data_i), minor_world=world, minor_angle=angle_i, minor_axis_displacement=imin + frac) def display_minor_ticks(self, display_minor_ticks): """ Display minor ticks for this coordinate. Parameters ---------- display_minor_ticks : bool Whether or not to display minor ticks. """ self.ticks.display_minor_ticks(display_minor_ticks) def get_minor_frequency(self): return self.minor_frequency def set_minor_frequency(self, frequency): """ Set the frequency of minor ticks per major ticks. Parameters ---------- frequency : int The number of minor ticks per major ticks. """ self.minor_frequency = frequency def _update_grid_lines(self): # For 3-d WCS with a correlated third axis, the *proper* way of # drawing a grid should be to find the world coordinates of all pixels # and drawing contours. 
        # What we are doing here assumes that we can define the grid lines
        # with just two of the coordinates (and therefore assumes that the
        # other coordinates are fixed and set to the value in the slice).
        # Here we basically assume that if the WCS had a third axis, it has
        # been abstracted away in the transformation.

        coord_range = self.parent_map.get_coord_range()

        tick_world_coordinates, spacing = self.locator(*coord_range[self.coord_index])
        tick_world_coordinates_values = tick_world_coordinates.value

        n_coord = len(tick_world_coordinates_values)

        from . import conf
        n_samples = conf.grid_samples

        xy_world = np.zeros((n_samples * n_coord, 2))

        self.grid_lines = []
        for iw, w in enumerate(tick_world_coordinates_values):
            subset = slice(iw * n_samples, (iw + 1) * n_samples)
            if self.coord_index == 0:
                xy_world[subset, 0] = np.repeat(w, n_samples)
                xy_world[subset, 1] = np.linspace(coord_range[1][0],
                                                  coord_range[1][1], n_samples)
            else:
                xy_world[subset, 0] = np.linspace(coord_range[0][0],
                                                  coord_range[0][1], n_samples)
                xy_world[subset, 1] = np.repeat(w, n_samples)

        # We now convert all the world coordinates to pixel coordinates in a
        # single go rather than doing this in the gridline to path conversion
        # to fully benefit from vectorized coordinate transformations.

        # Currently xy_world is in deg, but the transform function needs it
        # in native units
        if self._coord_unit_scale is not None:
            xy_world /= self._coord_unit_scale

        # Transform line to pixel coordinates
        pixel = self.transform.inverted().transform(xy_world)

        # Create round-tripped values for checking
        xy_world_round = self.transform.transform(pixel)

        for iw in range(n_coord):
            subset = slice(iw * n_samples, (iw + 1) * n_samples)
            self.grid_lines.append(self._get_gridline(xy_world[subset],
                                                      pixel[subset],
                                                      xy_world_round[subset]))

    def _get_gridline(self, xy_world, pixel, xy_world_round):
        if self.coord_type == 'scalar':
            return get_gridline_path(xy_world, pixel)
        else:
            return get_lon_lat_path(xy_world, pixel, xy_world_round)

    def _update_grid_contour(self):

        if hasattr(self, '_grid'):
            for line in self._grid.collections:
                line.remove()

        xmin, xmax = self.parent_axes.get_xlim()
        ymin, ymax = self.parent_axes.get_ylim()

        x, y, field = self.transform.get_coord_slices(xmin, xmax, ymin, ymax,
                                                      200, 200)

        coord_range = self.parent_map.get_coord_range()

        tick_world_coordinates, spacing = self.locator(*coord_range[self.coord_index])

        field = field[self.coord_index]

        # tick_world_coordinates is a Quantity array and we only need its
        # values
        tick_world_coordinates_values = tick_world_coordinates.value

        if self.coord_type == 'longitude':

            # Find the biggest gap in tick_world_coordinates and wrap in the
            # middle. For now just assume the spacing is equal, so any
            # mid-point will do.
            mid = 0.5 * (tick_world_coordinates_values[0] +
                         tick_world_coordinates_values[1])
            field = wrap_angle_at(field, mid)
            tick_world_coordinates_values = wrap_angle_at(tick_world_coordinates_values, mid)

            # Replace wraps by NaN
            reset = ((np.abs(np.diff(field[:, :-1], axis=0)) > 180) |
                     (np.abs(np.diff(field[:-1, :], axis=1)) > 180))
            field[:-1, :-1][reset] = np.nan
            field[1:, :-1][reset] = np.nan
            field[:-1, 1:][reset] = np.nan
            field[1:, 1:][reset] = np.nan

        if len(tick_world_coordinates_values) > 0:
            self._grid = self.parent_axes.contour(x, y, field.transpose(),
                                                  levels=np.sort(tick_world_coordinates_values))
        else:
            self._grid = None
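The wrapping convention used throughout the class can be checked standalone; wrap_angle_at below is copied verbatim from the module above, so this minimal sketch runs with only numpy installed:

import numpy as np


def wrap_angle_at(values, coord_wrap):
    # NaNs propagate through np.mod; errstate just silences the warning
    with np.errstate(invalid='ignore'):
        return np.mod(values - coord_wrap, 360.) - (360. - coord_wrap)


# angles come out in the half-open interval [coord_wrap - 360, coord_wrap)
print(wrap_angle_at(np.array([10., 190., 355., np.nan]), 180.))
# [  10. -170.   -5.   nan]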
repo_name: AustereCuriosity/astropy
test_path: astropy/coordinates/tests/test_frames_with_velocity.py
code_path: astropy/visualization/wcsaxes/coordinate_helpers.py
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst # TEST_UNICODE_LITERALS from __future__ import (absolute_import, division, print_function, unicode_literals) """Test replacements for ERFA functions atciqz and aticq.""" from itertools import product from ...tests.helper import (pytest, assert_quantity_allclose as assert_allclose) from ...time import Time from ... import _erfa as erfa from .utils import randomly_sample_sphere from ..builtin_frames.utils import get_jd12, atciqz, aticq times = [Time("2014-06-25T00:00"), Time(["2014-06-25T00:00", "2014-09-24"])] ra, dec, _ = randomly_sample_sphere(2) positions = ((ra[0], dec[0]), (ra, dec)) spacetimes = product(times, positions) @pytest.mark.parametrize('st', spacetimes) def test_atciqz_aticq(st): """Check replacements against erfa versions for consistency.""" t, pos = st jd1, jd2 = get_jd12(t, 'tdb') astrom, _ = erfa.apci13(jd1, jd2) ra, dec = pos ra = ra.value dec = dec.value assert_allclose(erfa.atciqz(ra, dec, astrom), atciqz(ra, dec, astrom)) assert_allclose(erfa.aticq(ra, dec, astrom), aticq(ra, dec, astrom))
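A hedged round-trip sketch built from the same pieces the test imports (astropy's _erfa module is private and version-specific, so treat this as illustrative only, not a supported API): mapping ICRS to CIRS with atciqz and back with aticq should recover the input coordinates to high precision.

import numpy as np
from astropy.time import Time
from astropy import _erfa as erfa
from astropy.coordinates.builtin_frames.utils import get_jd12, atciqz, aticq

t = Time("2014-06-25T00:00")
jd1, jd2 = get_jd12(t, 'tdb')
astrom, _ = erfa.apci13(jd1, jd2)

ra, dec = 1.0, 0.5                 # input ICRS coordinates, radians
ri, di = atciqz(ra, dec, astrom)   # ICRS -> CIRS
rc, dc = aticq(ri, di, astrom)     # CIRS -> ICRS
print(np.abs(rc - ra), np.abs(dc - dec))  # both residuals should be tiny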
repo_name: AustereCuriosity/astropy
test_path: astropy/coordinates/tests/test_frames_with_velocity.py
code_path: astropy/coordinates/tests/test_atc_replacements.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst # This file connects the readers/writers to the astropy.table.Table class from __future__ import absolute_import, division, print_function import re import functools from .. import registry as io_registry from ...table import Table from ...extern.six.moves import zip __all__ = [] # Generic # ======= def read_asciitable(filename, **kwargs): from .ui import read return read(filename, **kwargs) io_registry.register_reader('ascii', Table, read_asciitable) def write_asciitable(table, filename, **kwargs): from .ui import write return write(table, filename, **kwargs) io_registry.register_writer('ascii', Table, write_asciitable) def io_read(format, filename, **kwargs): from .ui import read format = re.sub(r'^ascii\.', '', format) return read(filename, format=format, **kwargs) def io_write(format, table, filename, **kwargs): from .ui import write format = re.sub(r'^ascii\.', '', format) return write(table, filename, format=format, **kwargs) def io_identify(suffix, origin, filepath, fileobj, *args, **kwargs): return filepath is not None and filepath.endswith(suffix) def _get_connectors_table(): from .core import FORMAT_CLASSES rows = [] rows.append(('ascii', '', 'Yes', 'ASCII table in any supported format (uses guessing)')) for format in sorted(FORMAT_CLASSES): cls = FORMAT_CLASSES[format] io_format = 'ascii.' + cls._format_name description = getattr(cls, '_description', '') class_link = ':class:`~{0}.{1}`'.format(cls.__module__, cls.__name__) suffix = getattr(cls, '_io_registry_suffix', '') can_write = 'Yes' if getattr(cls, '_io_registry_can_write', True) else '' rows.append((io_format, suffix, can_write, '{0}: {1}'.format(class_link, description))) out = Table(list(zip(*rows)), names=('Format', 'Suffix', 'Write', 'Description')) for colname in ('Format', 'Description'): width = max(len(x) for x in out[colname]) out[colname].format = '%-{0}s'.format(width) return out # Specific # ======== def read_csv(filename, **kwargs): from .ui import read kwargs['format'] = 'csv' return read(filename, **kwargs) def write_csv(table, filename, **kwargs): from .ui import write kwargs['format'] = 'csv' return write(table, filename, **kwargs) csv_identify = functools.partial(io_identify, '.csv') io_registry.register_reader('csv', Table, read_csv) io_registry.register_writer('csv', Table, write_csv) io_registry.register_identifier('csv', Table, csv_identify)
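A minimal sketch of what the registrations above enable (the file name is hypothetical): Table.read and Table.write dispatch through the io registry to the reader/writer functions defined in this module.

from astropy.table import Table

t = Table({'a': [1, 2], 'b': [3.0, 4.0]})
t.write('example.csv', format='csv')          # routed to write_csv
t2 = Table.read('example.csv', format='csv')  # routed to read_csv
print(t2)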
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import pytest from ... import units as u from ..builtin_frames import ICRS, Galactic, Galactocentric from .. import builtin_frames as bf from ...tests.helper import quantity_allclose from ..errors import ConvertError from .. import representation as r def test_api(): # transform observed Barycentric velocities to full-space Galactocentric gc_frame = Galactocentric() icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, distance=101*u.pc, pm_ra_cosdec=21*u.mas/u.yr, pm_dec=-71*u.mas/u.yr, radial_velocity=71*u.km/u.s) icrs.transform_to(gc_frame) # transform a set of ICRS proper motions to Galactic icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, pm_ra_cosdec=21*u.mas/u.yr, pm_dec=-71*u.mas/u.yr) icrs.transform_to(Galactic) # transform a Barycentric RV to a GSR RV icrs = ICRS(ra=151.*u.deg, dec=-16*u.deg, distance=1.*u.pc, pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr, radial_velocity=71*u.km/u.s) icrs.transform_to(Galactocentric) all_kwargs = [ dict(ra=37.4*u.deg, dec=-55.8*u.deg), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc), dict(ra=37.4*u.deg, dec=-55.8*u.deg, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, radial_velocity=105.7*u.km/u.s), dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s) ] @pytest.mark.parametrize('kwargs', all_kwargs) def test_all_arg_options(kwargs): # Above is a list of all possible valid combinations of arguments. 
# Here we do a simple thing and just verify that passing them in, we have # access to the relevant attributes from the resulting object icrs = ICRS(**kwargs) gal = icrs.transform_to(Galactic) repr_gal = repr(gal) for k in kwargs: getattr(icrs, k) if 'pm_ra_cosdec' in kwargs: # should have both assert 'pm_l_cosb' in repr_gal assert 'pm_b' in repr_gal assert 'mas / yr' in repr_gal if 'radial_velocity' not in kwargs: assert 'radial_velocity' not in repr_gal if 'radial_velocity' in kwargs: assert 'radial_velocity' in repr_gal assert 'km / s' in repr_gal if 'pm_ra_cosdec' not in kwargs: assert 'pm_l_cosb' not in repr_gal assert 'pm_b' not in repr_gal @pytest.mark.parametrize('cls,lon,lat', [ [bf.ICRS, 'ra', 'dec'], [bf.FK4, 'ra', 'dec'], [bf.FK4NoETerms, 'ra', 'dec'], [bf.FK5, 'ra', 'dec'], [bf.GCRS, 'ra', 'dec'], [bf.HCRS, 'ra', 'dec'], [bf.LSR, 'ra', 'dec'], [bf.CIRS, 'ra', 'dec'], [bf.Galactic, 'l', 'b'], [bf.AltAz, 'az', 'alt'], [bf.Supergalactic, 'sgl', 'sgb'], [bf.GalacticLSR, 'l', 'b'], [bf.HeliocentricTrueEcliptic, 'lon', 'lat'], [bf.GeocentricTrueEcliptic, 'lon', 'lat'], [bf.BarycentricTrueEcliptic, 'lon', 'lat'], [bf.PrecessedGeocentric, 'ra', 'dec'] ]) def test_expected_arg_names(cls, lon, lat): kwargs = {lon: 37.4*u.deg, lat: -55.8*u.deg, 'distance': 150*u.pc, 'pm_{0}_cos{1}'.format(lon, lat): -21.2*u.mas/u.yr, 'pm_{0}'.format(lat): 17.1*u.mas/u.yr, 'radial_velocity': 105.7*u.km/u.s} frame = cls(**kwargs) # these data are extracted from the vizier copy of XHIP: # http://vizier.u-strasbg.fr/viz-bin/VizieR-3?-source=+V/137A/XHIP _xhip_head = """ ------ ------------ ------------ -------- -------- ------------ ------------ ------- -------- -------- ------- ------ ------ ------ R D pmRA pmDE Di pmGLon pmGLat RV U V W HIP AJ2000 (deg) EJ2000 (deg) (mas/yr) (mas/yr) GLon (deg) GLat (deg) st (pc) (mas/yr) (mas/yr) (km/s) (km/s) (km/s) (km/s) ------ ------------ ------------ -------- -------- ------------ ------------ ------- -------- -------- ------- ------ ------ ------ """[1:-1] _xhip_data = """ 19 000.05331690 +38.30408633 -3.17 -15.37 112.00026470 -23.47789171 247.12 -6.40 -14.33 6.30 7.3 2.0 -17.9 20 000.06295067 +23.52928427 36.11 -22.48 108.02779304 -37.85659811 95.90 29.35 -30.78 37.80 -19.3 16.1 -34.2 21 000.06623581 +08.00723430 61.48 -0.23 101.69697120 -52.74179515 183.68 58.06 -20.23 -11.72 -45.2 -30.9 -1.3 24917 080.09698238 -33.39874984 -4.30 13.40 236.92324669 -32.58047131 107.38 -14.03 -1.15 36.10 -22.4 -21.3 -19.9 59207 182.13915108 +65.34963517 18.17 5.49 130.04157185 51.18258601 56.00 -18.98 -0.49 5.70 1.5 6.1 4.4 87992 269.60730667 +36.87462906 -89.58 72.46 62.98053142 25.90148234 129.60 45.64 105.79 -4.00 -39.5 -15.8 56.7 115110 349.72322473 -28.74087144 48.86 -9.25 23.00447250 -69.52799804 116.87 -8.37 -49.02 15.00 -16.8 -12.2 -23.6 """[1:-1] # in principle we could parse the above as a table, but doing it "manually" # makes this test less tied to Table working correctly @pytest.mark.parametrize('hip,ra,dec,pmra,pmdec,glon,glat,dist,pmglon,pmglat,rv,U,V,W', [[float(val) for val in row.split()] for row in _xhip_data.split('\n')]) def test_xhip_galactic(hip, ra, dec, pmra, pmdec, glon, glat, dist, pmglon, pmglat, rv, U, V, W): i = ICRS(ra*u.deg, dec*u.deg, dist*u.pc, pm_ra_cosdec=pmra*u.marcsec/u.yr, pm_dec=pmdec*u.marcsec/u.yr, radial_velocity=rv*u.km/u.s) g = i.transform_to(Galactic) # precision is limited by 2-decimal digit string representation of pms assert quantity_allclose(g.pm_l_cosb, pmglon*u.marcsec/u.yr, atol=.01*u.marcsec/u.yr) assert
quantity_allclose(g.pm_b, pmglat*u.marcsec/u.yr, atol=.01*u.marcsec/u.yr) # make sure UVW also makes sense uvwg = g.cartesian.differentials['s'] # precision is limited by 1-decimal digit string representation of vels assert quantity_allclose(uvwg.d_x, U*u.km/u.s, atol=.1*u.km/u.s) assert quantity_allclose(uvwg.d_y, V*u.km/u.s, atol=.1*u.km/u.s) assert quantity_allclose(uvwg.d_z, W*u.km/u.s, atol=.1*u.km/u.s) @pytest.mark.parametrize('kwargs,expect_success', [ [dict(ra=37.4*u.deg, dec=-55.8*u.deg), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc), True], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, radial_velocity=105.7*u.km/u.s), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, radial_velocity=105.7*u.km/u.s, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr), False], [dict(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s), True] ]) def test_frame_affinetransform(kwargs, expect_success): """There are already tests in test_transformations.py that check that an AffineTransform fails without full-space data, but this just checks that things work as expected at the frame level as well. """ icrs = ICRS(**kwargs) if expect_success: gc = icrs.transform_to(Galactocentric) else: with pytest.raises(ConvertError): icrs.transform_to(Galactocentric) def test_differential_cls_arg(): """ Test passing in an explicit differential class to the initializer or changing the differential class via set_representation_cls """ from ..builtin_frames import ICRS icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr, differential_cls=r.UnitSphericalDifferential) assert icrs.pm_ra == 10*u.mas/u.yr icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr, differential_cls={'s': r.UnitSphericalDifferential}) assert icrs.pm_ra == 10*u.mas/u.yr icrs = ICRS(ra=1*u.deg, dec=60*u.deg, pm_ra_cosdec=10*u.mas/u.yr, pm_dec=-11*u.mas/u.yr) icrs.set_representation_cls(s=r.UnitSphericalDifferential) assert quantity_allclose(icrs.pm_ra, 20*u.mas/u.yr) # incompatible representation and differential with pytest.raises(TypeError): ICRS(ra=1*u.deg, dec=60*u.deg, v_x=1*u.km/u.s, v_y=-2*u.km/u.s, v_z=-2*u.km/u.s, differential_cls=r.CartesianDifferential) # specify both icrs = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc, v_x=1*u.km/u.s, v_y=2*u.km/u.s, v_z=3*u.km/u.s, representation=r.CartesianRepresentation, differential_cls=r.CartesianDifferential) assert icrs.x == 1*u.pc assert icrs.y == 2*u.pc assert icrs.z == 3*u.pc assert icrs.v_x == 1*u.km/u.s assert icrs.v_y == 2*u.km/u.s assert icrs.v_z == 3*u.km/u.s def test_slicing_preserves_differential(): icrs = ICRS(ra=37.4*u.deg, dec=-55.8*u.deg, distance=150*u.pc, pm_ra_cosdec=-21.2*u.mas/u.yr, pm_dec=17.1*u.mas/u.yr, radial_velocity=105.7*u.km/u.s) icrs2 = icrs.reshape(1,1)[:1,0] for name in icrs.representation_component_names.keys(): assert getattr(icrs, name) == getattr(icrs2, name)[0] for name in icrs.get_representation_component_names('s').keys(): assert getattr(icrs, name) == getattr(icrs2, name)[0]
AustereCuriosity/astropy
astropy/coordinates/tests/test_frames_with_velocity.py
astropy/io/ascii/connect.py
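Because connect.py registers these readers and writers with io_registry, the connection is exercised straight from Table; a short sketch (the file names are hypothetical):
from astropy.table import Table

# 'ascii.csv' goes through io_read/io_write above, which strip the
# 'ascii.' prefix before delegating to astropy.io.ascii
t = Table.read('measurements.csv', format='ascii.csv')
t.write('measurements_copy.csv', format='ascii.csv')

# plain 'ascii' dispatches to read_asciitable, which lets the ascii
# reader guess the concrete format
t2 = Table.read('measurements_copy.csv', format='ascii')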
from collections import Mapping from contextlib import contextmanager from itertools import izip from cached_property import cached_property from sqlalchemy import MetaData, create_engine, event, inspect from sqlalchemy.exc import ArgumentError, DisconnectionError, InvalidRequestError from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker from sqlalchemy.pool import Pool from fixtures.pytest_store import store from utils import conf from utils.log import logger @event.listens_for(Pool, "checkout") def ping_connection(dbapi_connection, connection_record, connection_proxy): """ping_connection event hook, used to reconnect db sessions that time out Note: See also: :ref:`Connection Invalidation <sqlalchemy:pool_connection_invalidation>` """ cursor = dbapi_connection.cursor() try: cursor.execute("SELECT 1") except StandardError: raise DisconnectionError cursor.close() class Db(Mapping): """Helper class for interacting with a CFME database using SQLAlchemy Args: hostname: base url to be used (default is from current_appliance) credentials: name of credentials to use from :py:attr:`utils.conf.credentials` (default ``database``) Provides convenient attributes to common sqlalchemy objects related to this DB, as well as a Mapping interface to access and reflect database tables. Where possible, attributes are cached. Db objects support getting tables by name via the mapping interface:: table = db['table_name'] Usage: # Usually used to query the DB for info, here's a common query for vm in db.session.query(db['vms']).all(): print(vm.name) print(vm.guid) # List comprehension to get all templates [(vm.name, vm.guid) for vm in db.session.query(db['vms']).all() if vm.template is True] # Use the transaction manager for write operations: with db.transaction: db.session.query(db['vms']).all().delete() Note: Creating a table object requires a call to the database so that SQLAlchemy can do reflection to determine the table's structure (columns, keys, indices, etc). On a latent connection, this can be extremely slow, which will affect methods that return tables, like the mapping interface or :py:meth:`values`. """ def __init__(self, hostname=None, credentials=None, port=None): self._table_cache = {} self.hostname = hostname or store.current_appliance.db.address self.port = port or store.current_appliance.db_port self.credentials = credentials or conf.credentials['database'] def __getitem__(self, table_name): """Access tables as items contained in this db Usage: # To get a table called 'table_name': db['table_name'] This may return ``None`` in the case where a table is found but reflection fails.
""" try: return self._table(table_name) except InvalidRequestError: raise KeyError('Table {} could not be found'.format(table_name)) def __iter__(self): """Iterator of table names in this db""" return self.keys() def __len__(self): """Number of tables in this db""" return len(self.table_names) def __contains__(self, table_name): """Whether or not the named table is in this db""" return table_name in self.table_names def keys(self): """Iterator of table names in this db""" return (table_name for table_name in self.table_names) def items(self): """Iterator of ``(table_name, table)`` pairs""" return izip(self.keys(), self.values()) def values(self): """Iterator of tables in this db""" return (self[table_name] for table_name in self.table_names) def get(self, table_name, default=None): """table getter Args: table_name: Name of the table to get default: Default value to return if ``table_name`` is not found. Returns: a table if ``table_name`` exists, otherwise 'None' or the passed-in default """ try: return self[table_name] except KeyError: return default def copy(self): """Copy this database instance, keeping the same credentials and hostname""" return type(self)(self.hostname, self.credentials) def __eq__(self, other): """Check if this db is equal to another db""" try: return self.hostname == other.hostname except: return False def __ne__(self, other): """Check if this db is not equal to another db""" return not self == other @cached_property def engine(self): """The :py:class:`Engine <sqlalchemy:sqlalchemy.engine.Engine>` for this database It uses pessimistic disconnection handling, checking that the database is still connected before executing commands. """ return create_engine(self.db_url, echo_pool=True) @cached_property def sessionmaker(self): """A :py:class:`sessionmaker <sqlalchemy:sqlalchemy.orm.session.sessionmaker>` Used to make new sessions with this database, as needed. """ return sessionmaker(bind=self.engine) @cached_property def table_base(self): """Base class for all tables returned by this database This base class is created using :py:class:`declarative_base <sqlalchemy:sqlalchemy.ext.declarative.declarative_base>`. """ return declarative_base(metadata=self.metadata) @cached_property def metadata(self): """:py:class:`MetaData <sqlalchemy:sqlalchemy.schema.MetaData>` for this database This can be used for introspection of reflected items. Note: Tables that haven't been reflected won't show up in metadata. To reflect a table, use :py:meth:`reflect_table`. """ return MetaData(bind=self.engine) @cached_property def db_url(self): """The connection URL for this database, including credentials""" template = "postgresql://{username}:{password}@{host}:{port}/vmdb_production" result = template.format(host=self.hostname, port=self.port, **self.credentials) logger.info("[DB] db_url is %s", result) return result @cached_property def table_names(self): """A sorted list of table names available in this database.""" # rails table names follow similar rules as pep8 identifiers; expose them as such return sorted(inspect(self.engine).get_table_names()) @cached_property def session(self): """Returns a :py:class:`Session <sqlalchemy:sqlalchemy.orm.session.Session>` This is used for database queries. For writing to the database, start a :py:meth:`transaction`. Note: This attribute is cached. In cases where a new session needs to be explicitly created, use :py:meth:`sessionmaker`. 
""" return self.sessionmaker(autocommit=True) @property @contextmanager def transaction(self): """Context manager for simple transaction management Sessions understand the concept of transactions, and provider context managers to handle conditionally committing or rolling back transactions as needed. Note: Sessions automatically commit transactions by default. For predictable results when writing to the database, use the transaction manager. Usage: with db.transaction: db.session.do_something() """ with self.session.begin(): yield def reflect_table(self, table_name): """Populate :py:attr:`metadata` with information on a table Args: table_name: The name of a table to reflect """ self.metadata.reflect(only=[table_name]) def _table(self, table_name): """Retrieves, reflects, and caches table objects Actual implementation of __getitem__ """ try: return self._table_cache[table_name] except KeyError: self.reflect_table(table_name) table = self.metadata.tables[table_name] table_dict = { '__table__': table, '__tablename__': table_name } try: table_cls = type(str(table_name), (self.table_base,), table_dict) self._table_cache[table_name] = table_cls return table_cls except ArgumentError: # This usually happens on join tables with no PKs logger.info('Unable to create table class for table "{}"'.format(table_name)) return None @contextmanager def database_on_server(hostname, **kwargs): db_obj = Db(hostname=hostname, **kwargs) yield db_obj
import pytest from cfme.web_ui import PagedTable, toolbar as tb from cfme.containers.pod import Pod from cfme.containers.provider import ContainersProvider, ContainersTestItem from cfme.containers.service import Service from cfme.containers.node import Node from cfme.containers.replicator import Replicator from cfme.containers.image import Image from cfme.containers.project import Project from cfme.containers.container import Container from cfme.containers.image_registry import ImageRegistry from cfme.containers.route import Route from utils.appliance.implementations.ui import navigate_to from utils.log import logger from utils import testgen pytestmark = [ pytest.mark.usefixtures('setup_provider'), pytest.mark.tier(1)] pytest_generate_tests = testgen.generate([ContainersProvider], scope='function') # The polarion markers below are used to mark the test item # with polarion test case ID. # TODO: future enhancement - https://github.com/pytest-dev/pytest/pull/1921 TEST_ITEMS = [ pytest.mark.polarion('CMP-9859')( ContainersTestItem( ContainersProvider, 'CMP-9859', fields_to_verify=['hostname', 'port', 'type'] ) ), pytest.mark.polarion('CMP-10651')( ContainersTestItem( Route, 'CMP-10651', fields_to_verify=['provider', 'project_name'] ) ), pytest.mark.polarion('CMP-9943')( ContainersTestItem( Container, 'CMP-9943', fields_to_verify=['pod_name', 'image', 'state'] ) ), pytest.mark.polarion('CMP-9909')( ContainersTestItem( Pod, 'CMP-9909', fields_to_verify=[ 'provider', 'project_name', 'ready', 'containers', 'phase', 'restart_policy', 'dns_policy' ] ) ), pytest.mark.polarion('CMP-9889')( ContainersTestItem( Service, 'CMP-9889', fields_to_verify=[ 'provider', 'project_name', 'type', 'portal_ip', 'session_affinity', 'pods' ] ) ), pytest.mark.polarion('CMP-9967')( ContainersTestItem( Node, 'CMP-9967', fields_to_verify=[ 'provider', 'ready', 'operating_system', 'kernel_version', 'runtime_version' ] ) ), pytest.mark.polarion('CMP-9920')( ContainersTestItem( Replicator, 'CMP-9920', fields_to_verify=['provider', 'project_name', 'replicas', 'current_replicas'] ) ), pytest.mark.polarion('CMP-9975')( ContainersTestItem( Image, 'CMP-9975', fields_to_verify=['provider', 'tag', 'id', 'image_registry'] ) ), pytest.mark.polarion('CMP-9985')( ContainersTestItem( ImageRegistry, 'CMP-9985', fields_to_verify=['port', 'provider'] ) ), pytest.mark.polarion('CMP-10652')( ContainersTestItem( Project, 'CMP-9886', fields_to_verify=[ 'provider', 'container_routes', 'container_services', 'container_replicators', 'pods', 'containers', 'images' ] ) ) ] @pytest.mark.parametrize('test_item', TEST_ITEMS, ids=[ti.args[1].pretty_id() for ti in TEST_ITEMS]) def test_tables_fields(provider, test_item, soft_assert): navigate_to(test_item.obj, 'All') tb.select('List View') # NOTE: We must re-instantiate the table here # in order to prevent StaleElementException or UsingSharedTables # TODO: Switch to widgetastic paged_tbl = PagedTable(table_locator="//div[@id='list_grid']//table") for row in paged_tbl.rows(): cell = row[2] # We're using indexing since it could be either 'name' or 'host' if cell: name = cell.text else: logger.error('Could not find NAME header on {}s list...' .format(test_item.obj.__name__)) continue for field in test_item.fields_to_verify: try: value = getattr(row, field) except AttributeError: soft_assert(False, '{}\'s list table: field does not exist: {}' .format(test_item.obj.__name__, field)) continue soft_assert(value, '{}\'s list table: {} row - has empty field: {}' .format(test_item.obj.__name__, name, field))
dajohnso/cfme_tests
cfme/tests/containers/test_tables_fields.py
utils/db.py
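A brief usage sketch for the Db helper in the row above, following its own docstring (the hostname and the filter value are made up):
from utils.db import Db, database_on_server

db = Db(hostname='10.1.2.3')  # hypothetical appliance address
vms = db['vms']               # reflected table class, cached after first use

# reads go through the cached, autocommitting session
names = [vm.name for vm in db.session.query(vms).all()]

# writes belong inside the transaction context manager
with db.transaction:
    db.session.query(vms).filter(vms.name == 'stale-vm').delete()

# or scope a Db to a particular server
with database_on_server('10.1.2.3') as server_db:
    print(len(server_db))     # number of tables in the database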
"""Analog log format definitions.""" from __future__ import (absolute_import, division, print_function, unicode_literals) from collections import namedtuple import re import weakref from analog.exceptions import InvalidFormatExpressionError class LogFormat: """Log format definition. Represents log format recognition patterns by name. A name:format mapping of all defined log format patterns can be retrieved using :py:meth:`analog.formats.LogFormat.all_formats`. Each log format should at least define the following match groups: * ``timestamp``: Local time. * ``verb``: HTTP verb (GET, POST, PUT, ...). * ``path``: Request path. * ``status``: Response status code. * ``body_bytes_sent``: Body size in bytes. * ``request_time``: Request time. * ``upstream_response_time``: Upstream response time. """ #: pool of all predefined log formats __formats_ = {} #: required pattern groups _required_attributes = ('timestamp', 'verb', 'path', 'status', 'body_bytes_sent', 'request_time', 'upstream_response_time') def __init__(self, name, pattern, time_format): """Describe log format. The format ``pattern`` is a (verbose) regex pattern string specifying the log entry attributes as named groups that is compiled into a :py:class:`re.Pattern` object. All pattern group names are be available as attributes of log entries when using a :py:meth:`analog.formats.LogEntry.entry`. :param name: log format name. :type name: ``str`` :param pattern: regular expression pattern string. :type pattern: raw ``str`` :param time_format: timestamp parsing pattern. :type time_format: ``str`` :raises: :py:class:`analog.exceptions.InvalidFormatExpressionError` if missing required format pattern groups or the pattern is not a valid regular expression. """ self.__formats_[name] = weakref.ref(self) self.name = name try: self.pattern = re.compile(pattern, re.UNICODE | re.VERBOSE) except re.error: raise InvalidFormatExpressionError("Invalid regex in format.") attributes = self.pattern.groupindex.keys() for attr in self._required_attributes: if attr not in attributes: raise InvalidFormatExpressionError( "Format pattern must at least define the groups: " "{0}.".format(", ".join(self._required_attributes))) self.time_format = time_format self._entry = namedtuple( 'LogEntry{0}'.format(name.title()), sorted(self.pattern.groupindex, key=self.pattern.groupindex.get)) def entry(self, match): """Convert regex match object to log entry object. :param match: regex match object from ``pattern`` match. :type match: :py:class:`re.MatchObject` :returns: log entry object with all pattern keys as attributes. :rtype: :py:class:`collections.namedtuple` """ return self._entry(**match.groupdict()) @classmethod def all_formats(cls): """Mapping of all defined log format patterns. :returns: dictionary of name:``LogFormat`` instances. :rtype: ``dict`` """ formats = {} for name, ref in cls.__formats_.items(): instance = ref() if instance is not None: formats[name] = instance return formats NGINX = LogFormat('nginx', r''' ^(?P<remote_addr>\S+)\s-\s # Remote address (?P<remote_user>\S+)\s # Remote user \[(?P<timestamp>.*?)\]\s # Local time " # Request (?P<verb>[A-Z]+)\s # HTTP verb (GET, POST, PUT, ...) (?P<path>[^?]+) # Request path (?:\?.+)? 
# Query string \sHTTP/(?:[\d.]+) # HTTP/x.x protocol "\s # /Request (?P<status>\d+?)\s # Response status code (?P<body_bytes_sent>\d+?)\s # Body size in bytes "(?P<http_referer>[^"]+?)"\s # Referer header "(?P<http_user_agent>[^"]+?)"\s # User-Agent header "(?P<http_x_forwarded_for>[^"]+?)"\s # X-Forwarded-For header (?P<request_time>[\d\.]+)\s # Request time (?P<upstream_response_time>[\d\.]+)\s? # Upstream response time (?P<pipe>\S+)?$ # Pipelined request ''', time_format='%d/%b/%Y:%H:%M:%S +0000') """Nginx ``combined_timed`` format:: '$remote_addr - $remote_user [$time_local] "$request" ' '$status $body_bytes_sent "$http_referer" ' '"$http_user_agent" "$http_x_forwarded_for" ' '$request_time $upstream_response_time $pipe'; """
"""Test the analog.formats module.""" from __future__ import (absolute_import, division, print_function, unicode_literals) import datetime import pytest from analog.exceptions import InvalidFormatExpressionError from analog.formats import LogFormat, NGINX def test_predefined_valid_nginx(): """The predefined ``NGINX`` ``LogFormat`` is valid.""" # NGINX is a LogFormat instance and registered as such assert isinstance(NGINX, LogFormat) assert 'nginx' in LogFormat.all_formats() # all required match groups are available match_groups = NGINX.pattern.groupindex.keys() for required in LogFormat._required_attributes: assert required in match_groups # timestamp conversion is working now = datetime.datetime.now().replace(microsecond=0) now_str = now.strftime(NGINX.time_format) now_parsed = datetime.datetime.strptime(now_str, NGINX.time_format) assert now == now_parsed # try matching a log entry log_line = ('123.123.123.123 - test_client [16/Jan/2014:13:30:30 +0000] ' '"POST /auth/token HTTP/1.1" 200 174 "-" ' '"OAuthClient 0.2.3" "-" 0.633 0.633') match = NGINX.pattern.search(log_line) log_entry = NGINX.entry(match) # all entry attributes are correctly populated assert log_entry.remote_addr == '123.123.123.123' assert log_entry.remote_user == 'test_client' assert log_entry.timestamp == '16/Jan/2014:13:30:30 +0000' assert log_entry.verb == 'POST' assert log_entry.path == '/auth/token' assert log_entry.status == '200' assert log_entry.body_bytes_sent == '174' assert log_entry.http_referer == '-' assert log_entry.http_user_agent == 'OAuthClient 0.2.3' assert log_entry.http_x_forwarded_for == '-' assert log_entry.request_time == '0.633' assert log_entry.upstream_response_time == '0.633' def test_custom_logformat_missing_groups(): """Custom ``LogFormat`` patterns must include all required match groups.""" pattern_regex = r'(?P<some_group>.*)' time_format = '%d/%b/%Y:%H:%M:%S +0000' with pytest.raises(InvalidFormatExpressionError) as exc: LogFormat('invalid', pattern_regex, time_format) assert ('InvalidFormatExpressionError: ' 'Format pattern must at least define the groups: {0}'.format( ', '.join(LogFormat._required_attributes))) in str(exc) def test_custom_logformat_invalid_regex(): """Custom ``LogFormat`` patterns must be valid regular expressions.""" pattern_regex = r'(?P<incomplete)' time_format = '%d/%b/%Y:%H:%M:%S +0000' with pytest.raises(InvalidFormatExpressionError) as exc: LogFormat('invalid', pattern_regex, time_format) assert 'Invalid regex in format.' in str(exc)
fabianbuechler/analog
analog/tests/test_formats.py
analog/formats.py
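A custom LogFormat needs a verbose-regex pattern naming every group in _required_attributes; a minimal sketch (the format name, pattern, and sample line are invented for illustration):
from analog.formats import LogFormat

SIMPLE = LogFormat('simple', r'''
    ^\[(?P<timestamp>[^\]]+)\]\s           # local time
    (?P<verb>[A-Z]+)\s                     # HTTP verb
    (?P<path>\S+)\s                        # request path
    (?P<status>\d{3})\s                    # response status code
    (?P<body_bytes_sent>\d+)\s             # body size in bytes
    (?P<request_time>[\d.]+)\s             # request time
    (?P<upstream_response_time>[\d.]+)$    # upstream response time
    ''', time_format='%d/%b/%Y:%H:%M:%S')

match = SIMPLE.pattern.search(
    '[16/Jan/2014:13:30:30] GET /health 200 12 0.003 0.002')
entry = SIMPLE.entry(match)
print(entry.verb, entry.status)  # -> GET 200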
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2015-2016 Florian Bruhin (The Compiler) <mail@qutebrowser.org> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """TabIndex displayed in the statusbar.""" from PyQt5.QtCore import pyqtSlot from qutebrowser.mainwindow.statusbar import textbase class TabIndex(textbase.TextBase): """Shows current tab index and number of tabs in the statusbar.""" @pyqtSlot(int, int) def on_tab_index_changed(self, current, count): """Update tab index when tab changed.""" self.setText('[{}/{}]'.format(current + 1, count))
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2015-2016 Daniel Schadt # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Tests for the custom TabWidget/TabBar.""" import pytest from qutebrowser.mainwindow import tabwidget from qutebrowser.config import configtypes from PyQt5.QtGui import QIcon, QPixmap, QFont, QColor from PyQt5.QtCore import Qt class TestTabWidget: """Tests for TabWidget.""" CONFIG = { 'fonts': { 'tabbar': QFont(), }, 'tabs': { 'show-switching-delay': 800, 'movable': True, 'position': 0, 'select-on-remove': 1, 'show': 'always', 'padding': configtypes.PaddingValues(0, 0, 5, 5), 'indicator-width': 3, 'indicator-padding': configtypes.PaddingValues(2, 2, 0, 4), 'title-format': '{index}: {title}', 'title-alignment': Qt.AlignLeft, }, 'colors': { 'tabs.bg.bar': QColor(), 'tabs.bg.selected.even': QColor(), 'tabs.fg.selected.even': QColor(), } } @pytest.fixture def widget(self, qtbot, config_stub): config_stub.data = self.CONFIG w = tabwidget.TabWidget(0) qtbot.addWidget(w) return w def test_small_icon_doesnt_crash(self, widget, qtbot, stubs): """Test that setting a small icon doesn't produce a crash. Regression test for #1015. """ # Size taken from issue report pixmap = QPixmap(72, 1) icon = QIcon(pixmap) page = stubs.FakeWebView() widget.addTab(page, icon, 'foobar') widget.show() qtbot.waitForWindowShown(widget)
bcb/qutebrowser
tests/unit/mainwindow/test_tabwidget.py
qutebrowser/mainwindow/statusbar/tabindex.py
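The TabIndex slot in the row above can also be driven by hand, which is the quickest way to see its formatting; a sketch (constructing the widget standalone is an assumption here, and in qutebrowser itself a tab-changed signal is connected to on_tab_index_changed):
from PyQt5.QtWidgets import QApplication
from qutebrowser.mainwindow.statusbar.tabindex import TabIndex

app = QApplication([])
tabindex = TabIndex()
# the slot receives the zero-based current index and the tab count
tabindex.on_tab_index_changed(2, 5)
print(tabindex.text())  # expected: '[3/5]'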
""" A model of an Infrastructure Provider in CFME :var page: A :py:class:`cfme.web_ui.Region` object describing common elements on the Providers pages. :var discover_form: A :py:class:`cfme.web_ui.Form` object describing the discover form. :var properties_form: A :py:class:`cfme.web_ui.Form` object describing the main add form. :var default_form: A :py:class:`cfme.web_ui.Form` object describing the default credentials form. :var candu_form: A :py:class:`cfme.web_ui.Form` object describing the C&U credentials form. """ from functools import partial from cfme.common.provider import CloudInfraProvider from cfme.fixtures import pytest_selenium as sel from cfme.infrastructure.host import Host from cfme.web_ui import ( Region, Quadicon, Form, Select, CheckboxTree, fill, form_buttons, paginator, Input, AngularSelect, toolbar as tb, Radio ) from cfme.web_ui.form_buttons import FormButton from cfme.web_ui.menu import nav from cfme.web_ui.tabstrip import TabStripForm from utils import conf, deferred_verpick, version from utils.api import rest_api from utils.db import cfmedb from utils.log import logger from utils.pretty import Pretty from utils.varmeth import variable from utils.wait import wait_for details_page = Region(infoblock_type='detail') # Forms discover_form = Form( fields=[ ('rhevm_chk', Input("discover_type_rhevm")), ('vmware_chk', Input("discover_type_virtualcenter")), ('scvmm_chk', Input("discover_type_scvmm")), ('from_0', Input("from_first")), ('from_1', Input("from_second")), ('from_2', Input("from_third")), ('from_3', Input("from_fourth")), ('to_3', Input("to_fourth")), ('start_button', FormButton("Start the Host Discovery")) ]) properties_form = Form( fields=[ ('type_select', { version.LOWEST: Select('select#server_emstype'), '5.5': AngularSelect("server_emstype") }), ('name_text', Input("name")), ('hostname_text', { version.LOWEST: Input("hostname"), }), ('ipaddress_text', Input("ipaddress"), {"removed_since": "5.4.0.0.15"}), ('api_port', Input("port")), ('sec_protocol', {version.LOWEST: Select("select#security_protocol"), '5.5': AngularSelect("security_protocol")}), ('sec_realm', Input("realm")) ]) properties_form_56 = TabStripForm( fields=[ ('type_select', AngularSelect("emstype")), ('name_text', Input("name")), ("api_version", AngularSelect("api_version")), ], tab_fields={ "Default": [ ('hostname_text', Input("default_hostname")), ('api_port', Input("default_api_port")), ('sec_protocol', AngularSelect("default_security_protocol")), ], "Events": [ ('event_selection', Radio('event_stream_selection')), ('amqp_hostname_text', Input("amqp_hostname")), ('amqp_api_port', Input("amqp_api_port")), ('amqp_sec_protocol', AngularSelect("amqp_security_protocol")), ], "C & U Database": [ ('candu_hostname_text', Input("metrics_hostname")), ('acandu_api_port', Input("metrics_api_port")), ] }) prop_region = Region( locators={ 'properties_form': { version.LOWEST: properties_form, '5.6': properties_form_56, } } ) manage_policies_tree = CheckboxTree("//div[@id='protect_treebox']/ul") cfg_btn = partial(tb.select, 'Configuration') pol_btn = partial(tb.select, 'Policy') mon_btn = partial(tb.select, 'Monitoring') nav.add_branch('infrastructure_providers', {'infrastructure_provider_new': lambda _: cfg_btn( 'Add a New Infrastructure Provider'), 'infrastructure_provider_discover': lambda _: cfg_btn( 'Discover Infrastructure Providers'), 'infrastructure_provider': [lambda ctx: sel.click(Quadicon(ctx['provider'].name, 'infra_prov')), {'infrastructure_provider_edit': lambda _: cfg_btn('Edit this 
Infrastructure Provider'), 'infrastructure_provider_policy_assignment': lambda _: pol_btn('Manage Policies'), 'infrastructure_provider_timelines': lambda _: mon_btn('Timelines')}]}) class Provider(Pretty, CloudInfraProvider): """ Abstract model of an infrastructure provider in cfme. See VMwareProvider or RHEVMProvider. Args: name: Name of the provider. details: a details record (see VMwareDetails, RHEVMDetails inner class). credentials (Credential): see Credential inner class. key: The CFME key of the provider in the yaml. candu: C&U credentials if this is a RHEVMDetails class. Usage: myprov = VMwareProvider(name='foo', credentials=Provider.Credential(principal='admin', secret='foobar')) myprov.create() """ pretty_attrs = ['name', 'key', 'zone'] STATS_TO_MATCH = ['num_template', 'num_vm', 'num_datastore', 'num_host', 'num_cluster'] string_name = "Infrastructure" page_name = "infrastructure" instances_page_name = "infra_vm_and_templates" templates_page_name = "infra_vm_and_templates" quad_name = "infra_prov" _properties_region = prop_region # This will get resolved in common to a real form add_provider_button = deferred_verpick({ version.LOWEST: form_buttons.FormButton("Add this Infrastructure Provider"), '5.6': form_buttons.add }) save_button = deferred_verpick({ version.LOWEST: form_buttons.save, '5.6': form_buttons.angular_save }) def __init__( self, name=None, credentials=None, key=None, zone=None, provider_data=None): if not credentials: credentials = {} self.name = name self.credentials = credentials self.key = key self.provider_data = provider_data self.zone = zone self.vm_name = version.pick({version.LOWEST: "VMs", '5.5': "VMs and Instances"}) self.template_name = "Templates" def _form_mapping(self, create=None, **kwargs): return {'name_text': kwargs.get('name')} @variable(alias='db') def num_datastore(self): """ Returns the provider's number of datastores, as shown on the Details page.""" storage_table_name = version.pick({version.LOWEST: 'hosts_storages', '5.5.0.8': 'host_storages'}) results = list(cfmedb().engine.execute( 'SELECT DISTINCT storages.name, hosts.ems_id ' 'FROM ext_management_systems, hosts, storages, {} ' 'WHERE hosts.id={}.host_id AND ' 'storages.id={}.storage_id AND ' 'hosts.ems_id=ext_management_systems.id AND ' 'ext_management_systems.name=\'{}\''.format(storage_table_name, storage_table_name, storage_table_name, self.name))) return len(results) @num_datastore.variant('ui') def num_datastore_ui(self): return int(self.get_detail("Relationships", "Datastores")) @variable(alias='rest') def num_host(self): provider = rest_api().collections.providers.find_by(name=self.name)[0] num_host = 0 for host in rest_api().collections.hosts: if host['ems_id'] == provider.id: num_host += 1 return num_host @num_host.variant('db') def num_host_db(self): ext_management_systems = cfmedb()["ext_management_systems"] hosts = cfmedb()["hosts"] hostlist = list(cfmedb().session.query(hosts.name) .join(ext_management_systems, hosts.ems_id == ext_management_systems.id) .filter(ext_management_systems.name == self.name)) return len(hostlist) @num_host.variant('ui') def num_host_ui(self): if version.current_version() < "5.6": host_src = "host.png" node_src = "node.png" else: host_src = "host-" node_src = "node-" try: num = int(self.get_detail("Relationships", host_src, use_icon=True)) except sel.NoSuchElementException: logger.error("Couldn't find number of hosts using key [Hosts] trying Nodes") num = int(self.get_detail("Relationships", node_src, use_icon=True)) return num
@variable(alias='rest') def num_cluster(self): provider = rest_api().collections.providers.find_by(name=self.name)[0] num_cluster = 0 for cluster in rest_api().collections.clusters: if cluster['ems_id'] == provider.id: num_cluster += 1 return num_cluster @num_cluster.variant('db') def num_cluster_db(self): """ Returns the provider's number of clusters, as shown on the Details page.""" ext_management_systems = cfmedb()["ext_management_systems"] clusters = cfmedb()["ems_clusters"] clulist = list(cfmedb().session.query(clusters.name) .join(ext_management_systems, clusters.ems_id == ext_management_systems.id) .filter(ext_management_systems.name == self.name)) return len(clulist) @num_cluster.variant('ui') def num_cluster_ui(self): if version.current_version() < "5.6": return int(self.get_detail("Relationships", "cluster.png", use_icon=True)) else: return int(self.get_detail("Relationships", "cluster-", use_icon=True)) def discover(self): """ Begins provider discovery from a provider instance Usage: utils.providers.get_crud('rhevm').discover() """ vmware = isinstance(self, VMwareProvider) rhevm = isinstance(self, RHEVMProvider) scvmm = isinstance(self, SCVMMProvider) discover(rhevm, vmware, scvmm, cancel=False, start_ip=self.start_ip, end_ip=self.end_ip) @property def hosts(self): """Returns list of :py:class:`cfme.infrastructure.host.Host` that should belong to this provider according to the YAML """ result = [] for host in self.get_yaml_data().get("hosts", []): creds = conf.credentials.get(host["credentials"], {}) cred = Host.Credential( principal=creds["username"], secret=creds["password"], verify_secret=creds["password"], ) result.append(Host(name=host["name"], credentials=cred)) return result class VMwareProvider(Provider): def __init__(self, name=None, credentials=None, key=None, zone=None, hostname=None, ip_address=None, start_ip=None, end_ip=None, provider_data=None): super(VMwareProvider, self).__init__(name=name, credentials=credentials, zone=zone, key=key, provider_data=provider_data) self.hostname = hostname self.ip_address = ip_address self.start_ip = start_ip self.end_ip = end_ip def _form_mapping(self, create=None, **kwargs): return {'name_text': kwargs.get('name'), 'type_select': create and 'VMware vCenter', 'hostname_text': kwargs.get('hostname'), 'ipaddress_text': kwargs.get('ip_address')} class OpenstackInfraProvider(Provider): STATS_TO_MATCH = ['num_template', 'num_host'] _properties_region = prop_region def __init__(self, name=None, credentials=None, key=None, hostname=None, ip_address=None, start_ip=None, end_ip=None, provider_data=None, sec_protocol=None): super(OpenstackInfraProvider, self).__init__(name=name, credentials=credentials, key=key, provider_data=provider_data) self.hostname = hostname self.ip_address = ip_address self.start_ip = start_ip self.end_ip = end_ip self.sec_protocol = sec_protocol def _form_mapping(self, create=None, **kwargs): data_dict = { 'name_text': kwargs.get('name'), 'type_select': create and 'OpenStack Platform Director', 'hostname_text': kwargs.get('hostname'), 'api_port': kwargs.get('api_port'), 'ipaddress_text': kwargs.get('ip_address'), 'sec_protocol': kwargs.get('sec_protocol'), 'amqp_sec_protocol': kwargs.get('amqp_sec_protocol')} if 'amqp' in self.credentials: data_dict.update({ 'event_selection': 'amqp', 'amqp_hostname_text': kwargs.get('hostname'), 'amqp_api_port': kwargs.get('amqp_api_port', '5672'), 'amqp_sec_protocol': kwargs.get('amqp_sec_protocol', "Non-SSL") }) return data_dict class SCVMMProvider(Provider):
STATS_TO_MATCH = ['num_template', 'num_vm'] def __init__(self, name=None, credentials=None, key=None, zone=None, hostname=None, ip_address=None, start_ip=None, end_ip=None, sec_protocol=None, sec_realm=None, provider_data=None): super(SCVMMProvider, self).__init__(name=name, credentials=credentials, zone=zone, key=key, provider_data=provider_data) self.hostname = hostname self.ip_address = ip_address self.start_ip = start_ip self.end_ip = end_ip self.sec_protocol = sec_protocol self.sec_realm = sec_realm def _form_mapping(self, create=None, **kwargs): values = { 'name_text': kwargs.get('name'), 'type_select': create and 'Microsoft System Center VMM', 'hostname_text': kwargs.get('hostname'), 'ipaddress_text': kwargs.get('ip_address'), 'sec_protocol': kwargs.get('sec_protocol') } if 'sec_protocol' in values and values['sec_protocol'] == 'Kerberos': values['sec_realm'] = kwargs.get('sec_realm') return values class RHEVMProvider(Provider): _properties_region = prop_region def __init__(self, name=None, credentials=None, zone=None, key=None, hostname=None, ip_address=None, api_port=None, start_ip=None, end_ip=None, provider_data=None): super(RHEVMProvider, self).__init__(name=name, credentials=credentials, zone=zone, key=key, provider_data=provider_data) self.hostname = hostname self.ip_address = ip_address self.api_port = api_port self.start_ip = start_ip self.end_ip = end_ip def _form_mapping(self, create=None, **kwargs): return {'name_text': kwargs.get('name'), 'type_select': create and 'Red Hat Enterprise Virtualization Manager', 'hostname_text': kwargs.get('hostname'), 'api_port': kwargs.get('api_port'), 'ipaddress_text': kwargs.get('ip_address')} def get_all_providers(do_not_navigate=False): """Returns list of all providers""" if not do_not_navigate: sel.force_navigate('infrastructure_providers') providers = set([]) link_marker = "ems_infra" for page in paginator.pages(): for title in sel.elements("//div[@id='quadicon']/../../../tr/td/a[contains(@href," "'{}/show')]".format(link_marker)): providers.add(sel.get_attribute(title, "title")) return providers def discover(rhevm=False, vmware=False, scvmm=False, cancel=False, start_ip=None, end_ip=None): """ Discover infrastructure providers. Note: only starts discovery, doesn't wait for it to finish. Args: rhevm: Whether to scan for RHEVM providers vmware: Whether to scan for VMware providers scvmm: Whether to scan for SCVMM providers cancel: Whether to cancel out of the discover UI. """ sel.force_navigate('infrastructure_provider_discover') form_data = {} if rhevm: form_data.update({'rhevm_chk': True}) if vmware: form_data.update({'vmware_chk': True}) if scvmm: form_data.update({'scvmm_chk': True}) if start_ip: for idx, octet in enumerate(start_ip.split('.')): key = 'from_%i' % idx form_data.update({key: octet}) if end_ip: end_octet = end_ip.split('.')[-1] form_data.update({'to_3': end_octet}) fill(discover_form, form_data, action=form_buttons.cancel if cancel else discover_form.start_button, action_always=True) def wait_for_a_provider(): sel.force_navigate('infrastructure_providers') logger.info('Waiting for a provider to appear...') wait_for(paginator.rec_total, fail_condition=None, message="Wait for any provider to appear", num_sec=1000, fail_func=sel.refresh)
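The module-level discover() above only fills and submits the discovery form; a minimal sketch of kicking off RHEVM discovery over a range (the IP addresses are placeholders, and the cfme.infrastructure.provider import path is assumed from the module's docstring):
from cfme.infrastructure.provider import discover, wait_for_a_provider

# checks the RHEVM box, fills from_0..from_3 from the start IP and
# to_3 from the end IP, then presses "Start the Host Discovery"
discover(rhevm=True, start_ip='10.0.0.1', end_ip='10.0.0.254')
wait_for_a_provider()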
# -*- coding: utf-8 -*- # These tests don't work at the moment, due to the security_groups multi select not working # in selenium (the group is selected then immediately reset) import fauxfactory import pytest from cfme.common.vm import VM, Template from cfme.common.provider import cleanup_vm from cfme.configure import configuration from cfme.configure.tasks import is_vm_analysis_finished from cfme.control.explorer import PolicyProfile, VMControlPolicy, Action from cfme.fixtures import pytest_selenium as sel from cfme.infrastructure import host, datastore from cfme.provisioning import do_vm_provisioning from cfme.web_ui import InfoBlock, DriftGrid, toolbar from fixtures.pytest_store import store from utils import testgen, ssh, safe_string, version, error from utils.browser import ensure_browser_open from utils.conf import cfme_data from utils.log import logger from utils.wait import wait_for from utils.blockers import GH, BZ pytestmark = [pytest.mark.meta(blockers=["GH#ManageIQ/manageiq:6939"], unblock=lambda provider: provider.type != 'rhevm'), pytest.mark.tier(3)] WINDOWS = {'id': "Red Hat Enterprise Windows", 'icon': 'windows'} RPM_BASED = { 'rhel': { 'id': "Red Hat", 'release-file': '/etc/redhat-release', 'icon': 'linux_redhat', 'package': "kernel", 'install-command': "", # We don't install stuff on RHEL 'package-number': 'rpm -qa | wc -l', 'services-number': 'systemctl -a --type service -o cat --no-legend --no-pager | wc -l'}, 'centos': { 'id': "CentOS", 'release-file': '/etc/centos-release', 'icon': 'linux_centos', 'package': 'iso-codes', 'install-command': 'yum install -y {}', 'package-number': 'rpm -qa | wc -l', 'services-number': 'systemctl -a --type service -o cat --no-legend --no-pager | wc -l'}, 'fedora': { 'id': 'Fedora', 'release-file': '/etc/fedora-release', 'icon': 'linux_fedora', 'package': 'iso-codes', 'install-command': 'dnf install -y {}', 'package-number': 'rpm -qa | wc -l', 'services-number': 'systemctl -a --type service -o cat --no-legend --no-pager | wc -l'}, 'suse': { 'id': 'Suse', 'release-file': '/etc/SuSE-release', 'icon': 'linux_suse', 'package': 'iso-codes', 'install-command': 'zypper install -y {}', 'package-number': 'rpm -qa | wc -l', 'services-number': 'systemctl -a --type service -o cat --no-legend --no-pager | wc -l'}, } DEB_BASED = { 'ubuntu': { 'id': 'Ubuntu 14.04', 'release-file': '/etc/issue.net', 'icon': 'linux_ubuntu', 'package': 'iso-codes', 'install-command': 'env DEBIAN_FRONTEND=noninteractive apt-get -y install {}', 'package-number': "dpkg --get-selections | wc -l", 'services-number': 'chkconfig --list | wc -l'}, 'debian': { 'id': 'Debian ', 'release-file': '/etc/issue.net', 'icon': 'linux_debian', 'package': 'iso-codes', 'install-command': 'env DEBIAN_FRONTEND=noninteractive apt-get -y install {}', 'package-number': 'dpkg --get-selections | wc -l', 'services-number': 'chkconfig --list | wc -l'}, } ssa_expect_file = "/etc/hosts" def pytest_generate_tests(metafunc): # Filter out providers without templates defined argnames, argvalues, idlist = testgen.all_providers(metafunc) # if metafunc.function is not test_ssa_template: argnames.append('analysis_type') new_idlist = [] new_argvalues = [] for i, argvalue_tuple in enumerate(argvalues): args = dict(zip(argnames, argvalue_tuple)) # if metafunc.function is test_ssa_template: # new_idlist.append(args['provider'].key) # new_argvalues.append([args["provider"]]) # continue vms = [] provisioning_data = [] try: vma_data = args['provider'].data.get('vm_analysis_new', {}) vms = vma_data.get("vms", {}) 
provisioning_data = vma_data.get("provisioning", {}) except AttributeError: # Provider has no provisioning and/or vms list set continue for vm_analysis_key in vms: # Each VM can redefine a provisioning data vm_analysis_data = provisioning_data.copy() vm_analysis_data.update(vms[vm_analysis_key]) if not {'image', 'fs-type'}.issubset( vm_analysis_data.viewkeys()): continue if vm_analysis_data['fs-type'] not in ['ntfs', 'fat32']: # Username and password are required for non-windows VMs if not {'username', 'password'}.issubset( vm_analysis_data.viewkeys()): continue # Set VM name here new_idlist.append('{}-{}'.format(idlist[i], vm_analysis_key)) new_argvalues.append([args["provider"], vm_analysis_key]) testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist, scope="module") @pytest.fixture(scope="module") def local_setup_provider(request, setup_provider_modscope, provider, vm_analysis_data): if provider.type == 'rhevm' and version.current_version() < "5.5": # See https://bugzilla.redhat.com/show_bug.cgi?id=1300030 pytest.skip("SSA is not supported on RHEVM for appliances earlier than 5.5 and upstream") if GH("ManageIQ/manageiq:6506").blocks: pytest.skip("Upstream provisioning is blocked by" + "https://github.com/ManageIQ/manageiq/issues/6506") if provider.type == 'virtualcenter': store.current_appliance.install_vddk(reboot=True, wait_for_web_ui_after_reboot=True) ensure_browser_open() set_host_credentials(request, provider, vm_analysis_data) # Make sure all roles are set roles = configuration.get_server_roles(db=False) roles["automate"] = True roles["smartproxy"] = True roles["smartstate"] = True configuration.set_server_roles(**roles) def set_host_credentials(request, provider, vm_analysis_data): # Add credentials to host test_host = host.Host(name=vm_analysis_data['host']) wait_for(lambda: test_host.exists, delay=10, num_sec=120) host_list = cfme_data.get('management_systems', {})[provider.key].get('hosts', []) host_data = [x for x in host_list if x.name == vm_analysis_data['host']][0] if not test_host.has_valid_credentials: test_host.update( updates={'credentials': host.get_credentials_from_config(host_data['credentials'])}, validate_credentials=True ) # Remove creds after test @request.addfinalizer def _host_remove_creds(): test_host.update( updates={'credentials': host.Host.Credential( principal="", secret="", verify_secret="")}, validate_credentials=False ) @pytest.fixture(scope="module") def vm_name(provider, analysis_type): vm_name = 'test_ssa_{}-{}'.format(fauxfactory.gen_alphanumeric(), analysis_type) return vm_name @pytest.fixture(scope="module") def vm_analysis_data(provider, analysis_type): base_data = provider.data.get('vm_analysis_new', {}).get('provisioning', {}) base_data.update(provider.data.get('vm_analysis_new', {}).get('vms', {}).get(analysis_type, {})) return base_data @pytest.fixture(scope="module") def instance(request, local_setup_provider, provider, vm_name, vm_analysis_data): """ Fixture to provision instance on the provider """ template = vm_analysis_data.get('image', None) host_name, datastore_name = map(vm_analysis_data.get, ('host', 'datastore')) mgmt_system = provider.get_mgmt_system() provisioning_data = { 'vm_name': vm_name, 'host_name': {'name': [host_name]}, 'datastore_name': {'name': [datastore_name]}, } try: provisioning_data['vlan'] = vm_analysis_data['vlan'] except KeyError: # provisioning['vlan'] is required for rhevm provisioning if provider.type == 'rhevm': raise pytest.fail('rhevm requires a vlan value in provisioning info') vm = 
VM.factory(vm_name, provider)
    connect_ip = None

    if provider.type == "openstack":
        image = vm_analysis_data['image']
        vm = VM.factory(vm_name, provider, image)
        request.addfinalizer(vm.delete_from_provider)
        connect_ip = mgmt_system.get_first_floating_ip()
        provider.refresh_provider_relationships(method='ui')
        inst_args = {
            'email': 'image_provisioner@example.com',
            'first_name': 'Image',
            'last_name': 'Provisioner',
            'template_name': image,
            'notes': ('Testing provisioning from image {} to vm {} on provider {}'.format(
                image, vm_name, provider.key)),
            'instance_type': vm_analysis_data['instance_type'],
            'availability_zone': vm_analysis_data['availability_zone'],
            'security_groups': [vm_analysis_data['security_group']],
            'cloud_network': vm_analysis_data['cloud_network'],
            'public_ip_address': connect_ip,
        }
        vm.create(**inst_args)
    else:
        request.addfinalizer(lambda: cleanup_vm(vm_name, provider))
        do_vm_provisioning(template, provider, vm_name, provisioning_data, request, None,
                           num_sec=6000)

    logger.info("VM %s provisioned, waiting for IP address to be assigned", vm_name)

    @pytest.wait_for(timeout="20m", delay=5)
    def get_ip_address():
        logger.info("Power state for {} vm: {}, is_vm_stopped: {}".format(
            vm_name, mgmt_system.vm_status(vm_name), mgmt_system.is_vm_stopped(vm_name)))
        if mgmt_system.is_vm_stopped(vm_name):
            mgmt_system.start_vm(vm_name)
        ip = mgmt_system.current_ip_address(vm_name)
        logger.info("Fetched IP for %s: %s", vm_name, ip)
        return ip is not None

    connect_ip = mgmt_system.get_ip_address(vm_name)
    assert connect_ip is not None

    # Check that we can at least get the uptime via ssh this should only be possible
    # if the username and password have been set via the cloud-init script so
    # is a valid check
    if vm_analysis_data['fs-type'] not in ['ntfs', 'fat32']:
        logger.info("Waiting for %s to be available via SSH", connect_ip)
        ssh_client = ssh.SSHClient(hostname=connect_ip, username=vm_analysis_data['username'],
                                   password=vm_analysis_data['password'], port=22)
        wait_for(ssh_client.uptime, num_sec=3600, handle_exception=False)
        vm.ssh = ssh_client

    vm.system_type = detect_system_type(vm)
    logger.info("Detected system type: %s", vm.system_type)
    vm.image = vm_analysis_data['image']
    vm.connect_ip = connect_ip

    if provider.type == 'rhevm':
        logger.info("Setting a relationship between VM and appliance")
        from cfme.infrastructure.virtual_machines import Vm
        cfme_rel = Vm.CfmeRelationship(vm)
        cfme_rel.set_relationship(str(configuration.server_name()), configuration.server_id())

    return vm


@pytest.fixture(scope="module")
def policy_profile(request, instance):
    collected_files = [
        {"Name": "/etc/redhat-access-insights/machine-id", "Collect Contents?": True},
        {"Name": ssa_expect_file, "Collect Contents?": True},
    ]

    analysis_profile_name = 'ssa_analysis_{}'.format(fauxfactory.gen_alphanumeric())
    analysis_profile = configuration.VMAnalysisProfile(analysis_profile_name,
                                                       analysis_profile_name,
                                                       categories=["check_system"],
                                                       files=collected_files)
    if analysis_profile.exists:
        analysis_profile.delete()
    analysis_profile.create()
    request.addfinalizer(analysis_profile.delete)

    action = Action(
        'ssa_action_{}'.format(fauxfactory.gen_alpha()),
        "Assign Profile to Analysis Task",
        dict(analysis_profile=analysis_profile_name))
    if action.exists:
        action.delete()
    action.create()
    request.addfinalizer(action.delete)

    policy = VMControlPolicy('ssa_policy_{}'.format(fauxfactory.gen_alpha()))
    if policy.exists:
        policy.delete()
    policy.create()
    request.addfinalizer(policy.delete)

    policy.assign_events("VM Analysis Start")
    request.addfinalizer(policy.assign_events)
    policy.assign_actions_to_event("VM Analysis Start", action)

    profile = PolicyProfile('ssa_policy_profile_{}'.format(fauxfactory.gen_alpha()),
                            policies=[policy])
    if profile.exists:
        profile.delete()
    profile.create()
    request.addfinalizer(profile.delete)

    instance.assign_policy_profiles(profile.description)
    request.addfinalizer(lambda: instance.unassign_policy_profiles(profile.description))


def detect_system_type(vm):
    if hasattr(vm, 'ssh'):
        system_release = safe_string(vm.ssh.run_command("cat /etc/os-release").output)
        all_systems_dict = RPM_BASED.values() + DEB_BASED.values()
        for x in all_systems_dict:
            if x['id'].lower() in system_release.lower():
                return x
    else:
        return WINDOWS


@pytest.mark.tier(1)
@pytest.mark.long_running
@pytest.mark.meta(blockers=[
    BZ(1311134, unblock=lambda provider: provider.type != 'rhevm'),
    BZ(1311218, unblock=lambda provider:
        provider.type != 'virtualcenter' or provider.version < "6"),
    BZ(1320248, unblock=lambda provider: version.current_version() >= "5.5")])
def test_ssa_template(request, local_setup_provider, provider, soft_assert, vm_analysis_data):
    """ Tests SSA can be performed on a template

    Metadata:
        test_flag: vm_analysis
    """
    template_name = vm_analysis_data['image']
    template = Template.factory(template_name, provider, template=True)

    # Set credentials to all hosts set for this datastore
    if provider.type != 'openstack':
        datastore_name = vm_analysis_data['datastore']
        test_datastore = datastore.Datastore(datastore_name, provider.key)
        host_list = cfme_data.get('management_systems', {})[provider.key].get('hosts', [])
        host_names = test_datastore.get_hosts()
        for host_name in host_names:
            test_host = host.Host(name=host_name)
            hosts_data = [x for x in host_list if x.name == host_name]
            if len(hosts_data) > 0:
                host_data = hosts_data[0]
                if not test_host.has_valid_credentials:
                    creds = host.get_credentials_from_config(host_data['credentials'])
                    test_host.update(
                        updates={'credentials': creds},
                        validate_credentials=True
                    )

    template.smartstate_scan()
    wait_for(lambda: is_vm_analysis_finished(template_name),
             delay=15, timeout="10m", fail_func=lambda: toolbar.select('Reload'))

    # Check release and quadricon
    quadicon_os_icon = template.find_quadicon().os
    details_os_icon = template.get_detail(
        properties=('Properties', 'Operating System'), icon_href=True)
    logger.info("Icons: {}, {}".format(details_os_icon, quadicon_os_icon))

    # We shouldn't use get_detail anymore - it takes too much time
    c_users = InfoBlock.text('Security', 'Users')
    c_groups = InfoBlock.text('Security', 'Groups')
    c_packages = 0
    if vm_analysis_data['fs-type'] not in ['ntfs', 'fat32']:
        c_packages = InfoBlock.text('Configuration', 'Packages')

    logger.info("SSA shows {} users, {} groups and {} packages".format(
        c_users, c_groups, c_packages))

    if vm_analysis_data['fs-type'] not in ['ntfs', 'fat32']:
        soft_assert(c_users != '0', "users: '{}' != '0'".format(c_users))
        soft_assert(c_groups != '0', "groups: '{}' != '0'".format(c_groups))
        soft_assert(c_packages != '0', "packages: '{}' != '0'".format(c_packages))
    else:
        # Make sure windows-specific data is not empty
        c_patches = InfoBlock.text('Security', 'Patches')
        c_applications = InfoBlock.text('Configuration', 'Applications')
        c_win32_services = InfoBlock.text('Configuration', 'Win32 Services')
        c_kernel_drivers = InfoBlock.text('Configuration', 'Kernel Drivers')
        c_fs_drivers = InfoBlock.text('Configuration', 'File System Drivers')

        soft_assert(c_patches != '0', "patches: '{}' != '0'".format(c_patches))
        soft_assert(c_applications != '0',
                    "applications: '{}' != '0'".format(c_applications))
        soft_assert(c_win32_services != '0',
                    "win32 services: '{}' != '0'".format(c_win32_services))
        soft_assert(c_kernel_drivers != '0',
                    "kernel drivers: '{}' != '0'".format(c_kernel_drivers))
        soft_assert(c_fs_drivers != '0', "fs drivers: '{}' != '0'".format(c_fs_drivers))


@pytest.mark.long_running
def test_ssa_vm(provider, instance, soft_assert):
    """ Tests SSA can be performed and returns sane results

    Metadata:
        test_flag: vm_analysis
    """
    e_users = None
    e_groups = None
    e_packages = None
    e_services = None
    e_icon_part = instance.system_type['icon']

    if instance.system_type != WINDOWS:
        e_users = instance.ssh.run_command("cat /etc/passwd | wc -l").output.strip('\n')
        e_groups = instance.ssh.run_command("cat /etc/group | wc -l").output.strip('\n')
        e_packages = instance.ssh.run_command(
            instance.system_type['package-number']).output.strip('\n')
        e_services = instance.ssh.run_command(
            instance.system_type['services-number']).output.strip('\n')

    logger.info("Expecting to have {} users, {} groups, {} packages and {} services".format(
        e_users, e_groups, e_packages, e_services))

    instance.smartstate_scan()
    wait_for(lambda: is_vm_analysis_finished(instance.name),
             delay=15, timeout="15m", fail_func=lambda: toolbar.select('Reload'))

    # Check release and quadricon
    quadicon_os_icon = instance.find_quadicon().os
    details_os_icon = instance.get_detail(
        properties=('Properties', 'Operating System'), icon_href=True)
    logger.info("Icons: %s, %s", details_os_icon, quadicon_os_icon)

    # We shouldn't use get_detail anymore - it takes too much time
    c_lastanalyzed = InfoBlock.text('Lifecycle', 'Last Analyzed')
    c_users = InfoBlock.text('Security', 'Users')
    c_groups = InfoBlock.text('Security', 'Groups')
    c_packages = 0
    c_services = 0
    if instance.system_type != WINDOWS:
        c_packages = InfoBlock.text('Configuration', 'Packages')
        c_services = InfoBlock.text('Configuration', 'Init Processes')

    logger.info("SSA shows {} users, {} groups {} packages and {} services".format(
        c_users, c_groups, c_packages, c_services))

    soft_assert(c_lastanalyzed != 'Never', "Last Analyzed is set to Never")
    soft_assert(e_icon_part in details_os_icon,
                "details icon: '{}' not in '{}'".format(e_icon_part, details_os_icon))
    soft_assert(e_icon_part in quadicon_os_icon,
                "quad icon: '{}' not in '{}'".format(e_icon_part, details_os_icon))

    if instance.system_type != WINDOWS:
        soft_assert(c_users == e_users, "users: '{}' != '{}'".format(c_users, e_users))
        soft_assert(c_groups == e_groups, "groups: '{}' != '{}'".format(c_groups, e_groups))
        soft_assert(c_packages == e_packages,
                    "packages: '{}' != '{}'".format(c_packages, e_packages))
        if not BZ("1312971").blocks:
            soft_assert(c_services == e_services,
                        "services: '{}' != '{}'".format(c_services, e_services))
    else:
        # Make sure windows-specific data is not empty
        c_patches = InfoBlock.text('Security', 'Patches')
        c_applications = InfoBlock.text('Configuration', 'Applications')
        c_win32_services = InfoBlock.text('Configuration', 'Win32 Services')
        c_kernel_drivers = InfoBlock.text('Configuration', 'Kernel Drivers')
        c_fs_drivers = InfoBlock.text('Configuration', 'File System Drivers')

        soft_assert(c_patches != '0', "patches: '{}' != '0'".format(c_patches))
        soft_assert(c_applications != '0',
                    "applications: '{}' != '0'".format(c_applications))
        soft_assert(c_win32_services != '0',
                    "win32 services: '{}' != '0'".format(c_win32_services))
        soft_assert(c_kernel_drivers != '0',
                    "kernel drivers: '{}' != '0'".format(c_kernel_drivers))
        soft_assert(c_fs_drivers != '0', "fs drivers: '{}' != '0'".format(c_fs_drivers))

    image_label = 'Parent VM'
    if provider.type == 'openstack':
        image_label = 'VM Template'
    # 5.4 doesn't have Parent VM field
    if version.current_version() > "5.5" and provider.type != 'openstack':
        c_image = InfoBlock.text('Relationships', image_label)
        soft_assert(c_image == instance.image,
                    "image: '{}' != '{}'".format(c_image, instance.image))


@pytest.mark.long_running
def test_ssa_users(provider, instance, soft_assert):
    """ Tests SSA fetches correct results for users list

    Metadata:
        test_flag: vm_analysis
    """
    username = fauxfactory.gen_alphanumeric()
    expected = None

    # In windows case we can't add new users (yet)
    # So we simply check that user list doesn't cause any Rails errors
    if instance.system_type != WINDOWS:
        # Add a new user
        instance.ssh.run_command("userdel {0} || useradd {0}".format(username))
        expected = instance.ssh.run_command("cat /etc/passwd | wc -l").output.strip('\n')

    instance.smartstate_scan()
    wait_for(lambda: is_vm_analysis_finished(instance.name),
             delay=15, timeout="15m", fail_func=lambda: toolbar.select('Reload'))

    # Check that all data has been fetched
    current = instance.get_detail(properties=('Security', 'Users'))
    if instance.system_type != WINDOWS:
        assert current == expected

    # Make sure created user is in the list
    instance.open_details(("Security", "Users"))
    if instance.system_type != WINDOWS:
        if not instance.paged_table.find_row_on_all_pages('Name', username):
            pytest.fail("User {0} was not found".format(username))


@pytest.mark.long_running
def test_ssa_groups(provider, instance, soft_assert):
    """ Tests SSA fetches correct results for groups

    Metadata:
        test_flag: vm_analysis
    """
    group = fauxfactory.gen_alphanumeric()
    expected = None

    if instance.system_type != WINDOWS:
        # Add a new group
        instance.ssh.run_command("groupdel {0} || groupadd {0}".format(group))
        expected = instance.ssh.run_command("cat /etc/group | wc -l").output.strip('\n')

    instance.smartstate_scan()
    wait_for(lambda: is_vm_analysis_finished(instance.name),
             delay=15, timeout="15m", fail_func=lambda: toolbar.select('Reload'))

    # Check that all data has been fetched
    current = instance.get_detail(properties=('Security', 'Groups'))
    if instance.system_type != WINDOWS:
        assert current == expected

    # Make sure created group is in the list
    instance.open_details(("Security", "Groups"))
    if instance.system_type != WINDOWS:
        if not instance.paged_table.find_row_on_all_pages('Name', group):
            pytest.fail("Group {0} was not found".format(group))


@pytest.mark.long_running
def test_ssa_packages(provider, instance, soft_assert):
    """ Tests SSA fetches correct results for packages

    Metadata:
        test_flag: vm_analysis
    """
    if instance.system_type == WINDOWS:
        pytest.skip("Windows has no packages")

    expected = None
    if 'package' not in instance.system_type.keys():
        pytest.skip("Don't know how to update packages for {}".format(instance.system_type))

    package_name = instance.system_type['package']
    package_command = instance.system_type['install-command']
    package_number_command = instance.system_type['package-number']

    cmd = package_command.format(package_name)
    output = instance.ssh.run_command(cmd.format(package_name)).output
    logger.info("%s output:\n%s", cmd, output)

    expected = instance.ssh.run_command(package_number_command).output.strip('\n')

    instance.smartstate_scan()
    wait_for(lambda: is_vm_analysis_finished(instance.name),
             delay=15, timeout="15m", fail_func=lambda: toolbar.select('Reload'))

    # Check that all data has been fetched
    current = instance.get_detail(properties=('Configuration', 'Packages'))
    assert current == expected

    # Make sure new package is listed
    instance.open_details(("Configuration", "Packages"))
    if not instance.paged_table.find_row_on_all_pages('Name', package_name):
        pytest.fail("Package {0} was not found".format(package_name))


@pytest.mark.long_running
def test_ssa_files(provider, instance, policy_profile, soft_assert):
    """Tests that instances can be scanned for specific file."""
    if instance.system_type == WINDOWS:
        pytest.skip("We cannot verify Windows files yet")

    instance.smartstate_scan()
    wait_for(lambda: is_vm_analysis_finished(instance.name),
             delay=15, timeout="15m", fail_func=lambda: toolbar.select('Reload'))

    # Check that all data has been fetched
    current = instance.get_detail(properties=('Configuration', 'Files'))
    assert current != '0', "No files were scanned"

    instance.open_details(("Configuration", "Files"))
    if not instance.paged_table.find_row_on_all_pages('Name', ssa_expect_file):
        pytest.fail("File {0} was not found".format(ssa_expect_file))


@pytest.mark.long_running
def test_drift_analysis(request, provider, instance, soft_assert):
    """ Tests drift analysis is correct

    Metadata:
        test_flag: vm_analysis
    """
    instance.load_details()
    drift_num_orig = 0
    drift_orig = InfoBlock("Relationships", "Drift History").text
    if drift_orig != 'None':
        drift_num_orig = int(drift_orig)

    instance.smartstate_scan()
    wait_for(lambda: is_vm_analysis_finished(instance.name),
             delay=15, timeout="15m", fail_func=lambda: toolbar.select('Reload'))

    instance.load_details()
    wait_for(
        lambda: int(InfoBlock("Relationships", "Drift History").text) == drift_num_orig + 1,
        delay=20,
        num_sec=120,
        message="Waiting for Drift History count to increase",
        fail_func=sel.refresh
    )
    drift_new = int(InfoBlock("Relationships", "Drift History").text)

    # add a tag and a finalizer to remove it
    tag = ('Department', 'Accounting')
    instance.add_tag(tag, single_value=False)
    request.addfinalizer(lambda: instance.remove_tag(tag))

    instance.smartstate_scan()
    wait_for(lambda: is_vm_analysis_finished(instance.name),
             delay=15, timeout="15m", fail_func=lambda: toolbar.select('Reload'))

    instance.load_details()
    wait_for(
        lambda: int(InfoBlock("Relationships", "Drift History").text) == drift_new + 1,
        delay=20,
        num_sec=120,
        message="Waiting for Drift History count to increase",
        fail_func=sel.refresh
    )

    # check drift difference
    soft_assert(not instance.equal_drift_results('Department (1)', 'My Company Tags', 0, 1),
                "Drift analysis results are equal when they shouldn't be")

    # Test UI features that modify the drift grid
    d_grid = DriftGrid()

    # Accounting tag should not be displayed, because it was changed to True
    toolbar.select("Attributes with same values")
    with error.expected(sel.NoSuchElementException):
        d_grid.get_cell('Accounting', 0)

    # Accounting tag should be displayed now
    toolbar.select("Attributes with different values")
    d_grid.get_cell('Accounting', 0)
lehinevych/cfme_tests
cfme/tests/infrastructure/test_instance_analysis.py
cfme/infrastructure/provider.py
# -*- coding: utf-8 -*-
from utils.db import cfmedb, Db


def get_configuration_details(db=None, ip_address=None):
    """Return details that are necessary to navigate through Configuration accordions.

    Args:
        ip_address: IP address of the server to match. If None, uses hostname from
            ``conf.env['base_url']``

    Returns: If the data weren't found in the DB, :py:class:`NoneType`
        If the data were found, it returns tuple
        `(region, server name, server id, server zone id)`
    """
    if ip_address is None:
        ip_address = cfmedb().hostname
    if db is None:
        db = Db(hostname=ip_address)

    SEQ_FACT = 1e12
    miq_servers = db['miq_servers']
    for region in db.session.query(db['miq_regions']):
        reg_min = region.region * SEQ_FACT
        reg_max = reg_min + SEQ_FACT
        all_servers = db.session.query(miq_servers).all()
        server = None

        if len(all_servers) == 1:
            # If there's only one server, it's the one we want
            server = all_servers[0]
        else:
            # Otherwise, filter based on id and ip address
            def server_filter(server):
                return all([
                    server.id >= reg_min,
                    server.id < reg_max,
                    # XXX: This currently fails due to public/private addresses on openstack
                    server.ipaddress == ip_address
                ])
            servers = filter(server_filter, all_servers)
            if servers:
                server = servers[0]

        if server:
            return region.region, server.name, server.id, server.zone_id
        else:
            return None, None, None, None
    else:
        return None


def get_zone_description(zone_id, ip_address=None, db=None):
    if ip_address is None:
        ip_address = cfmedb().hostname
    if db is None:
        db = Db(hostname=ip_address)
    zones = list(
        db.session.query(db["zones"]).filter(
            db["zones"].id == zone_id
        )
    )
    if zones:
        return zones[0].description
    else:
        return None


def get_host_id(hostname, ip_address=None, db=None):
    if ip_address is None:
        ip_address = cfmedb().hostname
    if db is None:
        db = Db(hostname=ip_address)
    hosts = list(
        db.session.query(db["hosts"]).filter(
            db["hosts"].name == hostname
        )
    )
    if hosts:
        return str(hosts[0].id)
    else:
        return None


def check_domain_enabled(domain, ip_address=None, db=None):
    if ip_address is None:
        ip_address = cfmedb().hostname
    if db is None:
        db = Db(hostname=ip_address)
    namespaces = db["miq_ae_namespaces"]
    q = db.session.query(namespaces).filter(
        namespaces.parent_id == None, namespaces.name == domain)  # NOQA (for is/==)
    try:
        return list(q)[0].enabled
    except IndexError:
        raise KeyError("No such Domain: {}".format(domain))
# -*- coding: utf-8 -*-
# These tests don't work at the moment, due to the security_groups multi select not working
# in selenium (the group is selected then immediately reset)
import fauxfactory
import pytest

from cfme.common.vm import VM, Template
from cfme.common.provider import cleanup_vm
from cfme.configure import configuration
from cfme.configure.tasks import is_vm_analysis_finished
from cfme.control.explorer import PolicyProfile, VMControlPolicy, Action
from cfme.fixtures import pytest_selenium as sel
from cfme.infrastructure import host, datastore
from cfme.provisioning import do_vm_provisioning
from cfme.web_ui import InfoBlock, DriftGrid, toolbar
from fixtures.pytest_store import store
from utils import testgen, ssh, safe_string, version, error
from utils.browser import ensure_browser_open
from utils.conf import cfme_data
from utils.log import logger
from utils.wait import wait_for
from utils.blockers import GH, BZ

pytestmark = [pytest.mark.meta(blockers=["GH#ManageIQ/manageiq:6939"],
                               unblock=lambda provider: provider.type != 'rhevm'),
              pytest.mark.tier(3)]

WINDOWS = {'id': "Red Hat Enterprise Windows", 'icon': 'windows'}

RPM_BASED = {
    'rhel': {
        'id': "Red Hat",
        'release-file': '/etc/redhat-release',
        'icon': 'linux_redhat',
        'package': "kernel",
        'install-command': "",  # We don't install stuff on RHEL
        'package-number': 'rpm -qa | wc -l',
        'services-number': 'systemctl -a --type service -o cat --no-legend --no-pager | wc -l'},
    'centos': {
        'id': "CentOS",
        'release-file': '/etc/centos-release',
        'icon': 'linux_centos',
        'package': 'iso-codes',
        'install-command': 'yum install -y {}',
        'package-number': 'rpm -qa | wc -l',
        'services-number': 'systemctl -a --type service -o cat --no-legend --no-pager | wc -l'},
    'fedora': {
        'id': 'Fedora',
        'release-file': '/etc/fedora-release',
        'icon': 'linux_fedora',
        'package': 'iso-codes',
        'install-command': 'dnf install -y {}',
        'package-number': 'rpm -qa | wc -l',
        'services-number': 'systemctl -a --type service -o cat --no-legend --no-pager | wc -l'},
    'suse': {
        'id': 'Suse',
        'release-file': '/etc/SuSE-release',
        'icon': 'linux_suse',
        'package': 'iso-codes',
        'install-command': 'zypper install -y {}',
        'package-number': 'rpm -qa | wc -l',
        'services-number': 'systemctl -a --type service -o cat --no-legend --no-pager | wc -l'},
}

DEB_BASED = {
    'ubuntu': {
        'id': 'Ubuntu 14.04',
        'release-file': '/etc/issue.net',
        'icon': 'linux_ubuntu',
        'package': 'iso-codes',
        'install-command': 'env DEBIAN_FRONTEND=noninteractive apt-get -y install {}',
        'package-number': "dpkg --get-selections | wc -l",
        'services-number': 'chkconfig --list | wc -l'},
    'debian': {
        'id': 'Debian ',
        'release-file': '/etc/issue.net',
        'icon': 'linux_debian',
        'package': 'iso-codes',
        'install-command': 'env DEBIAN_FRONTEND=noninteractive apt-get -y install {}',
        'package-number': 'dpkg --get-selections | wc -l',
        'services-number': 'chkconfig --list | wc -l'},
}

ssa_expect_file = "/etc/hosts"


def pytest_generate_tests(metafunc):
    # Filter out providers without templates defined
    argnames, argvalues, idlist = testgen.all_providers(metafunc)
    # if metafunc.function is not test_ssa_template:
    argnames.append('analysis_type')

    new_idlist = []
    new_argvalues = []

    for i, argvalue_tuple in enumerate(argvalues):
        args = dict(zip(argnames, argvalue_tuple))
        # if metafunc.function is test_ssa_template:
        #     new_idlist.append(args['provider'].key)
        #     new_argvalues.append([args["provider"]])
        #     continue

        vms = []
        provisioning_data = []

        try:
            vma_data = args['provider'].data.get('vm_analysis_new', {})
            vms = vma_data.get("vms", {})
            provisioning_data = vma_data.get("provisioning", {})
        except AttributeError:
            # Provider has no provisioning and/or vms list set
            continue

        for vm_analysis_key in vms:
            # Each VM can redefine a provisioning data
            vm_analysis_data = provisioning_data.copy()
            vm_analysis_data.update(vms[vm_analysis_key])

            if not {'image', 'fs-type'}.issubset(
                    vm_analysis_data.viewkeys()):
                continue

            if vm_analysis_data['fs-type'] not in ['ntfs', 'fat32']:
                # Username and password are required for non-windows VMs
                if not {'username', 'password'}.issubset(
                        vm_analysis_data.viewkeys()):
                    continue

            # Set VM name here
            new_idlist.append('{}-{}'.format(idlist[i], vm_analysis_key))
            new_argvalues.append([args["provider"], vm_analysis_key])

    testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist, scope="module")


@pytest.fixture(scope="module")
def local_setup_provider(request, setup_provider_modscope, provider, vm_analysis_data):
    if provider.type == 'rhevm' and version.current_version() < "5.5":
        # See https://bugzilla.redhat.com/show_bug.cgi?id=1300030
        pytest.skip("SSA is not supported on RHEVM for appliances earlier than 5.5 and upstream")
    if GH("ManageIQ/manageiq:6506").blocks:
        pytest.skip("Upstream provisioning is blocked by" +
                    "https://github.com/ManageIQ/manageiq/issues/6506")
    if provider.type == 'virtualcenter':
        store.current_appliance.install_vddk(reboot=True, wait_for_web_ui_after_reboot=True)
        ensure_browser_open()
        set_host_credentials(request, provider, vm_analysis_data)

    # Make sure all roles are set
    roles = configuration.get_server_roles(db=False)
    roles["automate"] = True
    roles["smartproxy"] = True
    roles["smartstate"] = True
    configuration.set_server_roles(**roles)


def set_host_credentials(request, provider, vm_analysis_data):
    # Add credentials to host
    test_host = host.Host(name=vm_analysis_data['host'])
    wait_for(lambda: test_host.exists, delay=10, num_sec=120)

    host_list = cfme_data.get('management_systems', {})[provider.key].get('hosts', [])
    host_data = [x for x in host_list if x.name == vm_analysis_data['host']][0]

    if not test_host.has_valid_credentials:
        test_host.update(
            updates={'credentials': host.get_credentials_from_config(host_data['credentials'])},
            validate_credentials=True
        )

    # Remove creds after test
    @request.addfinalizer
    def _host_remove_creds():
        test_host.update(
            updates={'credentials': host.Host.Credential(
                principal="", secret="", verify_secret="")},
            validate_credentials=False
        )


@pytest.fixture(scope="module")
def vm_name(provider, analysis_type):
    vm_name = 'test_ssa_{}-{}'.format(fauxfactory.gen_alphanumeric(), analysis_type)
    return vm_name


@pytest.fixture(scope="module")
def vm_analysis_data(provider, analysis_type):
    base_data = provider.data.get('vm_analysis_new', {}).get('provisioning', {})
    base_data.update(provider.data.get(
        'vm_analysis_new', {}).get('vms', {}).get(analysis_type, {}))
    return base_data


@pytest.fixture(scope="module")
def instance(request, local_setup_provider, provider, vm_name, vm_analysis_data):
    """ Fixture to provision instance on the provider """
    template = vm_analysis_data.get('image', None)
    host_name, datastore_name = map(vm_analysis_data.get, ('host', 'datastore'))

    mgmt_system = provider.get_mgmt_system()

    provisioning_data = {
        'vm_name': vm_name,
        'host_name': {'name': [host_name]},
        'datastore_name': {'name': [datastore_name]},
    }

    try:
        provisioning_data['vlan'] = vm_analysis_data['vlan']
    except KeyError:
        # provisioning['vlan'] is required for rhevm provisioning
        if provider.type == 'rhevm':
            raise pytest.fail('rhevm requires a vlan value in provisioning info')

    vm = VM.factory(vm_name, provider)
lehinevych/cfme_tests
cfme/tests/infrastructure/test_instance_analysis.py
utils/db_queries.py
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations

import logging # isort:skip
log = logging.getLogger(__name__)

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# Bokeh imports
from ..core.has_props import abstract
from ..core.properties import (
    AnyRef,
    Bool,
    Int,
    NonNullable,
    Nullable,
    RestrictedDict,
    Seq,
    String,
)
from ..model import Model

#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------

__all__ = (
    'BooleanFilter',
    'CustomJSFilter',
    'Filter',
    'GroupFilter',
    'IndexFilter',
)

#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------

@abstract
class Filter(Model):
    ''' A Filter model represents a filtering operation that returns a row-wise
    subset of data when applied to a ``ColumnDataSource``.
    '''

class IndexFilter(Filter):
    ''' An ``IndexFilter`` filters data by returning the subset of data at a
    given set of indices.
    '''

    indices = Nullable(Seq(Int), help="""
    A list of integer indices representing the subset of data to select.
    """)

    def __init__(self, *args, **kw) -> None:
        if len(args) == 1 and "indices" not in kw:
            kw["indices"] = args[0]
        super().__init__(**kw)

class BooleanFilter(Filter):
    ''' A ``BooleanFilter`` filters data by returning the subset of data
    corresponding to indices where the values of the booleans array is True.
    '''

    booleans = Nullable(Seq(Bool), help="""
    A list of booleans indicating which rows of data to select.
    """)

    def __init__(self, *args, **kw) -> None:
        if len(args) == 1 and "booleans" not in kw:
            kw["booleans"] = args[0]
        super().__init__(**kw)

class GroupFilter(Filter):
    ''' A ``GroupFilter`` represents the rows of a ``ColumnDataSource`` where
    the values of the categorical column column_name match the group variable.
    '''

    column_name = NonNullable(String, help="""
    The name of the column to perform the group filtering operation on.
    """)

    group = NonNullable(String, help="""
    The value of the column indicating the rows of data to keep.
    """)

    def __init__(self, *args, **kw) -> None:
        if len(args) == 2 and "column_name" not in kw and "group" not in kw:
            kw["column_name"] = args[0]
            kw["group"] = args[1]
        super().__init__(**kw)

class CustomJSFilter(Filter):
    ''' Filter data sources with a custom defined JavaScript function.

    .. warning::
        The explicit purpose of this Bokeh Model is to embed *raw JavaScript
        code* for a browser to execute. If any part of the code is derived
        from untrusted user inputs, then you must take appropriate care to
        sanitize the user input prior to passing to Bokeh.

    '''

    args = RestrictedDict(String, AnyRef, disallow=("source",), help="""
    A mapping of names to Python objects. In particular those can be bokeh's
    models. These objects are made available to the callback's code snippet as
    the values of named parameters to the callback.
    """)

    code = String(default="", help="""
    A snippet of JavaScript code to filter data contained in a columnar data
    source. The code is made into the body of a function, and all of the named
    objects in ``args`` are available as parameters that the code can use. The
    variable ``source`` will contain the data source that is associated with
    the ``CDSView`` this filter is added to.

    The code should either return the indices of the subset or an array of
    booleans to use to subset data source rows.

    Example:

    .. code-block::

        code = '''
        const indices = []
        for (let i = 0; i <= source.data['some_column'].length; i++) {
            if (source.data['some_column'][i] == 'some_value') {
                indices.push(i)
            }
        }
        return indices
        '''

    """)

#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations # isort:skip

import pytest ; pytest

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# Bokeh imports
from bokeh.models import Button, Div, Plot

# Module under test
from bokeh import events # isort:skip

#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------

concrete_events = {v for v in globals().values()
                   if isinstance(v, type) and issubclass(v, events.Event)
                   and v.event_name is not None}

point_events = {v for v in globals().values()
                if isinstance(v, type) and issubclass(v, events.PointEvent)}

#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------

class EventCallback:
    def __init__(self, attributes=[]) -> None:
        self.event_name = None
        self.attributes = attributes
        self.payload = {}

    def __call__(self, event):
        self.event_name = event.event_name
        self.payload = {attr: getattr(event, attr) for attr in self.attributes}

def test_event_metaclass() -> None:
    # All events currently in the namespace should be in the EVENT_CLASSES set
    assert len(concrete_events - set(events._CONCRETE_EVENT_CLASSES.values())) == 0

def test_common_decode_json() -> None:
    for event_name, event_cls in events._CONCRETE_EVENT_CLASSES.items():
        if event_name is None:
            continue  # Skip abstract base class
        event = events.Event.decode_json({
            'event_name': event_cls.event_name,
            'event_values': {'model': {'id': 'test-model-id'}},
        })
        assert isinstance(event, events.Event)
        if isinstance(event, events.ModelEvent):
            assert event._model_id == 'test-model-id'

def test_pointevent_subclass_decode_json() -> None:
    event_values = dict(model_id='test-model-id', sx=3, sy=-2, x=10, y=100)
    for event_cls in point_events:
        if event_cls.event_name is None:
            continue  # Skip abstract base class
        event = events.Event.decode_json({'event_name': event_cls.event_name,
                                          'event_values': event_values.copy()})
        assert event.sx == 3
        assert event.sy == -2
        assert event.x == 10
        assert event.y == 100
        assert event._model_id == 'test-model-id'

def test_panevent_decode_json() -> None:
    event_values = dict(model={'id': 'test-model-id'}, delta_x=0.1, delta_y=0.3,
                        sx=3, sy=-2, x=10, y=100)
    event = events.Event.decode_json({'event_name': events.Pan.event_name,
                                      'event_values': event_values.copy()})
    assert event.delta_x == 0.1
    assert event.delta_y == 0.3
    assert event.sx == 3
    assert event.sy == -2
    assert event.x == 10
    assert event.y == 100
    assert event._model_id == 'test-model-id'

def test_mousewheelevent_decode_json() -> None:
    event_values = dict(model={'id': 'test-model-id'}, delta=-0.1, sx=3, sy=-2, x=10, y=100)
    event = events.Event.decode_json({'event_name': events.MouseWheel.event_name,
                                      'event_values': event_values.copy()})
    assert event.delta == -0.1
    assert event.sx == 3
    assert event.sy == -2
    assert event.x == 10
    assert event.y == 100
    assert event._model_id == 'test-model-id'

def test_pinchevent_decode_json() -> None:
    event_values = dict(model={'id': 'test-model-id'}, scale=42, sx=3, sy=-2, x=10, y=100)
    event = events.Event.decode_json({'event_name': events.Pinch.event_name,
                                      'event_values': event_values.copy()})
    assert event.scale == 42
    assert event.sx == 3
    assert event.sy == -2
    assert event.x == 10
    assert event.y == 100
    assert event._model_id == 'test-model-id'

def test_event_constructor_button() -> None:
    model = Button()
    event = events.ModelEvent(model)
    assert event._model_id == model.id

def test_event_constructor_div() -> None:
    model = Div()
    event = events.ModelEvent(model)
    assert event._model_id == model.id

def test_event_constructor_plot() -> None:
    model = Plot()
    event = events.ModelEvent(model)
    assert event._model_id == model.id

def test_buttonclick_constructor_button() -> None:
    model = Button()
    event = events.ButtonClick(model)
    assert event._model_id == model.id

def test_buttonclick_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.ButtonClick(Div())

def test_buttonclick_constructor_plot() -> None:
    with pytest.raises(ValueError):
        events.ButtonClick(Plot())

def test_lodstart_constructor_button() -> None:
    with pytest.raises(ValueError):
        events.LODStart(Button())

def test_lodstart_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.LODStart(Div())

def test_lodstart_constructor_plot() -> None:
    model = Plot()
    event = events.LODStart(model)
    assert event._model_id == model.id

def test_lodend_constructor_button() -> None:
    with pytest.raises(ValueError):
        events.LODEnd(Button())

def test_lodend_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.LODEnd(Div())

def test_lodend_constructor_plot() -> None:
    model = Plot()
    event = events.LODEnd(model)
    assert event._model_id == model.id

def test_plotevent_constructor_button() -> None:
    with pytest.raises(ValueError):
        events.PlotEvent(Button())

def test_plotevent_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.PlotEvent(Div())

def test_plotevent_constructor_plot() -> None:
    model = Plot()
    event = events.PlotEvent(model)
    assert event._model_id == model.id

def test_pointEvent_constructor_plot() -> None:
    model = Plot()
    event = events.PointEvent(model, sx=3, sy=-2, x=10, y=100)
    assert event.sx == 3
    assert event.sy == -2
    assert event.x == 10
    assert event.y == 100
    assert event._model_id == model.id

def test_pointevent_constructor_button() -> None:
    with pytest.raises(ValueError):
        events.PointEvent(Button(), sx=3, sy=-2, x=10, y=100)

def test_pointevent_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.PointEvent(Div(), sx=3, sy=-2, x=10, y=100)

def test_pointevent_subclass_constructor_plot() -> None:
    model = Plot()
    for subcls in point_events:
        event = subcls(model, sx=3, sy=-2, x=10, y=100)
        assert event.sx == 3
        assert event.sy == -2
        assert event.x == 10
        assert event.y == 100
        assert event._model_id == model.id

def test_pointevent_subclass_constructor_button() -> None:
    model = Button()
    for subcls in point_events:
        with pytest.raises(ValueError):
            subcls(model, sx=3, sy=-2, x=10, y=100)

def test_pointevent_subclass_constructor_div() -> None:
    model = Div()
    for subcls in point_events:
        with pytest.raises(ValueError):
            subcls(model, sx=3, sy=-2, x=10, y=100)

# Testing event callback invocation

def test_buttonclick_event_callbacks() -> None:
    button = Button()
    test_callback = EventCallback()
    button.on_event(events.ButtonClick, test_callback)
    assert test_callback.event_name == None
    button._trigger_event(events.ButtonClick(button))
    assert test_callback.event_name == events.ButtonClick.event_name

def test_atomic_plot_event_callbacks() -> None:
    plot = Plot()
    for event_cls in [events.LODStart, events.LODEnd]:
        test_callback = EventCallback()
        plot.on_event(event_cls, test_callback)
        assert test_callback.event_name == None
        plot._trigger_event(event_cls(plot))
        assert test_callback.event_name == event_cls.event_name

def test_pointevent_callbacks() -> None:
    plot = Plot()
    payload = dict(sx=3, sy=-2, x=10, y=100)
    for event_cls in point_events:
        test_callback = EventCallback(['sx', 'sy', 'x', 'y'])
        plot.on_event(event_cls, test_callback)
        assert test_callback.event_name == None
        plot._trigger_event(event_cls(plot, **payload))
        assert test_callback.event_name == event_cls.event_name
        assert test_callback.payload == payload

def test_mousewheel_callbacks() -> None:
    plot = Plot()
    payload = dict(sx=3, sy=-2, x=10, y=100, delta=5)
    test_callback = EventCallback(['sx', 'sy', 'x', 'y', 'delta'])
    plot.on_event(events.MouseWheel, test_callback)
    assert test_callback.event_name == None
    plot._trigger_event(events.MouseWheel(plot, **payload))
    assert test_callback.event_name == events.MouseWheel.event_name
    assert test_callback.payload == payload

def test_pan_callbacks() -> None:
    plot = Plot()
    payload = dict(sx=3, sy=-2, x=10, y=100, delta_x=2, delta_y=3.2)
    test_callback = EventCallback(['sx', 'sy', 'x', 'y', 'delta_x', 'delta_y'])
    plot.on_event(events.Pan, test_callback)
    assert test_callback.event_name == None
    plot._trigger_event(events.Pan(plot, **payload))
    assert test_callback.event_name == events.Pan.event_name
    assert test_callback.payload == payload

def test_pinch_callbacks() -> None:
    plot = Plot()
    payload = dict(sx=3, sy=-2, x=10, y=100, scale=42)
    test_callback = EventCallback(['sx', 'sy', 'x', 'y', 'scale'])
    plot.on_event(events.Pinch, test_callback)
    assert test_callback.event_name == None
    plot._trigger_event(events.Pinch(plot, **payload))
    assert test_callback.event_name == events.Pinch.event_name
    assert test_callback.payload == payload

#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
bokeh/bokeh
tests/unit/bokeh/test_events.py
bokeh/models/filters.py
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Display a variety of simple scatter marker shapes whose attributes
can be associated with data columns from
:class:`~bokeh.models.sources.ColumnDataSource` objects.

.. warning::
    The individual marker classes in this module are **deprecated since Bokeh
    2.3.0.** Please replace all occurrences of ``Marker`` models with
    :class:`~bokeh.models.glyphs.Scatter` glyphs. For example: instead of
    ``Asterisk()``, use ``Scatter(marker="asterisk")``.

    For backwards compatibility, all markers in this module currently link to
    their respective replacements using the
    :class:`~bokeh.models.glyphs.Scatter` glyph.

The full list of markers accessible through this module:

.. toctree::
   :maxdepth: 2

By definition, all markers accept the following set of properties:

* ``x``, ``y`` position
* ``size`` in pixels
* ``line``, ``fill``, and ``hatch`` properties
* ``angle``

The ``asterisk``, ``cross``, ``dash``, ``dot``, ``x``, and ``y`` only render
line components. Those markers ignore any values that are passed to the
``fill`` and ``hatch`` properties.

.. note::
    When you draw ``circle`` markers with ``Scatter``, you can only assign a
    size in |screen units| (by passing a number of pixels to the ``size``
    property). In case you want to define the radius of circles in
    |data units|, use the :class:`~bokeh.models.glyphs.Circle` glyph instead
    of the ``Scatter`` glyph.

'''

#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations

import logging # isort:skip
log = logging.getLogger(__name__)

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# Bokeh imports
from ..util.deprecation import deprecated
from . import glyphs
from .glyphs import Circle, Marker, Scatter

#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------

__all__ = (
    'Asterisk',
    'Circle',
    'CircleCross',
    'CircleDot',
    'CircleX',
    'CircleY',
    'Cross',
    'Dash',
    'Diamond',
    'DiamondCross',
    'DiamondDot',
    'Dot',
    'Hex',
    'HexDot',
    'InvertedTriangle',
    'Marker',
    'Plus',
    'Scatter',
    'Square',
    'SquareCross',
    'SquareDot',
    'SquarePin',
    'SquareX',
    'Star',
    'StarDot',
    'Triangle',
    'TriangleDot',
    'TrianglePin',
    'X',
    'Y',
)

#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------

def Asterisk(*args, **kwargs):
    ''' Render asterisk '*' markers. (deprecated) '''
    deprecated((2, 3, 0), "Asterisk()", "Scatter(marker='asterisk')")
    return Scatter(*args, **kwargs, marker="asterisk")

def CircleCross(*args, **kwargs):
    ''' Render circle markers with a '+' cross through the center. (deprecated) '''
    deprecated((2, 3, 0), "CircleCross()", "Scatter(marker='circle_cross')")
    return Scatter(*args, **kwargs, marker="circle_cross")

def CircleDot(*args, **kwargs):
    ''' Render circle markers with center dots. (deprecated) '''
    deprecated((2, 3, 0), "CircleDot()", "Scatter(marker='circle_dot')")
    return Scatter(*args, **kwargs, marker="circle_dot")

def CircleX(*args, **kwargs):
    ''' Render circle markers with an 'X' cross through the center. (deprecated) '''
    deprecated((2, 3, 0), "CircleX()", "Scatter(marker='circle_x')")
    return Scatter(*args, **kwargs, marker="circle_x")

def CircleY(*args, **kwargs):
    ''' Render circle markers with a 'Y' cross through the center. (deprecated) '''
    deprecated((2, 3, 0), "CircleY()", "Scatter(marker='circle_y')")
    return Scatter(*args, **kwargs, marker="circle_y")

def Cross(*args, **kwargs):
    ''' Render '+' cross markers. (deprecated) '''
    deprecated((2, 3, 0), "Cross()", "Scatter(marker='cross')")
    return Scatter(*args, **kwargs, marker="cross")

def Dash(*args, **kwargs):
    ''' Render dash markers. (deprecated) '''
    deprecated((2, 3, 0), "Dash()", "Scatter(marker='dash')")
    return Scatter(*args, **kwargs, marker="dash")

def Diamond(*args, **kwargs):
    ''' Render diamond markers. (deprecated) '''
    deprecated((2, 3, 0), "Diamond()", "Scatter(marker='diamond')")
    return Scatter(*args, **kwargs, marker="diamond")

def DiamondCross(*args, **kwargs):
    ''' Render diamond markers with a '+' cross through the center. (deprecated) '''
    deprecated((2, 3, 0), "DiamondCross()", "Scatter(marker='diamond_cross')")
    return Scatter(*args, **kwargs, marker="diamond_cross")

def DiamondDot(*args, **kwargs):
    ''' Render diamond markers with center dots. (deprecated) '''
    deprecated((2, 3, 0), "DiamondDot()", "Scatter(marker='diamond_dot')")
    return Scatter(*args, **kwargs, marker="diamond_dot")

def Dot(*args, **kwargs):
    ''' Render dots (one-quarter radius circles). (deprecated) '''
    deprecated((2, 3, 0), "Dot()", "Scatter(marker='dot')")
    return Scatter(*args, **kwargs, marker="dot")

def Hex(*args, **kwargs):
    ''' Render hexagon markers. (deprecated) '''
    deprecated((2, 3, 0), "Hex()", "Scatter(marker='hex')")
    return Scatter(*args, **kwargs, marker="hex")

def HexDot(*args, **kwargs):
    ''' Render hexagon markers with center dots. (deprecated) '''
    deprecated((2, 3, 0), "HexDot()", "Scatter(marker='hex_dot')")
    return Scatter(*args, **kwargs, marker="hex_dot")

def InvertedTriangle(*args, **kwargs):
    ''' Render upside-down triangle markers. (deprecated) '''
    deprecated((2, 3, 0), "InvertedTriangle()", "Scatter(marker='inverted_triangle')")
    return Scatter(*args, **kwargs, marker="inverted_triangle")

def Plus(*args, **kwargs):
    ''' Render filled plus markers. (deprecated) '''
    deprecated((2, 3, 0), "Plus()", "Scatter(marker='plus')")
    return Scatter(*args, **kwargs, marker="plus")

def Square(*args, **kwargs):
    ''' Render square markers. (deprecated) '''
    deprecated((2, 3, 0), "Square()", "Scatter(marker='square')")
    return Scatter(*args, **kwargs, marker="square")

def SquareDot(*args, **kwargs):
    ''' Render square markers with center dots. (deprecated) '''
    deprecated((2, 3, 0), "SquareDot()", "Scatter(marker='square_dot')")
    return Scatter(*args, **kwargs, marker="square_dot")

def SquarePin(*args, **kwargs):
    ''' Render pin-cushion square markers. (deprecated) '''
    deprecated((2, 3, 0), "SquarePin()", "Scatter(marker='square_pin')")
    return Scatter(*args, **kwargs, marker="square_pin")

def SquareCross(*args, **kwargs):
    ''' Render square markers with a '+' cross through the center. (deprecated) '''
    deprecated((2, 3, 0), "SquareCross()", "Scatter(marker='square_cross')")
    return Scatter(*args, **kwargs, marker="square_cross")

def SquareX(*args, **kwargs):
(deprecated) ''' deprecated((2, 3, 0), "SquareX()", "Scatter(marker='square_x')") return Scatter(*args, **kwargs, marker="square_x") def Star(*args, **kwargs): ''' Render star markers. (deprecated) ''' deprecated((2, 3, 0), "Star()", "Scatter(marker='star')") return Scatter(*args, **kwargs, marker="star") def StarDot(*args, **kwargs): ''' Render star markers with center dots. (deprecated) ''' deprecated((2, 3, 0), "StarDot()", "Scatter(marker='star_dot')") return Scatter(*args, **kwargs, marker="star_dot") def Triangle(*args, **kwargs): ''' Render triangle markers. (deprecated) ''' deprecated((2, 3, 0), "Triangle()", "Scatter(marker='triangle')") return Scatter(*args, **kwargs, marker="triangle") def TriangleDot(*args, **kwargs): ''' Render triangle markers with center dots. (deprecated) ''' deprecated((2, 3, 0), "TriangleDot()", "Scatter(marker='triangle_dot')") return Scatter(*args, **kwargs, marker="triangle_dot") def TrianglePin(*args, **kwargs): ''' Render pin-cushion triangle markers. (deprecated) ''' deprecated((2, 3, 0), "TrianglePin()", "Scatter(marker='triangle_pin')") return Scatter(*args, **kwargs, marker="triangle_pin") def X(*args, **kwargs): ''' Render 'X' markers. (deprecated) ''' deprecated((2, 3, 0), "X()", "Scatter(marker='x')") return Scatter(*args, **kwargs, marker="x") def Y(*args, **kwargs): ''' Render 'Y' markers. (deprecated) ''' deprecated((2, 3, 0), "Y()", "Scatter(marker='y')") return Scatter(*args, **kwargs, marker="y") #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #----------------------------------------------------------------------------- marker_types = { "asterisk": Asterisk, "circle": Circle, "circle_cross": CircleCross, "circle_dot": CircleDot, "circle_x": CircleX, "circle_y": CircleY, "cross": Cross, "dash": Dash, "diamond": Diamond, "diamond_cross": DiamondCross, "diamond_dot": DiamondDot, "dot": Dot, "hex": Hex, "hex_dot": HexDot, "inverted_triangle": InvertedTriangle, "plus": Plus, "square": Square, "square_cross": SquareCross, "square_dot": SquareDot, "square_pin": SquarePin, "square_x": SquareX, "star": Star, "star_dot": StarDot, "triangle": Triangle, "triangle_dot": TriangleDot, "triangle_pin": TrianglePin, "x": X, "y": Y, } glyphs.Asterisk = Asterisk glyphs.CircleCross = CircleCross glyphs.CircleDot = CircleDot glyphs.CircleY = CircleY glyphs.CircleX = CircleX glyphs.Cross = Cross glyphs.Dash = Dash glyphs.Diamond = Diamond glyphs.DiamondCross = DiamondCross glyphs.DiamondDot = DiamondDot glyphs.Dot = Dot glyphs.Hex = Hex glyphs.HexDot = HexDot glyphs.InvertedTriangle = InvertedTriangle glyphs.Plus = Plus glyphs.Square = Square glyphs.SquareCross = SquareCross glyphs.SquareDot = SquareDot glyphs.SquarePin = SquarePin glyphs.SquareX = SquareX glyphs.Star = Star glyphs.StarDot = StarDot glyphs.Triangle = Triangle glyphs.TriangleDot = TriangleDot glyphs.TrianglePin = TrianglePin glyphs.X = X glyphs.Y = Y
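# --- Editor's example (hedged sketch, not part of the original module) ------
# A minimal migration from a deprecated Marker model to the Scatter glyph, as
# the module docstring above recommends. The data, column names, and figure
# setup below are illustrative assumptions, not taken from the original file.
from bokeh.models import ColumnDataSource, Scatter
from bokeh.plotting import figure, show

source = ColumnDataSource(data=dict(x=[1, 2, 3], y=[4, 5, 6]))
plot = figure()

# Before (deprecated since 2.3.0):
#     plot.add_glyph(source, Asterisk(x="x", y="y", size=12))
# After: pass the marker name to a Scatter glyph instead.
glyph = Scatter(x="x", y="y", size=12, marker="asterisk")
plot.add_glyph(source, glyph)
show(plot)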
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- from __future__ import annotations # isort:skip import pytest ; pytest #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Bokeh imports from bokeh.models import Button, Div, Plot # Module under test from bokeh import events # isort:skip #----------------------------------------------------------------------------- # Setup #----------------------------------------------------------------------------- concrete_events = {v for v in globals().values() if isinstance(v, type) and issubclass(v, events.Event) and v.event_name is not None} point_events = {v for v in globals().values() if isinstance(v, type) and issubclass(v, events.PointEvent)} #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- class EventCallback: def __init__(self, attributes=[]) -> None: self.event_name = None self.attributes = attributes self.payload = {} def __call__(self, event): self.event_name = event.event_name self.payload = {attr:getattr(event, attr) for attr in self.attributes} def test_event_metaclass() -> None: # All events currently in the namespace should be in the EVENT_CLASSES set assert len(concrete_events - set(events._CONCRETE_EVENT_CLASSES.values())) == 0 def test_common_decode_json() -> None: for event_name, event_cls in events._CONCRETE_EVENT_CLASSES.items(): if event_name is None: continue # Skip abstract base class event = events.Event.decode_json({ 'event_name': event_cls.event_name, 'event_values': {'model': {'id': 'test-model-id'}}, }) assert isinstance(event, events.Event) if isinstance(event, events.ModelEvent): assert event._model_id == 'test-model-id' def test_pointevent_subclass_decode_json() -> None: event_values = dict(model_id='test-model-id', sx=3, sy=-2, x=10, y=100) for event_cls in point_events: if event_cls.event_name is None: continue # Skip abstract base class event = events.Event.decode_json({'event_name': event_cls.event_name, 'event_values': event_values.copy()}) assert event.sx == 3 assert event.sy == -2 assert event.x == 10 assert event.y == 100 assert event._model_id == 'test-model-id' def test_panevent_decode_json() -> None: event_values = dict(model={'id': 'test-model-id'}, delta_x=0.1, delta_y=0.3, sx=3, sy=-2, x=10, y=100) event = events.Event.decode_json({'event_name': events.Pan.event_name, 'event_values': event_values.copy()}) assert event.delta_x == 0.1 assert event.delta_y == 0.3 assert event.sx == 3 assert event.sy == -2 assert event.x == 10 assert event.y == 100 assert event._model_id == 'test-model-id' def test_mousewheelevent_decode_json() -> None: event_values = dict(model={'id': 'test-model-id'}, delta=-0.1, sx=3, sy=-2, x=10, y=100) event = events.Event.decode_json({'event_name': events.MouseWheel.event_name, 'event_values': event_values.copy()}) assert event.delta == -0.1 assert event.sx == 3 assert event.sy == -2 assert event.x == 10 
assert event.y == 100 assert event._model_id == 'test-model-id' def test_pinchevent_decode_json() -> None: event_values = dict(model={'id': 'test-model-id'}, scale=42, sx=3, sy=-2, x=10, y=100) event = events.Event.decode_json({'event_name': events.Pinch.event_name, 'event_values': event_values.copy()}) assert event.scale == 42 assert event.sx == 3 assert event.sy == -2 assert event.x == 10 assert event.y == 100 assert event._model_id == 'test-model-id' def test_event_constructor_button() -> None: model = Button() event = events.ModelEvent(model) assert event._model_id == model.id def test_event_constructor_div() -> None: model = Div() event = events.ModelEvent(model) assert event._model_id == model.id def test_event_constructor_plot() -> None: model = Plot() event = events.ModelEvent(model) assert event._model_id == model.id def test_buttonclick_constructor_button() -> None: model = Button() event = events.ButtonClick(model) assert event._model_id == model.id def test_buttonclick_constructor_div() -> None: with pytest.raises(ValueError): events.ButtonClick(Div()) def test_buttonclick_constructor_plot() -> None: with pytest.raises(ValueError): events.ButtonClick(Plot()) def test_lodstart_constructor_button() -> None: with pytest.raises(ValueError): events.LODStart(Button()) def test_lodstart_constructor_div() -> None: with pytest.raises(ValueError): events.LODStart(Div()) def test_lodstart_constructor_plot() -> None: model = Plot() event = events.LODStart(model) assert event._model_id == model.id def test_lodend_constructor_button() -> None: with pytest.raises(ValueError): events.LODEnd(Button()) def test_lodend_constructor_div() -> None: with pytest.raises(ValueError): events.LODEnd(Div()) def test_lodend_constructor_plot() -> None: model = Plot() event = events.LODEnd(model) assert event._model_id == model.id def test_plotevent_constructor_button() -> None: with pytest.raises(ValueError): events.PlotEvent(Button()) def test_plotevent_constructor_div() -> None: with pytest.raises(ValueError): events.PlotEvent(Div()) def test_plotevent_constructor_plot() -> None: model = Plot() event = events.PlotEvent(model) assert event._model_id == model.id def test_pointEvent_constructor_plot() -> None: model = Plot() event = events.PointEvent(model, sx=3, sy=-2, x=10, y=100) assert event.sx == 3 assert event.sy == -2 assert event.x == 10 assert event.y == 100 assert event._model_id == model.id def test_pointevent_constructor_button() -> None: with pytest.raises(ValueError): events.PointEvent(Button(), sx=3, sy=-2, x=10, y=100) def test_pointevent_constructor_div() -> None: with pytest.raises(ValueError): events.PointEvent(Div(), sx=3, sy=-2, x=10, y=100) def test_pointevent_subclass_constructor_plot() -> None: model = Plot() for subcls in point_events: event = subcls(model, sx=3, sy=-2, x=10, y=100) assert event.sx == 3 assert event.sy == -2 assert event.x == 10 assert event.y == 100 assert event._model_id == model.id def test_pointevent_subclass_constructor_button() -> None: model = Button() for subcls in point_events: with pytest.raises(ValueError): subcls(model, sx=3, sy=-2, x=10, y=100) def test_pointevent_subclass_constructor_div() -> None: model = Div() for subcls in point_events: with pytest.raises(ValueError): subcls(model, sx=3, sy=-2, x=10, y=100) # Testing event callback invocation def test_buttonclick_event_callbacks() -> None: button = Button() test_callback = EventCallback() button.on_event(events.ButtonClick, test_callback) assert test_callback.event_name == None 
button._trigger_event(events.ButtonClick(button)) assert test_callback.event_name == events.ButtonClick.event_name def test_atomic_plot_event_callbacks() -> None: plot = Plot() for event_cls in [events.LODStart, events.LODEnd]: test_callback = EventCallback() plot.on_event(event_cls, test_callback) assert test_callback.event_name == None plot._trigger_event(event_cls(plot)) assert test_callback.event_name == event_cls.event_name def test_pointevent_callbacks() -> None: plot = Plot() payload = dict(sx=3, sy=-2, x=10, y=100) for event_cls in point_events: test_callback = EventCallback(['sx','sy','x','y']) plot.on_event(event_cls, test_callback) assert test_callback.event_name == None plot._trigger_event(event_cls(plot, **payload)) assert test_callback.event_name == event_cls.event_name assert test_callback.payload == payload def test_mousewheel_callbacks() -> None: plot = Plot() payload = dict(sx=3, sy=-2, x=10, y=100, delta=5) test_callback = EventCallback(['sx','sy','x','y', 'delta']) plot.on_event(events.MouseWheel, test_callback) assert test_callback.event_name == None plot._trigger_event(events.MouseWheel(plot, **payload)) assert test_callback.event_name == events.MouseWheel.event_name assert test_callback.payload == payload def test_pan_callbacks() -> None: plot = Plot() payload = dict(sx=3, sy=-2, x=10, y=100, delta_x=2, delta_y=3.2) test_callback = EventCallback(['sx','sy','x','y', 'delta_x', 'delta_y']) plot.on_event(events.Pan, test_callback) assert test_callback.event_name == None plot._trigger_event(events.Pan(plot, **payload)) assert test_callback.event_name == events.Pan.event_name assert test_callback.payload == payload def test_pinch_callbacks() -> None: plot = Plot() payload = dict(sx=3, sy=-2, x=10, y=100, scale=42) test_callback = EventCallback(['sx','sy','x','y', 'scale']) plot.on_event(events.Pinch, test_callback) assert test_callback.event_name == None plot._trigger_event(events.Pinch(plot, **payload)) assert test_callback.event_name == events.Pinch.event_name assert test_callback.payload == payload #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
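# --- Editor's example (hedged sketch) ---------------------------------------
# The pattern the tests above exercise: register a Python callback for a UI
# event on a model, then simulate the client-side trigger. The handler name is
# illustrative; _trigger_event is the private helper the tests themselves use.
from bokeh.events import Tap
from bokeh.models import Plot

def report_tap(event):
    # PointEvent subclasses carry screen (sx, sy) and data-space (x, y) coords
    print(event.event_name, event.sx, event.sy, event.x, event.y)

plot = Plot()
plot.on_event(Tap, report_tap)
plot._trigger_event(Tap(plot, sx=3, sy=-2, x=10, y=100))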
bokeh/bokeh
tests/unit/bokeh/test_events.py
bokeh/models/markers.py
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- ''' Bokeh Application Handler to look for Bokeh server lifecycle callbacks in a specified Python module. ''' #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- from __future__ import annotations import logging # isort:skip log = logging.getLogger(__name__) #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Standard library imports import os from types import ModuleType from typing import List # Bokeh imports from ...core.types import PathLike from ...util.callback_manager import _check_callback from .code_runner import CodeRunner from .lifecycle import LifecycleHandler #----------------------------------------------------------------------------- # Globals and constants #----------------------------------------------------------------------------- __all__ = ( 'ServerLifecycleHandler', ) #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- class ServerLifecycleHandler(LifecycleHandler): ''' Load a script which contains server lifecycle callbacks. .. autoclasstoc:: ''' def __init__(self, *, filename: PathLike, argv: List[str] = [], package: ModuleType | None = None) -> None: ''' Keyword Args: filename (str) : path to a module to load lifecycle callbacks from argv (list[str], optional) : a list of string arguments to use as ``sys.argv`` when the callback code is executed. (default: []) ''' super().__init__() with open(filename, 'r', encoding='utf-8') as f: source = f.read() self._runner = CodeRunner(source, filename, argv, package=package) if not self._runner.failed: # unlike ScriptHandler, we only load the module one time self._module = self._runner.new_module() def extract_callbacks() -> None: contents = self._module.__dict__ if 'on_server_loaded' in contents: self._on_server_loaded = contents['on_server_loaded'] if 'on_server_unloaded' in contents: self._on_server_unloaded = contents['on_server_unloaded'] if 'on_session_created' in contents: self._on_session_created = contents['on_session_created'] if 'on_session_destroyed' in contents: self._on_session_destroyed = contents['on_session_destroyed'] _check_callback(self._on_server_loaded, ('server_context',), what="on_server_loaded") _check_callback(self._on_server_unloaded, ('server_context',), what="on_server_unloaded") _check_callback(self._on_session_created, ('session_context',), what="on_session_created") _check_callback(self._on_session_destroyed, ('session_context',), what="on_session_destroyed") self._runner.run(self._module, extract_callbacks) # Properties -------------------------------------------------------------- @property def error(self) -> str | None: ''' If the handler fails, may contain a related error message. 
''' return self._runner.error @property def error_detail(self) -> str | None: ''' If the handler fails, may contain a traceback or other details. ''' return self._runner.error_detail @property def failed(self) -> bool: ''' ``True`` if the lifecycle callbacks failed to execute ''' return self._runner.failed # Public methods ---------------------------------------------------------- def url_path(self) -> str | None: ''' The last path component for the basename of the path to the callback module. ''' if self.failed: return None else: # TODO should fix invalid URL characters return '/' + os.path.splitext(os.path.basename(self._runner.path))[0] #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
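# --- Editor's example (hedged sketch) ---------------------------------------
# A minimal module of the kind ServerLifecycleHandler loads, e.g. a file named
# server_lifecycle.py placed next to a Bokeh server application. The four
# function names are the hooks extract_callbacks() looks for, each taking the
# single argument _check_callback enforces; the bodies are illustrative.
def on_server_loaded(server_context):
    # Runs once when the server starts, before any sessions are created.
    print("server loaded")

def on_server_unloaded(server_context):
    # Runs once when the server shuts down.
    print("server unloaded")

def on_session_created(session_context):
    print("session created:", session_context.id)

def on_session_destroyed(session_context):
    print("session destroyed:", session_context.id)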
bokeh/bokeh
tests/unit/bokeh/test_events.py
bokeh/application/handlers/server_lifecycle.py
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- ''' Generate new secret keys that can be used by the Bokeh server to cryptographically sign session IDs. To generate a new secret key for use with Bokeh server, execute .. code-block:: sh bokeh secret on the command line. The key will be printed to standard output. The secret key can be provided to the ``bokeh serve`` command with the ``BOKEH_SECRET_KEY`` environment variable. .. warning:: You must keep the secret secret! Protect it like a root password. ''' #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- from __future__ import annotations import logging # isort:skip log = logging.getLogger(__name__) #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Standard library imports from argparse import Namespace # Bokeh imports from bokeh.util.token import generate_secret_key # Bokeh imports from ..subcommand import Subcommand #----------------------------------------------------------------------------- # Globals and constants #----------------------------------------------------------------------------- __all__ = ( 'Secret', ) #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- class Secret(Subcommand): ''' Subcommand to generate a new secret key. ''' #: name for this subcommand name = "secret" help = "Create a Bokeh secret key for use with Bokeh server" def invoke(self, args: Namespace) -> None: ''' ''' key = generate_secret_key() # suppress LGTM, since the intent is precisely to output a secret print(key) # lgtm [py/clear-text-logging-sensitive-data] #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
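# --- Editor's example (hedged sketch) ---------------------------------------
# The same key generation the subcommand performs, done directly from Python;
# handy for provisioning scripts. The serve invocation in the comment below is
# an illustrative assumption.
from bokeh.util.token import generate_secret_key

key = generate_secret_key()
# Typically exported for the server process, e.g.:
#   BOKEH_SECRET_KEY=<key> bokeh serve --session-ids external-signed app/
print(key)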
bokeh/bokeh
tests/unit/bokeh/test_events.py
bokeh/command/subcommands/secret.py
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- ''' Utilities for checking dependencies ''' #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- from __future__ import annotations import logging # isort:skip log = logging.getLogger(__name__) #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Standard library imports from importlib import import_module from types import ModuleType #----------------------------------------------------------------------------- # Globals and constants #----------------------------------------------------------------------------- __all__ = ( 'import_optional', 'import_required', ) #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- def import_optional(mod_name: str) -> ModuleType | None: ''' Attempt to import an optional dependency. Silently returns None if the requested module is not available. Args: mod_name (str) : name of the optional module to try to import Returns: imported module or None, if import fails ''' try: return import_module(mod_name) except ImportError: pass except Exception: msg = f"Failed to import optional module `{mod_name}`" log.exception(msg) return None def import_required(mod_name: str, error_msg: str) -> ModuleType: ''' Attempt to import a required dependency. Raises a RuntimeError if the requested module is not available. Args: mod_name (str) : name of the required module to try to import error_msg (str) : error message to raise when the module is missing Returns: imported module Raises: RuntimeError ''' try: return import_module(mod_name) except ImportError as e: raise RuntimeError(error_msg) from e #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
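# --- Editor's example (hedged sketch) ---------------------------------------
# How the two helpers above differ in failure behavior. The module names and
# error message are examples only.
from bokeh.util.dependencies import import_optional, import_required

pd = import_optional("pandas")  # returns None if pandas is absent; never raises
if pd is not None:
    print("pandas available:", pd.__version__)

np = import_required(           # raises RuntimeError with this message if missing
    "numpy", "numpy is required for this feature",
)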
bokeh/bokeh
tests/unit/bokeh/test_events.py
bokeh/util/dependencies.py
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- ''' Provides ``PropertyCallbackManager`` and ``EventCallbackManager`` mixin classes for adding ``on_change`` and ``on_event`` callback interfaces to classes. ''' #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- from __future__ import annotations import logging # isort:skip log = logging.getLogger(__name__) #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Standard library imports from inspect import signature from typing import ( TYPE_CHECKING, Callable, Dict, List, Type, ) # Bokeh imports from ..core.types import Unknown from ..events import Event from ..util.functions import get_param_info if TYPE_CHECKING: from ..core.has_props import Setter from ..document import Document from ..document.events import DocumentPatchedEvent #----------------------------------------------------------------------------- # Globals and constants #----------------------------------------------------------------------------- __all__ = ( 'EventCallbackManager', 'PropertyCallbackManager', ) #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- EventCallback = Callable[[Event], None] PropertyCallback = Callable[[str, Unknown, Unknown], None] class EventCallbackManager: ''' A mixin class to provide an interface for registering and triggering event callbacks on the Python side. ''' _event_callbacks: Dict[str, List[EventCallback]] _document: Document | None def __init__(self, *args, **kw) -> None: super().__init__(*args, **kw) self._event_callbacks = {} def on_event(self, event: str | Type[Event], *callbacks: EventCallback) -> None: ''' Run callbacks when the specified event occurs on this Model Not all Events are supported for all Models. See specific Events in :ref:`bokeh.events` for more information on which Models are able to trigger them. ''' if not isinstance(event, str) and issubclass(event, Event): event = event.event_name for callback in callbacks: if _nargs(callback) != 0: _check_callback(callback, ('event',), what='Event callback') if event not in self._event_callbacks: self._event_callbacks[event] = [cb for cb in callbacks] else: self._event_callbacks[event].extend(callbacks) if event not in self.subscribed_events: self.subscribed_events.append(event) def _trigger_event(self, event: Event) -> None: def invoke() -> None: for callback in self._event_callbacks.get(event.event_name,[]): if event._model_id is not None and self.id == event._model_id: if _nargs(callback) == 0: callback() else: callback(event) # TODO: here we might mirror the property callbacks and have something # like Document._notify_event which creates an *internal* Bokeh event # (for the user event, confusing!) that then dispatches in the document # and applies curdoc wrapper there. However, most of that machinery is # to support the bi-directionality of property changes. Currently (user) # events only run from client to server. 
Would like to see if some of the # internal eventing can be reduced or simplified in general before # plugging more into it. For now, just handle the curdoc bits here. if hasattr(self, '_document') and self._document is not None: self._document._with_self_as_curdoc(invoke) else: invoke() def _update_event_callbacks(self) -> None: if self.document is None: return for key in self._event_callbacks: self.document._subscribed_models[key].add(self) class PropertyCallbackManager: ''' A mixin class to provide an interface for registering and triggering callbacks. ''' _callbacks: Dict[str, List[PropertyCallback]] _document: Document | None def __init__(self, *args, **kw) -> None: super().__init__(*args, **kw) self._callbacks = {} def on_change(self, attr: str, *callbacks: PropertyCallback) -> None: ''' Add a callback on this object to trigger when ``attr`` changes. Args: attr (str) : an attribute name on this object callback (callable) : a callback function to register Returns: None ''' if len(callbacks) == 0: raise ValueError("on_change takes an attribute name and one or more callbacks, got only one parameter") _callbacks = self._callbacks.setdefault(attr, []) for callback in callbacks: if callback in _callbacks: continue _check_callback(callback, ('attr', 'old', 'new')) _callbacks.append(callback) def remove_on_change(self, attr: str, *callbacks: PropertyCallback) -> None: ''' Remove a callback from this object ''' if len(callbacks) == 0: raise ValueError("remove_on_change takes an attribute name and one or more callbacks, got only one parameter") _callbacks = self._callbacks.setdefault(attr, []) for callback in callbacks: _callbacks.remove(callback) def trigger(self, attr: str, old: Unknown, new: Unknown, hint: DocumentPatchedEvent | None = None, setter: Setter | None = None) -> None: ''' Trigger callbacks for ``attr`` on this object. Args: attr (str) : old (object) : new (object) : Returns: None ''' def invoke(): callbacks = self._callbacks.get(attr) if callbacks: for callback in callbacks: callback(attr, old, new) if hasattr(self, '_document') and self._document is not None: self._document._notify_change(self, attr, old, new, hint, setter, invoke) else: invoke() #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- def _nargs(fn): sig = signature(fn) all_names, default_values = get_param_info(sig) return len(all_names) - len(default_values) def _check_callback(callback, fargs, what="Callback functions"): '''Bokeh-internal function to check callback signature''' sig = signature(callback) formatted_args = str(sig) error_msg = what + " must have signature func(%s), got func%s" all_names, default_values = get_param_info(sig) nargs = len(all_names) - len(default_values) if nargs != len(fargs): raise ValueError(error_msg % (", ".join(fargs), formatted_args)) #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
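# --- Editor's example (hedged sketch) ---------------------------------------
# The two mixin interfaces defined above, as exposed on Bokeh models:
# on_change for property callbacks (whose (attr, old, new) signature
# _check_callback enforces) and on_event for event callbacks. The widget setup
# and handler names are illustrative.
from bokeh.events import ButtonClick
from bokeh.models import Button, Slider

slider = Slider(start=0, end=10, value=5, step=1, title="n")

def value_changed(attr, old, new):
    print(f"{attr} changed: {old} -> {new}")

slider.on_change("value", value_changed)

button = Button(label="go")
button.on_event(ButtonClick, lambda event: print("clicked:", event.event_name))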
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- from __future__ import annotations # isort:skip import pytest ; pytest #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Bokeh imports from bokeh.models import Button, Div, Plot # Module under test from bokeh import events # isort:skip #----------------------------------------------------------------------------- # Setup #----------------------------------------------------------------------------- concrete_events = {v for v in globals().values() if isinstance(v, type) and issubclass(v, events.Event) and v.event_name is not None} point_events = {v for v in globals().values() if isinstance(v, type) and issubclass(v, events.PointEvent)} #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- class EventCallback: def __init__(self, attributes=[]) -> None: self.event_name = None self.attributes = attributes self.payload = {} def __call__(self, event): self.event_name = event.event_name self.payload = {attr:getattr(event, attr) for attr in self.attributes} def test_event_metaclass() -> None: # All events currently in the namespace should be in the EVENT_CLASSES set assert len(concrete_events - set(events._CONCRETE_EVENT_CLASSES.values())) == 0 def test_common_decode_json() -> None: for event_name, event_cls in events._CONCRETE_EVENT_CLASSES.items(): if event_name is None: continue # Skip abstract base class event = events.Event.decode_json({ 'event_name': event_cls.event_name, 'event_values': {'model': {'id': 'test-model-id'}}, }) assert isinstance(event, events.Event) if isinstance(event, events.ModelEvent): assert event._model_id == 'test-model-id' def test_pointevent_subclass_decode_json() -> None: event_values = dict(model_id='test-model-id', sx=3, sy=-2, x=10, y=100) for event_cls in point_events: if event_cls.event_name is None: continue # Skip abstract base class event = events.Event.decode_json({'event_name': event_cls.event_name, 'event_values': event_values.copy()}) assert event.sx == 3 assert event.sy == -2 assert event.x == 10 assert event.y == 100 assert event._model_id == 'test-model-id' def test_panevent_decode_json() -> None: event_values = dict(model={'id': 'test-model-id'}, delta_x=0.1, delta_y=0.3, sx=3, sy=-2, x=10, y=100) event = events.Event.decode_json({'event_name': events.Pan.event_name, 'event_values': event_values.copy()}) assert event.delta_x == 0.1 assert event.delta_y == 0.3 assert event.sx == 3 assert event.sy == -2 assert event.x == 10 assert event.y == 100 assert event._model_id == 'test-model-id' def test_mousewheelevent_decode_json() -> None: event_values = dict(model={'id': 'test-model-id'}, delta=-0.1, sx=3, sy=-2, x=10, y=100) event = events.Event.decode_json({'event_name': events.MouseWheel.event_name, 'event_values': event_values.copy()}) assert event.delta == -0.1 assert event.sx == 3 assert event.sy == -2 assert event.x == 10 
    assert event.y == 100
    assert event._model_id == 'test-model-id'

def test_pinchevent_decode_json() -> None:
    event_values = dict(model={'id': 'test-model-id'}, scale=42, sx=3, sy=-2, x=10, y=100)
    event = events.Event.decode_json({'event_name': events.Pinch.event_name, 'event_values': event_values.copy()})
    assert event.scale == 42
    assert event.sx == 3
    assert event.sy == -2
    assert event.x == 10
    assert event.y == 100
    assert event._model_id == 'test-model-id'

def test_event_constructor_button() -> None:
    model = Button()
    event = events.ModelEvent(model)
    assert event._model_id == model.id

def test_event_constructor_div() -> None:
    model = Div()
    event = events.ModelEvent(model)
    assert event._model_id == model.id

def test_event_constructor_plot() -> None:
    model = Plot()
    event = events.ModelEvent(model)
    assert event._model_id == model.id

def test_buttonclick_constructor_button() -> None:
    model = Button()
    event = events.ButtonClick(model)
    assert event._model_id == model.id

def test_buttonclick_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.ButtonClick(Div())

def test_buttonclick_constructor_plot() -> None:
    with pytest.raises(ValueError):
        events.ButtonClick(Plot())

def test_lodstart_constructor_button() -> None:
    with pytest.raises(ValueError):
        events.LODStart(Button())

def test_lodstart_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.LODStart(Div())

def test_lodstart_constructor_plot() -> None:
    model = Plot()
    event = events.LODStart(model)
    assert event._model_id == model.id

def test_lodend_constructor_button() -> None:
    with pytest.raises(ValueError):
        events.LODEnd(Button())

def test_lodend_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.LODEnd(Div())

def test_lodend_constructor_plot() -> None:
    model = Plot()
    event = events.LODEnd(model)
    assert event._model_id == model.id

def test_plotevent_constructor_button() -> None:
    with pytest.raises(ValueError):
        events.PlotEvent(Button())

def test_plotevent_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.PlotEvent(Div())

def test_plotevent_constructor_plot() -> None:
    model = Plot()
    event = events.PlotEvent(model)
    assert event._model_id == model.id

def test_pointevent_constructor_plot() -> None:
    model = Plot()
    event = events.PointEvent(model, sx=3, sy=-2, x=10, y=100)
    assert event.sx == 3
    assert event.sy == -2
    assert event.x == 10
    assert event.y == 100
    assert event._model_id == model.id

def test_pointevent_constructor_button() -> None:
    with pytest.raises(ValueError):
        events.PointEvent(Button(), sx=3, sy=-2, x=10, y=100)

def test_pointevent_constructor_div() -> None:
    with pytest.raises(ValueError):
        events.PointEvent(Div(), sx=3, sy=-2, x=10, y=100)

def test_pointevent_subclass_constructor_plot() -> None:
    model = Plot()
    for subcls in point_events:
        event = subcls(model, sx=3, sy=-2, x=10, y=100)
        assert event.sx == 3
        assert event.sy == -2
        assert event.x == 10
        assert event.y == 100
        assert event._model_id == model.id

def test_pointevent_subclass_constructor_button() -> None:
    model = Button()
    for subcls in point_events:
        with pytest.raises(ValueError):
            subcls(model, sx=3, sy=-2, x=10, y=100)

def test_pointevent_subclass_constructor_div() -> None:
    model = Div()
    for subcls in point_events:
        with pytest.raises(ValueError):
            subcls(model, sx=3, sy=-2, x=10, y=100)

# Testing event callback invocation

def test_buttonclick_event_callbacks() -> None:
    button = Button()
    test_callback = EventCallback()
    button.on_event(events.ButtonClick, test_callback)
    assert test_callback.event_name is None
    button._trigger_event(events.ButtonClick(button))
    assert test_callback.event_name == events.ButtonClick.event_name

def test_atomic_plot_event_callbacks() -> None:
    plot = Plot()
    for event_cls in [events.LODStart, events.LODEnd]:
        test_callback = EventCallback()
        plot.on_event(event_cls, test_callback)
        assert test_callback.event_name is None
        plot._trigger_event(event_cls(plot))
        assert test_callback.event_name == event_cls.event_name

def test_pointevent_callbacks() -> None:
    plot = Plot()
    payload = dict(sx=3, sy=-2, x=10, y=100)
    for event_cls in point_events:
        test_callback = EventCallback(['sx', 'sy', 'x', 'y'])
        plot.on_event(event_cls, test_callback)
        assert test_callback.event_name is None
        plot._trigger_event(event_cls(plot, **payload))
        assert test_callback.event_name == event_cls.event_name
        assert test_callback.payload == payload

def test_mousewheel_callbacks() -> None:
    plot = Plot()
    payload = dict(sx=3, sy=-2, x=10, y=100, delta=5)
    test_callback = EventCallback(['sx', 'sy', 'x', 'y', 'delta'])
    plot.on_event(events.MouseWheel, test_callback)
    assert test_callback.event_name is None
    plot._trigger_event(events.MouseWheel(plot, **payload))
    assert test_callback.event_name == events.MouseWheel.event_name
    assert test_callback.payload == payload

def test_pan_callbacks() -> None:
    plot = Plot()
    payload = dict(sx=3, sy=-2, x=10, y=100, delta_x=2, delta_y=3.2)
    test_callback = EventCallback(['sx', 'sy', 'x', 'y', 'delta_x', 'delta_y'])
    plot.on_event(events.Pan, test_callback)
    assert test_callback.event_name is None
    plot._trigger_event(events.Pan(plot, **payload))
    assert test_callback.event_name == events.Pan.event_name
    assert test_callback.payload == payload

def test_pinch_callbacks() -> None:
    plot = Plot()
    payload = dict(sx=3, sy=-2, x=10, y=100, scale=42)
    test_callback = EventCallback(['sx', 'sy', 'x', 'y', 'scale'])
    plot.on_event(events.Pinch, test_callback)
    assert test_callback.event_name is None
    plot._trigger_event(events.Pinch(plot, **payload))
    assert test_callback.event_name == events.Pinch.event_name
    assert test_callback.payload == payload

#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
bokeh/bokeh
tests/unit/bokeh/test_events.py
bokeh/util/callback_manager.py
"""Switches on Zigbee Home Automation networks.""" from __future__ import annotations import functools from typing import Any from zigpy.zcl.clusters.general import OnOff from zigpy.zcl.foundation import Status from homeassistant.components.switch import DOMAIN, SwitchEntity from homeassistant.const import STATE_ON, STATE_UNAVAILABLE from homeassistant.core import State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .core import discovery from .core.const import ( CHANNEL_ON_OFF, DATA_ZHA, DATA_ZHA_DISPATCHERS, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED, ) from .core.registries import ZHA_ENTITIES from .entity import ZhaEntity, ZhaGroupEntity STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN) GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, DOMAIN) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Zigbee Home Automation switch from config entry.""" entities_to_create = hass.data[DATA_ZHA][DOMAIN] unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( discovery.async_add_entities, async_add_entities, entities_to_create ), ) hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub) class BaseSwitch(SwitchEntity): """Common base class for zha switches.""" def __init__(self, *args, **kwargs): """Initialize the ZHA switch.""" self._on_off_channel = None self._state = None super().__init__(*args, **kwargs) @property def is_on(self) -> bool: """Return if the switch is on based on the statemachine.""" if self._state is None: return False return self._state async def async_turn_on(self, **kwargs) -> None: """Turn the entity on.""" result = await self._on_off_channel.on() if not isinstance(result, list) or result[1] is not Status.SUCCESS: return self._state = True self.async_write_ha_state() async def async_turn_off(self, **kwargs) -> None: """Turn the entity off.""" result = await self._on_off_channel.off() if not isinstance(result, list) or result[1] is not Status.SUCCESS: return self._state = False self.async_write_ha_state() @STRICT_MATCH(channel_names=CHANNEL_ON_OFF) class Switch(BaseSwitch, ZhaEntity): """ZHA switch.""" def __init__(self, unique_id, zha_device, channels, **kwargs): """Initialize the ZHA switch.""" super().__init__(unique_id, zha_device, channels, **kwargs) self._on_off_channel = self.cluster_channels.get(CHANNEL_ON_OFF) @callback def async_set_state(self, attr_id: int, attr_name: str, value: Any): """Handle state update from channel.""" self._state = bool(value) self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Run when about to be added to hass.""" await super().async_added_to_hass() self.async_accept_signal( self._on_off_channel, SIGNAL_ATTR_UPDATED, self.async_set_state ) @callback def async_restore_last_state(self, last_state) -> None: """Restore previous state.""" self._state = last_state.state == STATE_ON async def async_update(self) -> None: """Attempt to retrieve on off state from the switch.""" await super().async_update() if self._on_off_channel: state = await self._on_off_channel.get_attribute_value("on_off") if state is not None: self._state = state @GROUP_MATCH() class SwitchGroup(BaseSwitch, ZhaGroupEntity): """Representation of a switch group.""" def __init__( self, entity_ids: list[str], unique_id: str, group_id: int, zha_device, **kwargs ) -> None: """Initialize a switch group.""" super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) self._available: bool = False group = 
self.zha_device.gateway.get_group(self._group_id)
        self._on_off_channel = group.endpoint[OnOff.cluster_id]

    async def async_update(self) -> None:
        """Query all members and determine the switch group state."""
        all_states = [self.hass.states.get(x) for x in self._entity_ids]
        states: list[State] = list(filter(None, all_states))
        on_states = [state for state in states if state.state == STATE_ON]
        self._state = len(on_states) > 0
        self._available = any(state.state != STATE_UNAVAILABLE for state in states)
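The group logic in SwitchGroup.async_update reduces the member entity states to an on/off flag plus availability. A standalone sketch of that reduction, using hypothetical state lists; the string values mirror Home Assistant's STATE_ON and STATE_UNAVAILABLE constants.

# Standalone sketch of the member-state reduction in SwitchGroup.async_update.
STATE_ON = "on"
STATE_UNAVAILABLE = "unavailable"

def reduce_group_state(member_states):
    """Return (is_on, available) for a list of member states (None = missing entity)."""
    states = [s for s in member_states if s is not None]
    is_on = any(s == STATE_ON for s in states)               # on if any member is on
    available = any(s != STATE_UNAVAILABLE for s in states)  # available if any member is reachable
    return is_on, available

assert reduce_group_state(["on", "off", None]) == (True, True)
assert reduce_group_state(["unavailable", "unavailable"]) == (False, False)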
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/zha/switch.py
"""Support for Frontier Silicon Devices (Medion, Hama, Auna,...).""" import logging from afsapi import AFSAPI import requests import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_SELECT_SOURCE, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP, ) from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) SUPPORT_FRONTIER_SILICON = ( SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_SEEK | SUPPORT_PLAY_MEDIA | SUPPORT_PLAY | SUPPORT_STOP | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE ) DEFAULT_PORT = 80 DEFAULT_PASSWORD = "1234" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string, vol.Optional(CONF_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Frontier Silicon platform.""" if discovery_info is not None: async_add_entities( [AFSAPIDevice(discovery_info["ssdp_description"], DEFAULT_PASSWORD, None)], True, ) return True host = config.get(CONF_HOST) port = config.get(CONF_PORT) password = config.get(CONF_PASSWORD) name = config.get(CONF_NAME) try: async_add_entities( [AFSAPIDevice(f"http://{host}:{port}/device", password, name)], True ) _LOGGER.debug("FSAPI device %s:%s -> %s", host, port, password) return True except requests.exceptions.RequestException: _LOGGER.error( "Could not add the FSAPI device at %s:%s -> %s", host, port, password ) return False class AFSAPIDevice(MediaPlayerEntity): """Representation of a Frontier Silicon device on the network.""" def __init__(self, device_url, password, name): """Initialize the Frontier Silicon API device.""" self._device_url = device_url self._password = password self._state = None self._name = name self._title = None self._artist = None self._album_name = None self._mute = None self._source = None self._source_list = None self._media_image_url = None self._max_volume = None self._volume_level = None # Properties @property def fs_device(self): """ Create a fresh fsapi session. A new session is created for each request in case someone else connected to the device in between the updates and invalidated the existing session (i.e UNDOK). 
""" return AFSAPI(self._device_url, self._password) @property def name(self): """Return the device name.""" return self._name @property def media_title(self): """Title of current playing media.""" return self._title @property def media_artist(self): """Artist of current playing media, music track only.""" return self._artist @property def media_album_name(self): """Album name of current playing media, music track only.""" return self._album_name @property def media_content_type(self): """Content type of current playing media.""" return MEDIA_TYPE_MUSIC @property def supported_features(self): """Flag of media commands that are supported.""" return SUPPORT_FRONTIER_SILICON @property def state(self): """Return the state of the player.""" return self._state # source @property def source_list(self): """List of available input sources.""" return self._source_list @property def source(self): """Name of the current input source.""" return self._source @property def media_image_url(self): """Image url of current playing media.""" return self._media_image_url @property def volume_level(self): """Volume level of the media player (0..1).""" return self._volume_level async def async_update(self): """Get the latest date and update device state.""" fs_device = self.fs_device if not self._name: self._name = await fs_device.get_friendly_name() if not self._source_list: self._source_list = await fs_device.get_mode_list() # The API seems to include 'zero' in the number of steps (e.g. if the range is # 0-40 then get_volume_steps returns 41) subtract one to get the max volume. # If call to get_volume fails set to 0 and try again next time. if not self._max_volume: self._max_volume = int(await fs_device.get_volume_steps() or 1) - 1 if await fs_device.get_power(): status = await fs_device.get_play_status() self._state = { "playing": STATE_PLAYING, "paused": STATE_PAUSED, "stopped": STATE_IDLE, "unknown": STATE_UNKNOWN, None: STATE_IDLE, }.get(status, STATE_UNKNOWN) else: self._state = STATE_OFF if self._state != STATE_OFF: info_name = await fs_device.get_play_name() info_text = await fs_device.get_play_text() self._title = " - ".join(filter(None, [info_name, info_text])) self._artist = await fs_device.get_play_artist() self._album_name = await fs_device.get_play_album() self._source = await fs_device.get_mode() self._mute = await fs_device.get_mute() self._media_image_url = await fs_device.get_play_graphic() volume = await self.fs_device.get_volume() # Prevent division by zero if max_volume not known yet self._volume_level = float(volume or 0) / (self._max_volume or 1) else: self._title = None self._artist = None self._album_name = None self._source = None self._mute = None self._media_image_url = None self._volume_level = None # Management actions # power control async def async_turn_on(self): """Turn on the device.""" await self.fs_device.set_power(True) async def async_turn_off(self): """Turn off the device.""" await self.fs_device.set_power(False) async def async_media_play(self): """Send play command.""" await self.fs_device.play() async def async_media_pause(self): """Send pause command.""" await self.fs_device.pause() async def async_media_play_pause(self): """Send play/pause command.""" if "playing" in self._state: await self.fs_device.pause() else: await self.fs_device.play() async def async_media_stop(self): """Send play/pause command.""" await self.fs_device.pause() async def async_media_previous_track(self): """Send previous track command (results in rewind).""" await self.fs_device.rewind() async 
def async_media_next_track(self): """Send next track command (results in fast-forward).""" await self.fs_device.forward() # mute @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return self._mute async def async_mute_volume(self, mute): """Send mute command.""" await self.fs_device.set_mute(mute) # volume async def async_volume_up(self): """Send volume up command.""" volume = await self.fs_device.get_volume() volume = int(volume or 0) + 1 await self.fs_device.set_volume(min(volume, self._max_volume)) async def async_volume_down(self): """Send volume down command.""" volume = await self.fs_device.get_volume() volume = int(volume or 0) - 1 await self.fs_device.set_volume(max(volume, 0)) async def async_set_volume_level(self, volume): """Set volume command.""" if self._max_volume: # Can't do anything sensible if not set volume = int(volume * self._max_volume) await self.fs_device.set_volume(volume) async def async_select_source(self, source): """Select input source.""" await self.fs_device.set_mode(source)
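A worked example of the volume normalization performed in async_update above: the device reports the number of steps including step zero, so the maximum is steps minus one, and the level is the raw volume divided by that maximum. Device calls are stubbed out and the values are illustrative.

# Worked example of the volume normalization used by AFSAPIDevice.async_update.
def normalized_volume(raw_volume, volume_steps):
    max_volume = int(volume_steps or 1) - 1            # API counts step 0, so subtract one
    return float(raw_volume or 0) / (max_volume or 1)  # guard: max may be unknown (0)

assert normalized_volume(20, 41) == 0.5    # a 0-40 device reports 41 steps
assert normalized_volume(40, 41) == 1.0
assert normalized_volume(None, None) == 0.0  # failed reads fall back safely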
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/frontier_silicon/media_player.py
"""Support for AquaLogic devices.""" from datetime import timedelta import logging import threading import time from aqualogic.core import AquaLogic import voluptuous as vol from homeassistant.const import ( CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.helpers import config_validation as cv _LOGGER = logging.getLogger(__name__) DOMAIN = "aqualogic" UPDATE_TOPIC = f"{DOMAIN}_update" CONF_UNIT = "unit" RECONNECT_INTERVAL = timedelta(seconds=10) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( {vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.port} ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up AquaLogic platform.""" host = config[DOMAIN][CONF_HOST] port = config[DOMAIN][CONF_PORT] processor = AquaLogicProcessor(hass, host, port) hass.data[DOMAIN] = processor hass.bus.listen_once(EVENT_HOMEASSISTANT_START, processor.start_listen) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, processor.shutdown) _LOGGER.debug("AquaLogicProcessor %s:%i initialized", host, port) return True class AquaLogicProcessor(threading.Thread): """AquaLogic event processor thread.""" def __init__(self, hass, host, port): """Initialize the data object.""" super().__init__(daemon=True) self._hass = hass self._host = host self._port = port self._shutdown = False self._panel = None def start_listen(self, event): """Start event-processing thread.""" _LOGGER.debug("Event processing thread started") self.start() def shutdown(self, event): """Signal shutdown of processing event.""" _LOGGER.debug("Event processing signaled exit") self._shutdown = True def data_changed(self, panel): """Aqualogic data changed callback.""" self._hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC) def run(self): """Event thread.""" while True: self._panel = AquaLogic() self._panel.connect(self._host, self._port) self._panel.process(self.data_changed) if self._shutdown: return _LOGGER.error("Connection to %s:%d lost", self._host, self._port) time.sleep(RECONNECT_INTERVAL.seconds) @property def panel(self): """Retrieve the AquaLogic object.""" return self._panel
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/aqualogic/__init__.py
"""Support for Azure DevOps.""" from __future__ import annotations import logging from typing import Any from aioazuredevops.client import DevOpsClient import aiohttp from homeassistant.components.azure_devops.const import ( CONF_ORG, CONF_PAT, CONF_PROJECT, DATA_AZURE_DEVOPS_CLIENT, DOMAIN, ) from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType, HomeAssistantType _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: """Set up Azure DevOps from a config entry.""" client = DevOpsClient() try: if entry.data[CONF_PAT] is not None: await client.authorize(entry.data[CONF_PAT], entry.data[CONF_ORG]) if not client.authorized: _LOGGER.warning( "Could not authorize with Azure DevOps. You may need to update your token" ) hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_REAUTH}, data=entry.data, ) ) return False await client.get_project(entry.data[CONF_ORG], entry.data[CONF_PROJECT]) except aiohttp.ClientError as exception: _LOGGER.warning(exception) raise ConfigEntryNotReady from exception instance_key = f"{DOMAIN}_{entry.data[CONF_ORG]}_{entry.data[CONF_PROJECT]}" hass.data.setdefault(instance_key, {})[DATA_AZURE_DEVOPS_CLIENT] = client # Setup components hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, "sensor") ) return True async def async_unload_entry(hass: HomeAssistantType, entry: ConfigType) -> bool: """Unload Azure DevOps config entry.""" del hass.data[f"{DOMAIN}_{entry.data[CONF_ORG]}_{entry.data[CONF_PROJECT]}"] return await hass.config_entries.async_forward_entry_unload(entry, "sensor") class AzureDevOpsEntity(Entity): """Defines a base Azure DevOps entity.""" def __init__(self, organization: str, project: str, name: str, icon: str) -> None: """Initialize the Azure DevOps entity.""" self._name = name self._icon = icon self._available = True self.organization = organization self.project = project @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def icon(self) -> str: """Return the mdi icon of the entity.""" return self._icon @property def available(self) -> bool: """Return True if entity is available.""" return self._available async def async_update(self) -> None: """Update Azure DevOps entity.""" if await self._azure_devops_update(): self._available = True else: if self._available: _LOGGER.debug( "An error occurred while updating Azure DevOps sensor", exc_info=True, ) self._available = False async def _azure_devops_update(self) -> None: """Update Azure DevOps entity.""" raise NotImplementedError() class AzureDevOpsDeviceEntity(AzureDevOpsEntity): """Defines a Azure DevOps device entity.""" @property def device_info(self) -> dict[str, Any]: """Return device information about this Azure DevOps instance.""" return { "identifiers": { ( DOMAIN, self.organization, self.project, ) }, "manufacturer": self.organization, "name": self.project, "entry_type": "service", }
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/azure_devops/__init__.py
"""Blueprint models.""" from __future__ import annotations import asyncio import logging import pathlib import shutil from typing import Any from awesomeversion import AwesomeVersion import voluptuous as vol from voluptuous.humanize import humanize_error from homeassistant import loader from homeassistant.const import ( CONF_DEFAULT, CONF_DOMAIN, CONF_NAME, CONF_PATH, __version__, ) from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.util import yaml from .const import ( BLUEPRINT_FOLDER, CONF_BLUEPRINT, CONF_HOMEASSISTANT, CONF_INPUT, CONF_MIN_VERSION, CONF_SOURCE_URL, CONF_USE_BLUEPRINT, DOMAIN, ) from .errors import ( BlueprintException, FailedToLoad, FileAlreadyExists, InvalidBlueprint, InvalidBlueprintInputs, MissingInput, ) from .schemas import BLUEPRINT_INSTANCE_FIELDS, BLUEPRINT_SCHEMA class Blueprint: """Blueprint of a configuration structure.""" def __init__( self, data: dict, *, path: str | None = None, expected_domain: str | None = None, ) -> None: """Initialize a blueprint.""" try: data = self.data = BLUEPRINT_SCHEMA(data) except vol.Invalid as err: raise InvalidBlueprint(expected_domain, path, data, err) from err # In future, we will treat this as "incorrect" and allow to recover from this data_domain = data[CONF_BLUEPRINT][CONF_DOMAIN] if expected_domain is not None and data_domain != expected_domain: raise InvalidBlueprint( expected_domain, path or self.name, data, f"Found incorrect blueprint type {data_domain}, expected {expected_domain}", ) self.domain = data_domain missing = yaml.extract_inputs(data) - set(data[CONF_BLUEPRINT][CONF_INPUT]) if missing: raise InvalidBlueprint( data_domain, path or self.name, data, f"Missing input definition for {', '.join(missing)}", ) @property def name(self) -> str: """Return blueprint name.""" return self.data[CONF_BLUEPRINT][CONF_NAME] @property def inputs(self) -> dict: """Return blueprint inputs.""" return self.data[CONF_BLUEPRINT][CONF_INPUT] @property def metadata(self) -> dict: """Return blueprint metadata.""" return self.data[CONF_BLUEPRINT] def update_metadata(self, *, source_url: str | None = None) -> None: """Update metadata.""" if source_url is not None: self.data[CONF_BLUEPRINT][CONF_SOURCE_URL] = source_url def yaml(self) -> str: """Dump blueprint as YAML.""" return yaml.dump(self.data) @callback def validate(self) -> list[str] | None: """Test if the Home Assistant installation supports this blueprint. Return list of errors if not valid. 
""" errors = [] metadata = self.metadata min_version = metadata.get(CONF_HOMEASSISTANT, {}).get(CONF_MIN_VERSION) if min_version is not None and AwesomeVersion(__version__) < AwesomeVersion( min_version ): errors.append(f"Requires at least Home Assistant {min_version}") return errors or None class BlueprintInputs: """Inputs for a blueprint.""" def __init__( self, blueprint: Blueprint, config_with_inputs: dict[str, Any] ) -> None: """Instantiate a blueprint inputs object.""" self.blueprint = blueprint self.config_with_inputs = config_with_inputs @property def inputs(self): """Return the inputs.""" return self.config_with_inputs[CONF_USE_BLUEPRINT][CONF_INPUT] @property def inputs_with_default(self): """Return the inputs and fallback to defaults.""" no_input = set(self.blueprint.inputs) - set(self.inputs) inputs_with_default = dict(self.inputs) for inp in no_input: blueprint_input = self.blueprint.inputs[inp] if isinstance(blueprint_input, dict) and CONF_DEFAULT in blueprint_input: inputs_with_default[inp] = blueprint_input[CONF_DEFAULT] return inputs_with_default def validate(self) -> None: """Validate the inputs.""" missing = set(self.blueprint.inputs) - set(self.inputs_with_default) if missing: raise MissingInput(self.blueprint.domain, self.blueprint.name, missing) # In future we can see if entities are correct domain, areas exist etc # using the new selector helper. @callback def async_substitute(self) -> dict: """Get the blueprint value with the inputs substituted.""" processed = yaml.substitute(self.blueprint.data, self.inputs_with_default) combined = {**processed, **self.config_with_inputs} # From config_with_inputs combined.pop(CONF_USE_BLUEPRINT) # From blueprint combined.pop(CONF_BLUEPRINT) return combined class DomainBlueprints: """Blueprints for a specific domain.""" def __init__( self, hass: HomeAssistant, domain: str, logger: logging.Logger, ) -> None: """Initialize a domain blueprints instance.""" self.hass = hass self.domain = domain self.logger = logger self._blueprints = {} self._load_lock = asyncio.Lock() hass.data.setdefault(DOMAIN, {})[domain] = self @property def blueprint_folder(self) -> pathlib.Path: """Return the blueprint folder.""" return pathlib.Path(self.hass.config.path(BLUEPRINT_FOLDER, self.domain)) @callback def async_reset_cache(self) -> None: """Reset the blueprint cache.""" self._blueprints = {} def _load_blueprint(self, blueprint_path) -> Blueprint: """Load a blueprint.""" try: blueprint_data = yaml.load_yaml(self.blueprint_folder / blueprint_path) except FileNotFoundError as err: raise FailedToLoad( self.domain, blueprint_path, FileNotFoundError(f"Unable to find {blueprint_path}"), ) from err except HomeAssistantError as err: raise FailedToLoad(self.domain, blueprint_path, err) from err return Blueprint( blueprint_data, expected_domain=self.domain, path=blueprint_path ) def _load_blueprints(self) -> dict[str, Blueprint | BlueprintException]: """Load all the blueprints.""" blueprint_folder = pathlib.Path( self.hass.config.path(BLUEPRINT_FOLDER, self.domain) ) results = {} for blueprint_path in blueprint_folder.glob("**/*.yaml"): blueprint_path = str(blueprint_path.relative_to(blueprint_folder)) if self._blueprints.get(blueprint_path) is None: try: self._blueprints[blueprint_path] = self._load_blueprint( blueprint_path ) except BlueprintException as err: self._blueprints[blueprint_path] = None results[blueprint_path] = err continue results[blueprint_path] = self._blueprints[blueprint_path] return results async def async_get_blueprints( self, ) -> dict[str, 
Blueprint | BlueprintException]: """Get all the blueprints.""" async with self._load_lock: return await self.hass.async_add_executor_job(self._load_blueprints) async def async_get_blueprint(self, blueprint_path: str) -> Blueprint: """Get a blueprint.""" def load_from_cache(): """Load blueprint from cache.""" blueprint = self._blueprints[blueprint_path] if blueprint is None: raise FailedToLoad( self.domain, blueprint_path, FileNotFoundError(f"Unable to find {blueprint_path}"), ) return blueprint if blueprint_path in self._blueprints: return load_from_cache() async with self._load_lock: # Check it again if blueprint_path in self._blueprints: return load_from_cache() try: blueprint = await self.hass.async_add_executor_job( self._load_blueprint, blueprint_path ) except Exception: self._blueprints[blueprint_path] = None raise self._blueprints[blueprint_path] = blueprint return blueprint async def async_inputs_from_config( self, config_with_blueprint: dict ) -> BlueprintInputs: """Process a blueprint config.""" try: config_with_blueprint = BLUEPRINT_INSTANCE_FIELDS(config_with_blueprint) except vol.Invalid as err: raise InvalidBlueprintInputs( self.domain, humanize_error(config_with_blueprint, err) ) from err bp_conf = config_with_blueprint[CONF_USE_BLUEPRINT] blueprint = await self.async_get_blueprint(bp_conf[CONF_PATH]) inputs = BlueprintInputs(blueprint, config_with_blueprint) inputs.validate() return inputs async def async_remove_blueprint(self, blueprint_path: str) -> None: """Remove a blueprint file.""" path = self.blueprint_folder / blueprint_path await self.hass.async_add_executor_job(path.unlink) self._blueprints[blueprint_path] = None def _create_file(self, blueprint: Blueprint, blueprint_path: str) -> None: """Create blueprint file.""" path = pathlib.Path( self.hass.config.path(BLUEPRINT_FOLDER, self.domain, blueprint_path) ) if path.exists(): raise FileAlreadyExists(self.domain, blueprint_path) path.parent.mkdir(parents=True, exist_ok=True) path.write_text(blueprint.yaml()) async def async_add_blueprint( self, blueprint: Blueprint, blueprint_path: str ) -> None: """Add a blueprint.""" if not blueprint_path.endswith(".yaml"): blueprint_path = f"{blueprint_path}.yaml" await self.hass.async_add_executor_job( self._create_file, blueprint, blueprint_path ) self._blueprints[blueprint_path] = blueprint async def async_populate(self) -> None: """Create folder if it doesn't exist and populate with examples.""" integration = await loader.async_get_integration(self.hass, self.domain) def populate(): if self.blueprint_folder.exists(): return shutil.copytree( integration.file_path / BLUEPRINT_FOLDER, self.blueprint_folder / HA_DOMAIN, ) await self.hass.async_add_executor_job(populate)
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/blueprint/models.py
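The BlueprintInputs.async_substitute flow above boils down to walking the blueprint body and swapping input placeholders for user-supplied values, falling back to each input's declared default. Below is a minimal standalone sketch of that idea; the Input dataclass stands in for the real `!input` YAML tag and none of these names come from homeassistant.util.yaml.

from dataclasses import dataclass
from typing import Any


@dataclass(frozen=True)
class Input:
    """Hypothetical placeholder standing in for an `!input name` YAML tag."""

    name: str


def substitute(node: Any, inputs: dict[str, Any]) -> Any:
    """Recursively replace Input placeholders with supplied values."""
    if isinstance(node, Input):
        if node.name not in inputs:
            raise KeyError(f"Missing input {node.name}")
        return inputs[node.name]
    if isinstance(node, dict):
        return {key: substitute(value, inputs) for key, value in node.items()}
    if isinstance(node, list):
        return [substitute(item, inputs) for item in node]
    return node


# Example: a blueprint body with two placeholders; one input is left to its default.
blueprint_body = {
    "trigger": {"platform": "state", "entity_id": Input("motion_sensor")},
    "action": {"service": "light.turn_on", "target": Input("target_light")},
}
declared_inputs = {"motion_sensor": {}, "target_light": {"default": "light.hall"}}
supplied = {"motion_sensor": "binary_sensor.porch"}

# Mirror inputs_with_default: fall back to defaults for inputs not supplied.
with_defaults = {
    name: supplied.get(name, meta.get("default"))
    for name, meta in declared_inputs.items()
}
print(substitute(blueprint_body, with_defaults))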
"""Support for Genius Hub switch/outlet devices.""" from datetime import timedelta import voluptuous as vol from homeassistant.components.switch import DEVICE_CLASS_OUTLET, SwitchEntity from homeassistant.const import ATTR_ENTITY_ID from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.typing import ConfigType, HomeAssistantType from . import ATTR_DURATION, DOMAIN, GeniusZone GH_ON_OFF_ZONE = "on / off" SVC_SET_SWITCH_OVERRIDE = "set_switch_override" SET_SWITCH_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Optional(ATTR_DURATION): vol.All( cv.time_period, vol.Range(min=timedelta(minutes=5), max=timedelta(days=1)), ), } ) async def async_setup_platform( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None ) -> None: """Set up the Genius Hub switch entities.""" if discovery_info is None: return broker = hass.data[DOMAIN]["broker"] async_add_entities( [ GeniusSwitch(broker, z) for z in broker.client.zone_objs if z.data["type"] == GH_ON_OFF_ZONE ] ) # Register custom services platform = entity_platform.current_platform.get() platform.async_register_entity_service( SVC_SET_SWITCH_OVERRIDE, SET_SWITCH_OVERRIDE_SCHEMA, "async_turn_on", ) class GeniusSwitch(GeniusZone, SwitchEntity): """Representation of a Genius Hub switch.""" @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_OUTLET @property def is_on(self) -> bool: """Return the current state of the on/off zone. The zone is considered 'on' if & only if it is override/on (e.g. timer/on is 'off'). """ return self._zone.data["mode"] == "override" and self._zone.data["setpoint"] async def async_turn_off(self, **kwargs) -> None: """Send the zone to Timer mode. The zone is deemed 'off' in this mode, although the plugs may actually be on. """ await self._zone.set_mode("timer") async def async_turn_on(self, **kwargs) -> None: """Set the zone to override/on ({'setpoint': true}) for x seconds.""" await self._zone.set_override(1, kwargs.get(ATTR_DURATION, 3600))
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/geniushub/switch.py
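SET_SWITCH_OVERRIDE_SCHEMA above clamps the override duration to between five minutes and one day. A hedged standalone sketch of that validation on plain voluptuous follows; the time_period helper is a simplified stand-in for Home Assistant's cv.time_period, and cv.entity_id is replaced by a bare str check so the snippet runs without Home Assistant installed.

from datetime import timedelta

import voluptuous as vol


def time_period(value: dict) -> timedelta:
    """Stand-in for cv.time_period: accept a {'minutes': N, ...} mapping."""
    return timedelta(**value)


schema = vol.Schema(
    {
        vol.Required("entity_id"): str,
        vol.Optional("duration"): vol.All(
            time_period,
            vol.Range(min=timedelta(minutes=5), max=timedelta(days=1)),
        ),
    }
)

# A 30-minute override passes validation.
print(schema({"entity_id": "switch.pond_pump", "duration": {"minutes": 30}}))

try:
    # One minute is below the 5-minute floor and is rejected.
    schema({"entity_id": "switch.pond_pump", "duration": {"minutes": 1}})
except vol.Invalid as err:
    print(f"rejected: {err}")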
"""Support for exposing Home Assistant via Zeroconf.""" from __future__ import annotations from contextlib import suppress import fnmatch from functools import partial import ipaddress import logging import socket from typing import Any, TypedDict import voluptuous as vol from zeroconf import ( Error as ZeroconfError, InterfaceChoice, IPVersion, NonUniqueNameException, ServiceInfo, ServiceStateChange, Zeroconf, ) from homeassistant import util from homeassistant.const import ( EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, __version__, ) from homeassistant.core import Event, HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.singleton import singleton from homeassistant.loader import async_get_homekit, async_get_zeroconf from .models import HaServiceBrowser, HaZeroconf from .usage import install_multiple_zeroconf_catcher _LOGGER = logging.getLogger(__name__) DOMAIN = "zeroconf" ZEROCONF_TYPE = "_home-assistant._tcp.local." HOMEKIT_TYPES = [ "_hap._tcp.local.", # Thread based devices "_hap._udp.local.", ] CONF_DEFAULT_INTERFACE = "default_interface" CONF_IPV6 = "ipv6" DEFAULT_DEFAULT_INTERFACE = True DEFAULT_IPV6 = True HOMEKIT_PAIRED_STATUS_FLAG = "sf" HOMEKIT_MODEL = "md" # Property key=value has a max length of 255 # so we use 230 to leave space for key= MAX_PROPERTY_VALUE_LEN = 230 # Dns label max length MAX_NAME_LEN = 63 CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional( CONF_DEFAULT_INTERFACE, default=DEFAULT_DEFAULT_INTERFACE ): cv.boolean, vol.Optional(CONF_IPV6, default=DEFAULT_IPV6): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) class HaServiceInfo(TypedDict): """Prepared info from mDNS entries.""" host: str port: int | None hostname: str type: str name: str properties: dict[str, Any] @singleton(DOMAIN) async def async_get_instance(hass: HomeAssistant) -> HaZeroconf: """Zeroconf instance to be shared with other integrations that use it.""" return await _async_get_instance(hass) async def _async_get_instance(hass: HomeAssistant, **zcargs: Any) -> HaZeroconf: logging.getLogger("zeroconf").setLevel(logging.NOTSET) zeroconf = await hass.async_add_executor_job(partial(HaZeroconf, **zcargs)) install_multiple_zeroconf_catcher(zeroconf) def _stop_zeroconf(_event: Event) -> None: """Stop Zeroconf.""" zeroconf.ha_close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _stop_zeroconf) return zeroconf async def async_setup(hass: HomeAssistant, config: dict) -> bool: """Set up Zeroconf and make Home Assistant discoverable.""" zc_config = config.get(DOMAIN, {}) zc_args: dict = {} if zc_config.get(CONF_DEFAULT_INTERFACE, DEFAULT_DEFAULT_INTERFACE): zc_args["interfaces"] = InterfaceChoice.Default if not zc_config.get(CONF_IPV6, DEFAULT_IPV6): zc_args["ip_version"] = IPVersion.V4Only zeroconf = hass.data[DOMAIN] = await _async_get_instance(hass, **zc_args) async def _async_zeroconf_hass_start(_event: Event) -> None: """Expose Home Assistant on zeroconf when it starts. Wait till started or otherwise HTTP is not up and running. 
""" uuid = await hass.helpers.instance_id.async_get() await hass.async_add_executor_job( _register_hass_zc_service, hass, zeroconf, uuid ) async def _async_zeroconf_hass_started(_event: Event) -> None: """Start the service browser.""" await _async_start_zeroconf_browser(hass, zeroconf) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_zeroconf_hass_start) hass.bus.async_listen_once( EVENT_HOMEASSISTANT_STARTED, _async_zeroconf_hass_started ) return True def _register_hass_zc_service( hass: HomeAssistant, zeroconf: HaZeroconf, uuid: str ) -> None: # Get instance UUID valid_location_name = _truncate_location_name_to_valid(hass.config.location_name) params = { "location_name": valid_location_name, "uuid": uuid, "version": __version__, "external_url": "", "internal_url": "", # Old base URL, for backward compatibility "base_url": "", # Always needs authentication "requires_api_password": True, } # Get instance URL's with suppress(NoURLAvailableError): params["external_url"] = get_url(hass, allow_internal=False) with suppress(NoURLAvailableError): params["internal_url"] = get_url(hass, allow_external=False) # Set old base URL based on external or internal params["base_url"] = params["external_url"] or params["internal_url"] host_ip = util.get_local_ip() try: host_ip_pton = socket.inet_pton(socket.AF_INET, host_ip) except OSError: host_ip_pton = socket.inet_pton(socket.AF_INET6, host_ip) _suppress_invalid_properties(params) info = ServiceInfo( ZEROCONF_TYPE, name=f"{valid_location_name}.{ZEROCONF_TYPE}", server=f"{uuid}.local.", addresses=[host_ip_pton], port=hass.http.server_port, properties=params, ) _LOGGER.info("Starting Zeroconf broadcast") try: zeroconf.register_service(info) except NonUniqueNameException: _LOGGER.error( "Home Assistant instance with identical name present in the local network" ) async def _async_start_zeroconf_browser( hass: HomeAssistant, zeroconf: HaZeroconf ) -> None: """Start the zeroconf browser.""" zeroconf_types = await async_get_zeroconf(hass) homekit_models = await async_get_homekit(hass) types = list(zeroconf_types) for hk_type in HOMEKIT_TYPES: if hk_type not in zeroconf_types: types.append(hk_type) def service_update( zeroconf: Zeroconf, service_type: str, name: str, state_change: ServiceStateChange, ) -> None: """Service state changed.""" nonlocal zeroconf_types nonlocal homekit_models if state_change == ServiceStateChange.Removed: return try: service_info = zeroconf.get_service_info(service_type, name) except ZeroconfError: _LOGGER.exception("Failed to get info for device %s", name) return if not service_info: # Prevent the browser thread from collapsing as # service_info can be None _LOGGER.debug("Failed to get info for device %s", name) return info = info_from_service(service_info) if not info: # Prevent the browser thread from collapsing _LOGGER.debug("Failed to get addresses for device %s", name) return _LOGGER.debug("Discovered new device %s %s", name, info) # If we can handle it as a HomeKit discovery, we do that here. if service_type in HOMEKIT_TYPES: discovery_was_forwarded = handle_homekit(hass, homekit_models, info) # Continue on here as homekit_controller # still needs to get updates on devices # so it can see when the 'c#' field is updated. 
# # We only send updates to homekit_controller # if the device is already paired in order to avoid # offering a second discovery for the same device if ( discovery_was_forwarded and HOMEKIT_PAIRED_STATUS_FLAG in info["properties"] ): try: # 0 means paired and not discoverable by iOS clients) if int(info["properties"][HOMEKIT_PAIRED_STATUS_FLAG]): return except ValueError: # HomeKit pairing status unknown # likely bad homekit data return if "name" in info: lowercase_name: str | None = info["name"].lower() else: lowercase_name = None if "macaddress" in info["properties"]: uppercase_mac: str | None = info["properties"]["macaddress"].upper() else: uppercase_mac = None # Not all homekit types are currently used for discovery # so not all service type exist in zeroconf_types for entry in zeroconf_types.get(service_type, []): if len(entry) > 1: if ( uppercase_mac is not None and "macaddress" in entry and not fnmatch.fnmatch(uppercase_mac, entry["macaddress"]) ): continue if ( lowercase_name is not None and "name" in entry and not fnmatch.fnmatch(lowercase_name, entry["name"]) ): continue hass.add_job( hass.config_entries.flow.async_init( entry["domain"], context={"source": DOMAIN}, data=info ) # type: ignore ) _LOGGER.debug("Starting Zeroconf browser") HaServiceBrowser(zeroconf, types, handlers=[service_update]) def handle_homekit( hass: HomeAssistant, homekit_models: dict[str, str], info: HaServiceInfo ) -> bool: """Handle a HomeKit discovery. Return if discovery was forwarded. """ model = None props = info["properties"] for key in props: if key.lower() == HOMEKIT_MODEL: model = props[key] break if model is None: return False for test_model in homekit_models: if ( model != test_model and not model.startswith(f"{test_model} ") and not model.startswith(f"{test_model}-") ): continue hass.add_job( hass.config_entries.flow.async_init( homekit_models[test_model], context={"source": "homekit"}, data=info ) # type: ignore ) return True return False def info_from_service(service: ServiceInfo) -> HaServiceInfo | None: """Return prepared info from mDNS entries.""" properties: dict[str, Any] = {"_raw": {}} for key, value in service.properties.items(): # See https://ietf.org/rfc/rfc6763.html#section-6.4 and # https://ietf.org/rfc/rfc6763.html#section-6.5 for expected encodings # for property keys and values try: key = key.decode("ascii") except UnicodeDecodeError: _LOGGER.debug( "Ignoring invalid key provided by [%s]: %s", service.name, key ) continue properties["_raw"][key] = value with suppress(UnicodeDecodeError): if isinstance(value, bytes): properties[key] = value.decode("utf-8") if not service.addresses: return None address = service.addresses[0] return { "host": str(ipaddress.ip_address(address)), "port": service.port, "hostname": service.server, "type": service.type, "name": service.name, "properties": properties, } def _suppress_invalid_properties(properties: dict) -> None: """Suppress any properties that will cause zeroconf to fail to startup.""" for prop, prop_value in properties.items(): if not isinstance(prop_value, str): continue if len(prop_value.encode("utf-8")) > MAX_PROPERTY_VALUE_LEN: _LOGGER.error( "The property '%s' was suppressed because it is longer than the maximum length of %d bytes: %s", prop, MAX_PROPERTY_VALUE_LEN, prop_value, ) properties[prop] = "" def _truncate_location_name_to_valid(location_name: str) -> str: """Truncate or return the location name usable for zeroconf.""" if len(location_name.encode("utf-8")) < MAX_NAME_LEN: return location_name _LOGGER.warning( "The location 
name was truncated because it is longer than the maximum length of %d bytes: %s", MAX_NAME_LEN, location_name, ) return location_name.encode("utf-8")[:MAX_NAME_LEN].decode("utf-8", "ignore")
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/zeroconf/__init__.py
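_truncate_location_name_to_valid above truncates on UTF-8 bytes and then decodes with errors="ignore", so a multi-byte character split at the 63-byte DNS-label boundary is dropped rather than producing mojibake. A small self-contained demo of that strategy:

# Demo of byte-safe truncation as used by _truncate_location_name_to_valid.
MAX_NAME_LEN = 63  # DNS label max length, as in the module above


def truncate_name(name: str) -> str:
    """Return name unchanged if short enough, else a 63-byte-safe prefix."""
    if len(name.encode("utf-8")) < MAX_NAME_LEN:
        return name
    return name.encode("utf-8")[:MAX_NAME_LEN].decode("utf-8", "ignore")


# "ü" is two bytes in UTF-8: a naive character slice could exceed the byte
# limit, and a naive byte slice could split a character in half.
long_name = "ü" * 64
short = truncate_name(long_name)
print(len(short.encode("utf-8")))  # 62: the dangling half-character was dropped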
"""Support for Nest devices.""" import asyncio import logging from google_nest_sdm.event import EventMessage from google_nest_sdm.exceptions import ( AuthException, ConfigurationException, GoogleNestException, ) from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber import voluptuous as vol from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry from homeassistant.const import ( CONF_BINARY_SENSORS, CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_MONITORED_CONDITIONS, CONF_SENSORS, CONF_STRUCTURE, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import ( aiohttp_client, config_entry_oauth2_flow, config_validation as cv, ) from . import api, config_flow from .const import DATA_SDM, DATA_SUBSCRIBER, DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN from .events import EVENT_NAME_MAP, NEST_EVENT from .legacy import async_setup_legacy, async_setup_legacy_entry _CONFIGURING = {} _LOGGER = logging.getLogger(__name__) CONF_PROJECT_ID = "project_id" CONF_SUBSCRIBER_ID = "subscriber_id" DATA_NEST_CONFIG = "nest_config" DATA_NEST_UNAVAILABLE = "nest_unavailable" NEST_SETUP_NOTIFICATION = "nest_setup" SENSOR_SCHEMA = vol.Schema( {vol.Optional(CONF_MONITORED_CONDITIONS): vol.All(cv.ensure_list)} ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_CLIENT_ID): cv.string, vol.Required(CONF_CLIENT_SECRET): cv.string, # Required to use the new API (optional for compatibility) vol.Optional(CONF_PROJECT_ID): cv.string, vol.Optional(CONF_SUBSCRIBER_ID): cv.string, # Config that only currently works on the old API vol.Optional(CONF_STRUCTURE): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_SENSORS): SENSOR_SCHEMA, vol.Optional(CONF_BINARY_SENSORS): SENSOR_SCHEMA, } ) }, extra=vol.ALLOW_EXTRA, ) # Platforms for SDM API PLATFORMS = ["sensor", "camera", "climate"] async def async_setup(hass: HomeAssistant, config: dict): """Set up Nest components with dispatch between old/new flows.""" hass.data[DOMAIN] = {} if DOMAIN not in config: return True if CONF_PROJECT_ID not in config[DOMAIN]: return await async_setup_legacy(hass, config) if CONF_SUBSCRIBER_ID not in config[DOMAIN]: _LOGGER.error("Configuration option '{CONF_SUBSCRIBER_ID}' required") return False # For setup of ConfigEntry below hass.data[DOMAIN][DATA_NEST_CONFIG] = config[DOMAIN] project_id = config[DOMAIN][CONF_PROJECT_ID] config_flow.NestFlowHandler.register_sdm_api(hass) config_flow.NestFlowHandler.async_register_implementation( hass, config_entry_oauth2_flow.LocalOAuth2Implementation( hass, DOMAIN, config[DOMAIN][CONF_CLIENT_ID], config[DOMAIN][CONF_CLIENT_SECRET], OAUTH2_AUTHORIZE.format(project_id=project_id), OAUTH2_TOKEN, ), ) return True class SignalUpdateCallback: """An EventCallback invoked when new events arrive from subscriber.""" def __init__(self, hass: HomeAssistant): """Initialize EventCallback.""" self._hass = hass async def async_handle_event(self, event_message: EventMessage): """Process an incoming EventMessage.""" if not event_message.resource_update_name: return device_id = event_message.resource_update_name events = event_message.resource_update_events if not events: return _LOGGER.debug("Event Update %s", events.keys()) device_registry = await self._hass.helpers.device_registry.async_get_registry() device_entry = device_registry.async_get_device({(DOMAIN, device_id)}) if not device_entry: return for event in events: event_type = EVENT_NAME_MAP.get(event) if not event_type: continue message = { "device_id": 
device_entry.id, "type": event_type, "timestamp": event_message.timestamp, } self._hass.bus.async_fire(NEST_EVENT, message) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up Nest from a config entry with dispatch between old/new flows.""" if DATA_SDM not in entry.data: return await async_setup_legacy_entry(hass, entry) implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( hass, entry ) ) config = hass.data[DOMAIN][DATA_NEST_CONFIG] session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) auth = api.AsyncConfigEntryAuth( aiohttp_client.async_get_clientsession(hass), session, config[CONF_CLIENT_ID], config[CONF_CLIENT_SECRET], ) subscriber = GoogleNestSubscriber( auth, config[CONF_PROJECT_ID], config[CONF_SUBSCRIBER_ID] ) callback = SignalUpdateCallback(hass) subscriber.set_update_callback(callback.async_handle_event) try: await subscriber.start_async() except AuthException as err: _LOGGER.debug("Subscriber authentication error: %s", err) hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_REAUTH}, data=entry.data, ) ) return False except ConfigurationException as err: _LOGGER.error("Configuration error: %s", err) subscriber.stop_async() return False except GoogleNestException as err: if DATA_NEST_UNAVAILABLE not in hass.data[DOMAIN]: _LOGGER.error("Subscriber error: %s", err) hass.data[DOMAIN][DATA_NEST_UNAVAILABLE] = True subscriber.stop_async() raise ConfigEntryNotReady from err try: await subscriber.async_get_device_manager() except GoogleNestException as err: if DATA_NEST_UNAVAILABLE not in hass.data[DOMAIN]: _LOGGER.error("Device manager error: %s", err) hass.data[DOMAIN][DATA_NEST_UNAVAILABLE] = True subscriber.stop_async() raise ConfigEntryNotReady from err hass.data[DOMAIN].pop(DATA_NEST_UNAVAILABLE, None) hass.data[DOMAIN][DATA_SUBSCRIBER] = subscriber for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" if DATA_SDM not in entry.data: # Legacy API return True _LOGGER.debug("Stopping nest subscriber") subscriber = hass.data[DOMAIN][DATA_SUBSCRIBER] subscriber.stop_async() unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(DATA_SUBSCRIBER) hass.data[DOMAIN].pop(DATA_NEST_UNAVAILABLE, None) return unload_ok
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/nest/__init__.py
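SignalUpdateCallback.async_handle_event above maps raw SDM event names through EVENT_NAME_MAP and silently skips anything unmapped. A standalone sketch of that fan-out; the two map entries shown are illustrative rather than the integration's full table, and the bus is reduced to a returned list.

# Illustrative subset of an EVENT_NAME_MAP-style lookup table.
EVENT_NAME_MAP = {
    "sdm.devices.events.CameraMotion.Motion": "camera_motion",
    "sdm.devices.events.DoorbellChime.Chime": "doorbell_chime",
}


def build_messages(device_id: str, events: dict) -> list[dict]:
    """Translate raw SDM events into bus messages, dropping unknown types."""
    messages = []
    for event in events:
        event_type = EVENT_NAME_MAP.get(event)
        if not event_type:
            continue  # unmapped event names are skipped, as in the module above
        messages.append({"device_id": device_id, "type": event_type})
    return messages


raw = {
    "sdm.devices.events.CameraMotion.Motion": {},
    "sdm.devices.events.Unknown.Event": {},
}
print(build_messages("device-123", raw))
# [{'device_id': 'device-123', 'type': 'camera_motion'}]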
"""Provides a binary sensor which gets its values from a TCP socket.""" from homeassistant.components.binary_sensor import BinarySensorEntity from .sensor import CONF_VALUE_ON, PLATFORM_SCHEMA, TcpSensor PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({}) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the TCP binary sensor.""" add_entities([TcpBinarySensor(hass, config)]) class TcpBinarySensor(BinarySensorEntity, TcpSensor): """A binary sensor which is on when its state == CONF_VALUE_ON.""" required = (CONF_VALUE_ON,) @property def is_on(self): """Return true if the binary sensor is on.""" return self._state == self._config[CONF_VALUE_ON]
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/tcp/binary_sensor.py
"""Register an iFrame front end panel.""" import voluptuous as vol from homeassistant.const import CONF_ICON, CONF_URL import homeassistant.helpers.config_validation as cv DOMAIN = "panel_iframe" CONF_TITLE = "title" CONF_RELATIVE_URL_ERROR_MSG = "Invalid relative URL. Absolute path required." CONF_RELATIVE_URL_REGEX = r"\A/" CONF_REQUIRE_ADMIN = "require_admin" CONFIG_SCHEMA = vol.Schema( { DOMAIN: cv.schema_with_slug_keys( vol.Schema( { # pylint: disable=no-value-for-parameter vol.Optional(CONF_TITLE): cv.string, vol.Optional(CONF_ICON): cv.icon, vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, vol.Required(CONF_URL): vol.Any( vol.Match( CONF_RELATIVE_URL_REGEX, msg=CONF_RELATIVE_URL_ERROR_MSG ), vol.Url(), ), } ) ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the iFrame frontend panels.""" for url_path, info in config[DOMAIN].items(): hass.components.frontend.async_register_built_in_panel( "iframe", info.get(CONF_TITLE), info.get(CONF_ICON), url_path, {"url": info[CONF_URL]}, require_admin=info[CONF_REQUIRE_ADMIN], ) return True
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/panel_iframe/__init__.py
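The CONF_URL rule in the panel_iframe schema above accepts either a full URL or a site-relative path that must start with "/". Here is a minimal standalone sketch of just that rule, assuming only voluptuous; the sample values are illustrative, not taken from the source.

import voluptuous as vol

# Recreate the URL rule from CONFIG_SCHEMA above: a relative URL must be
# an absolute path ("/..."); anything else must parse as a full URL.
url_validator = vol.Any(
    vol.Match(r"\A/", msg="Invalid relative URL. Absolute path required."),
    vol.Url(),
)

print(url_validator("/local/docs.html"))    # accepted: absolute relative path
print(url_validator("http://192.168.1.1"))  # accepted: full URL
try:
    url_validator("local/docs.html")        # rejected: no leading slash
except vol.Invalid as err:
    print(err)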
"""Support for lights through the SmartThings cloud API.""" from __future__ import annotations import asyncio from typing import Sequence from pysmartthings import Capability from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_HS_COLOR, ATTR_TRANSITION, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_TRANSITION, LightEntity, ) import homeassistant.util.color as color_util from . import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN async def async_setup_entry(hass, config_entry, async_add_entities): """Add lights for a config entry.""" broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id] async_add_entities( [ SmartThingsLight(device) for device in broker.devices.values() if broker.any_assigned(device.device_id, "light") ], True, ) def get_capabilities(capabilities: Sequence[str]) -> Sequence[str] | None: """Return all capabilities supported if minimum required are present.""" supported = [ Capability.switch, Capability.switch_level, Capability.color_control, Capability.color_temperature, ] # Must be able to be turned on/off. if Capability.switch not in capabilities: return None # Must have one of these light_capabilities = [ Capability.color_control, Capability.color_temperature, Capability.switch_level, ] if any(capability in capabilities for capability in light_capabilities): return supported return None def convert_scale(value, value_scale, target_scale, round_digits=4): """Convert a value to a different scale.""" return round(value * target_scale / value_scale, round_digits) class SmartThingsLight(SmartThingsEntity, LightEntity): """Define a SmartThings Light.""" def __init__(self, device): """Initialize a SmartThingsLight.""" super().__init__(device) self._brightness = None self._color_temp = None self._hs_color = None self._supported_features = self._determine_features() def _determine_features(self): """Get features supported by the device.""" features = 0 # Brightness and transition if Capability.switch_level in self._device.capabilities: features |= SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION # Color Temperature if Capability.color_temperature in self._device.capabilities: features |= SUPPORT_COLOR_TEMP # Color if Capability.color_control in self._device.capabilities: features |= SUPPORT_COLOR return features async def async_turn_on(self, **kwargs) -> None: """Turn the light on.""" tasks = [] # Color temperature if self._supported_features & SUPPORT_COLOR_TEMP and ATTR_COLOR_TEMP in kwargs: tasks.append(self.async_set_color_temp(kwargs[ATTR_COLOR_TEMP])) # Color if self._supported_features & SUPPORT_COLOR and ATTR_HS_COLOR in kwargs: tasks.append(self.async_set_color(kwargs[ATTR_HS_COLOR])) if tasks: # Set temp/color first await asyncio.gather(*tasks) # Switch/brightness/transition if self._supported_features & SUPPORT_BRIGHTNESS and ATTR_BRIGHTNESS in kwargs: await self.async_set_level( kwargs[ATTR_BRIGHTNESS], kwargs.get(ATTR_TRANSITION, 0) ) else: await self._device.switch_on(set_status=True) # State is set optimistically in the commands above, therefore update # the entity state ahead of receiving the confirming push updates self.async_schedule_update_ha_state(True) async def async_turn_off(self, **kwargs) -> None: """Turn the light off.""" # Switch/transition if self._supported_features & SUPPORT_TRANSITION and ATTR_TRANSITION in kwargs: await self.async_set_level(0, int(kwargs[ATTR_TRANSITION])) else: await self._device.switch_off(set_status=True) # State is set optimistically in the commands above, therefore 
update # the entity state ahead of receiving the confirming push updates self.async_schedule_update_ha_state(True) async def async_update(self): """Update entity attributes when the device status has changed.""" # Brightness and transition if self._supported_features & SUPPORT_BRIGHTNESS: self._brightness = int( convert_scale(self._device.status.level, 100, 255, 0) ) # Color Temperature if self._supported_features & SUPPORT_COLOR_TEMP: self._color_temp = color_util.color_temperature_kelvin_to_mired( self._device.status.color_temperature ) # Color if self._supported_features & SUPPORT_COLOR: self._hs_color = ( convert_scale(self._device.status.hue, 100, 360), self._device.status.saturation, ) async def async_set_color(self, hs_color): """Set the color of the device.""" hue = convert_scale(float(hs_color[0]), 360, 100) hue = max(min(hue, 100.0), 0.0) saturation = max(min(float(hs_color[1]), 100.0), 0.0) await self._device.set_color(hue, saturation, set_status=True) async def async_set_color_temp(self, value: float): """Set the color temperature of the device.""" kelvin = color_util.color_temperature_mired_to_kelvin(value) kelvin = max(min(kelvin, 30000.0), 1.0) await self._device.set_color_temperature(kelvin, set_status=True) async def async_set_level(self, brightness: int, transition: int): """Set the brightness of the light over transition.""" level = int(convert_scale(brightness, 255, 100, 0)) # Due to rounding, set level to 1 (one) so we don't inadvertently # turn off the light when a low brightness is set. level = 1 if level == 0 and brightness > 0 else level level = max(min(level, 100), 0) duration = int(transition) await self._device.set_level(level, duration, set_status=True) @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def color_temp(self): """Return the CT color value in mireds.""" return self._color_temp @property def hs_color(self): """Return the hue and saturation color value [float, float].""" return self._hs_color @property def is_on(self) -> bool: """Return true if light is on.""" return self._device.status.switch @property def max_mireds(self): """Return the warmest color_temp that this light supports.""" # SmartThings does not expose this attribute, instead it's # implemented within each device-type handler. This value is the # lowest kelvin found supported across 20+ handlers. return 500 # 2000K @property def min_mireds(self): """Return the coldest color_temp that this light supports.""" # SmartThings does not expose this attribute, instead it's # implemented within each device-type handler. This value is the # highest kelvin found supported across 20+ handlers. return 111 # 9000K @property def supported_features(self) -> int: """Flag supported features.""" return self._supported_features
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/smartthings/light.py
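As a quick check of the conversions in the SmartThings light above: convert_scale is a plain linear rescale between the SmartThings 0-100 level and Home Assistant's 0-255 brightness, and the 500/111 mired bounds follow from mired = 1,000,000 / kelvin. A standalone sketch (sample values are illustrative):

# Re-statement of convert_scale from the module above, for a worked check.
def convert_scale(value, value_scale, target_scale, round_digits=4):
    """Linear rescale between two ranges."""
    return round(value * target_scale / value_scale, round_digits)

# SmartThings level (0-100) <-> Home Assistant brightness (0-255).
assert convert_scale(100, 100, 255, 0) == 255
assert convert_scale(191, 255, 100, 0) == 75

# Mireds and kelvin are reciprocal (mired = 1_000_000 / kelvin), so the
# 500/111 mired bounds above correspond to roughly 2000 K and 9000 K.
assert round(1_000_000 / 500) == 2000
assert round(1_000_000 / 111) == 9009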
"""Support for LimitlessLED bulbs.""" import logging from limitlessled import Color from limitlessled.bridge import Bridge from limitlessled.group.dimmer import DimmerGroup from limitlessled.group.rgbw import RgbwGroup from limitlessled.group.rgbww import RgbwwGroup from limitlessled.group.white import WhiteGroup from limitlessled.pipeline import Pipeline from limitlessled.presets import COLORLOOP import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, EFFECT_COLORLOOP, EFFECT_WHITE, FLASH_LONG, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity, ) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, CONF_TYPE, STATE_ON import homeassistant.helpers.config_validation as cv from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.util.color import color_hs_to_RGB, color_temperature_mired_to_kelvin _LOGGER = logging.getLogger(__name__) CONF_BRIDGES = "bridges" CONF_GROUPS = "groups" CONF_NUMBER = "number" CONF_VERSION = "version" CONF_FADE = "fade" DEFAULT_LED_TYPE = "rgbw" DEFAULT_PORT = 5987 DEFAULT_TRANSITION = 0 DEFAULT_VERSION = 6 DEFAULT_FADE = False LED_TYPE = ["rgbw", "rgbww", "white", "bridge-led", "dimmer"] EFFECT_NIGHT = "night" MIN_SATURATION = 10 WHITE = [0, 0] SUPPORT_LIMITLESSLED_WHITE = ( SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_TRANSITION ) SUPPORT_LIMITLESSLED_DIMMER = SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION SUPPORT_LIMITLESSLED_RGB = ( SUPPORT_BRIGHTNESS | SUPPORT_EFFECT | SUPPORT_FLASH | SUPPORT_COLOR | SUPPORT_TRANSITION ) SUPPORT_LIMITLESSLED_RGBWW = ( SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_FLASH | SUPPORT_COLOR | SUPPORT_TRANSITION ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_BRIDGES): vol.All( cv.ensure_list, [ { vol.Required(CONF_HOST): cv.string, vol.Optional( CONF_VERSION, default=DEFAULT_VERSION ): cv.positive_int, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Required(CONF_GROUPS): vol.All( cv.ensure_list, [ { vol.Required(CONF_NAME): cv.string, vol.Optional( CONF_TYPE, default=DEFAULT_LED_TYPE ): vol.In(LED_TYPE), vol.Required(CONF_NUMBER): cv.positive_int, vol.Optional( CONF_FADE, default=DEFAULT_FADE ): cv.boolean, } ], ), } ], ) } ) def rewrite_legacy(config): """Rewrite legacy configuration to new format.""" bridges = config.get(CONF_BRIDGES, [config]) new_bridges = [] for bridge_conf in bridges: groups = [] if "groups" in bridge_conf: groups = bridge_conf["groups"] else: _LOGGER.warning("Legacy configuration format detected") for i in range(1, 5): name_key = "group_%d_name" % i if name_key in bridge_conf: groups.append( { "number": i, "type": bridge_conf.get( "group_%d_type" % i, DEFAULT_LED_TYPE ), "name": bridge_conf.get(name_key), } ) new_bridges.append( { "host": bridge_conf.get(CONF_HOST), "version": bridge_conf.get(CONF_VERSION), "port": bridge_conf.get(CONF_PORT), "groups": groups, } ) return {"bridges": new_bridges} def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the LimitlessLED lights.""" # Two legacy configuration formats are supported to maintain backwards # compatibility. config = rewrite_legacy(config) # Use the expanded configuration format. 
lights = [] for bridge_conf in config.get(CONF_BRIDGES): bridge = Bridge( bridge_conf.get(CONF_HOST), port=bridge_conf.get(CONF_PORT, DEFAULT_PORT), version=bridge_conf.get(CONF_VERSION, DEFAULT_VERSION), ) for group_conf in bridge_conf.get(CONF_GROUPS): group = bridge.add_group( group_conf.get(CONF_NUMBER), group_conf.get(CONF_NAME), group_conf.get(CONF_TYPE, DEFAULT_LED_TYPE), ) lights.append(LimitlessLEDGroup(group, {"fade": group_conf[CONF_FADE]})) add_entities(lights) def state(new_state): """State decorator. Specify True (turn on) or False (turn off). """ def decorator(function): """Set up the decorator function.""" def wrapper(self, **kwargs): """Wrap a group state change.""" pipeline = Pipeline() transition_time = DEFAULT_TRANSITION if self._effect == EFFECT_COLORLOOP: self.group.stop() self._effect = None # Set transition time. if ATTR_TRANSITION in kwargs: transition_time = int(kwargs[ATTR_TRANSITION]) # Do group type-specific work. function(self, transition_time, pipeline, **kwargs) # Update state. self._is_on = new_state self.group.enqueue(pipeline) self.schedule_update_ha_state() return wrapper return decorator class LimitlessLEDGroup(LightEntity, RestoreEntity): """Representation of a LimitlessLED group.""" def __init__(self, group, config): """Initialize a group.""" if isinstance(group, WhiteGroup): self._supported = SUPPORT_LIMITLESSLED_WHITE self._effect_list = [EFFECT_NIGHT] elif isinstance(group, DimmerGroup): self._supported = SUPPORT_LIMITLESSLED_DIMMER self._effect_list = [] elif isinstance(group, RgbwGroup): self._supported = SUPPORT_LIMITLESSLED_RGB self._effect_list = [EFFECT_COLORLOOP, EFFECT_NIGHT, EFFECT_WHITE] elif isinstance(group, RgbwwGroup): self._supported = SUPPORT_LIMITLESSLED_RGBWW self._effect_list = [EFFECT_COLORLOOP, EFFECT_NIGHT, EFFECT_WHITE] self.group = group self.config = config self._is_on = False self._brightness = None self._temperature = None self._color = None self._effect = None async def async_added_to_hass(self): """Handle entity about to be added to hass event.""" await super().async_added_to_hass() last_state = await self.async_get_last_state() if last_state: self._is_on = last_state.state == STATE_ON self._brightness = last_state.attributes.get("brightness") self._temperature = last_state.attributes.get("color_temp") self._color = last_state.attributes.get("hs_color") @property def should_poll(self): """No polling needed.""" return False @property def assumed_state(self): """Return True because unable to access real state of the entity.""" return True @property def name(self): """Return the name of the group.""" return self.group.name @property def is_on(self): """Return true if device is on.""" return self._is_on @property def brightness(self): """Return the brightness property.""" if self._effect == EFFECT_NIGHT: return 1 return self._brightness @property def min_mireds(self): """Return the coldest color_temp that this light supports.""" return 154 @property def max_mireds(self): """Return the warmest color_temp that this light supports.""" return 370 @property def color_temp(self): """Return the temperature property.""" if self.hs_color is not None: return None return self._temperature @property def hs_color(self): """Return the color property.""" if self._effect == EFFECT_NIGHT: return None if self._color is None or self._color[1] == 0: return None return self._color @property def supported_features(self): """Flag supported features.""" return self._supported @property def effect(self): """Return the current effect for this
light.""" return self._effect @property def effect_list(self): """Return the list of supported effects for this light.""" return self._effect_list # pylint: disable=arguments-differ @state(False) def turn_off(self, transition_time, pipeline, **kwargs): """Turn off a group.""" if self.config[CONF_FADE]: pipeline.transition(transition_time, brightness=0.0) pipeline.off() # pylint: disable=arguments-differ @state(True) def turn_on(self, transition_time, pipeline, **kwargs): """Turn on (or adjust property of) a group.""" # The night effect does not need a turned on light if kwargs.get(ATTR_EFFECT) == EFFECT_NIGHT: if EFFECT_NIGHT in self._effect_list: pipeline.night_light() self._effect = EFFECT_NIGHT return pipeline.on() # Set up transition. args = {} if self.config[CONF_FADE] and not self.is_on and self._brightness: args["brightness"] = self.limitlessled_brightness() if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] args["brightness"] = self.limitlessled_brightness() if ATTR_HS_COLOR in kwargs and self._supported & SUPPORT_COLOR: self._color = kwargs[ATTR_HS_COLOR] # White is a special case. if self._color[1] < MIN_SATURATION: pipeline.white() self._color = WHITE else: args["color"] = self.limitlessled_color() if ATTR_COLOR_TEMP in kwargs: if self._supported & SUPPORT_COLOR: pipeline.white() self._color = WHITE if self._supported & SUPPORT_COLOR_TEMP: self._temperature = kwargs[ATTR_COLOR_TEMP] args["temperature"] = self.limitlessled_temperature() if args: pipeline.transition(transition_time, **args) # Flash. if ATTR_FLASH in kwargs and self._supported & SUPPORT_FLASH: duration = 0 if kwargs[ATTR_FLASH] == FLASH_LONG: duration = 1 pipeline.flash(duration=duration) # Add effects. if ATTR_EFFECT in kwargs and self._effect_list: if kwargs[ATTR_EFFECT] == EFFECT_COLORLOOP: self._effect = EFFECT_COLORLOOP pipeline.append(COLORLOOP) if kwargs[ATTR_EFFECT] == EFFECT_WHITE: pipeline.white() self._color = WHITE def limitlessled_temperature(self): """Convert Home Assistant color temperature units to percentage.""" max_kelvin = color_temperature_mired_to_kelvin(self.min_mireds) min_kelvin = color_temperature_mired_to_kelvin(self.max_mireds) width = max_kelvin - min_kelvin kelvin = color_temperature_mired_to_kelvin(self._temperature) temperature = (kelvin - min_kelvin) / width return max(0, min(1, temperature)) def limitlessled_brightness(self): """Convert Home Assistant brightness units to percentage.""" return self._brightness / 255 def limitlessled_color(self): """Convert Home Assistant HS list to RGB Color tuple.""" return Color(*color_hs_to_RGB(*tuple(self._color)))
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/limitlessled/light.py
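limitlessled_temperature above normalizes a Home Assistant mired value into the bridge's 0..1 range by first converting the 154/370 mired bounds to kelvin. The same arithmetic as a standalone sketch; the sample inputs are illustrative.

def mired_to_kelvin(mired):
    """Reciprocal conversion, as homeassistant.util.color performs it."""
    return 1_000_000 / mired

def limitless_temperature(mired, min_mireds=154, max_mireds=370):
    # Coldest supported mired maps to the highest kelvin, and vice versa.
    max_kelvin = mired_to_kelvin(min_mireds)
    min_kelvin = mired_to_kelvin(max_mireds)
    fraction = (mired_to_kelvin(mired) - min_kelvin) / (max_kelvin - min_kelvin)
    return max(0, min(1, fraction))

print(limitless_temperature(370))            # 0 (warmest supported)
print(limitless_temperature(154))            # 1 (coldest supported)
print(round(limitless_temperature(250), 2))  # ~0.34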
"""Support for MyQ-Enabled Garage Doors.""" import logging from pymyq.const import ( DEVICE_STATE as MYQ_DEVICE_STATE, DEVICE_STATE_ONLINE as MYQ_DEVICE_STATE_ONLINE, DEVICE_TYPE_GATE as MYQ_DEVICE_TYPE_GATE, KNOWN_MODELS, MANUFACTURER, ) from pymyq.errors import MyQError from homeassistant.components.cover import ( DEVICE_CLASS_GARAGE, DEVICE_CLASS_GATE, SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity, ) from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, MYQ_COORDINATOR, MYQ_GATEWAY, MYQ_TO_HASS _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up mysq covers.""" data = hass.data[DOMAIN][config_entry.entry_id] myq = data[MYQ_GATEWAY] coordinator = data[MYQ_COORDINATOR] async_add_entities( [MyQDevice(coordinator, device) for device in myq.covers.values()], True ) class MyQDevice(CoordinatorEntity, CoverEntity): """Representation of a MyQ cover.""" def __init__(self, coordinator, device): """Initialize with API object, device id.""" super().__init__(coordinator) self._device = device @property def device_class(self): """Define this cover as a garage door.""" device_type = self._device.device_type if device_type is not None and device_type == MYQ_DEVICE_TYPE_GATE: return DEVICE_CLASS_GATE return DEVICE_CLASS_GARAGE @property def name(self): """Return the name of the garage door if any.""" return self._device.name @property def available(self): """Return if the device is online.""" if not self.coordinator.last_update_success: return False # Not all devices report online so assume True if its missing return self._device.device_json[MYQ_DEVICE_STATE].get( MYQ_DEVICE_STATE_ONLINE, True ) @property def is_closed(self): """Return true if cover is closed, else False.""" return MYQ_TO_HASS.get(self._device.state) == STATE_CLOSED @property def is_closing(self): """Return if the cover is closing or not.""" return MYQ_TO_HASS.get(self._device.state) == STATE_CLOSING @property def is_open(self): """Return if the cover is opening or not.""" return MYQ_TO_HASS.get(self._device.state) == STATE_OPEN @property def is_opening(self): """Return if the cover is opening or not.""" return MYQ_TO_HASS.get(self._device.state) == STATE_OPENING @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE @property def unique_id(self): """Return a unique, Home Assistant friendly identifier for this entity.""" return self._device.device_id async def async_close_cover(self, **kwargs): """Issue close command to cover.""" if self.is_closing or self.is_closed: return try: wait_task = await self._device.close(wait_for_state=False) except MyQError as err: _LOGGER.error( "Closing of cover %s failed with error: %s", self._device.name, str(err) ) return # Write closing state to HASS self.async_write_ha_state() if not await wait_task: _LOGGER.error("Closing of cover %s failed", self._device.name) # Write final state to HASS self.async_write_ha_state() async def async_open_cover(self, **kwargs): """Issue open command to cover.""" if self.is_opening or self.is_open: return try: wait_task = await self._device.open(wait_for_state=False) except MyQError as err: _LOGGER.error( "Opening of cover %s failed with error: %s", self._device.name, str(err) ) return # Write opening state to HASS self.async_write_ha_state() if not await wait_task: _LOGGER.error("Opening of cover %s failed", self._device.name) # 
Write final state to HASS self.async_write_ha_state() @property def device_info(self): """Return the device_info of the device.""" device_info = { "identifiers": {(DOMAIN, self._device.device_id)}, "name": self._device.name, "manufacturer": MANUFACTURER, "sw_version": self._device.firmware_version, } model = KNOWN_MODELS.get(self._device.device_id[2:4]) if model: device_info["model"] = model if self._device.parent_device_id: device_info["via_device"] = (DOMAIN, self._device.parent_device_id) return device_info async def async_added_to_hass(self): """Subscribe to updates.""" self.async_on_remove( self.coordinator.async_add_listener(self.async_write_ha_state) )
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/myq/cover.py
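The open/close handlers above use a fire-then-confirm pattern: the command is issued with wait_for_state=False, the optimistic state is written immediately, and the returned task is awaited afterwards to confirm the final state. Below is a minimal standalone sketch of that flow; the FakeCover class is a hypothetical stand-in, not the real pymyq API.

import asyncio

class FakeCover:
    """Hypothetical stand-in for the pymyq device; not the real API."""

    async def open(self, wait_for_state=False):
        async def _wait_for_state():
            await asyncio.sleep(0)  # pretend the door finishes moving
            return True             # True = target state was reached
        return asyncio.ensure_future(_wait_for_state())

async def main():
    device = FakeCover()
    wait_task = await device.open(wait_for_state=False)  # command accepted
    print("write optimistic 'opening' state here")
    if not await wait_task:  # later, block until the device confirms
        print("Opening of cover failed")
    print("write final state here")

asyncio.run(main())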
"""Support for w800rf32 binary sensors.""" import logging import W800rf32 as w800 import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_DEVICES, CONF_NAME from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, event as evt from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.util import dt as dt_util from . import W800RF32_DEVICE _LOGGER = logging.getLogger(__name__) CONF_OFF_DELAY = "off_delay" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICES): { cv.string: vol.Schema( { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, vol.Optional(CONF_OFF_DELAY): vol.All( cv.time_period, cv.positive_timedelta ), } ) } }, extra=vol.ALLOW_EXTRA, ) async def async_setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Binary Sensor platform to w800rf32.""" binary_sensors = [] # device_id --> "c1 or a3" X10 device. entity (type dictionary) # --> name, device_class etc for device_id, entity in config[CONF_DEVICES].items(): _LOGGER.debug( "Add %s w800rf32.binary_sensor (class %s)", entity[CONF_NAME], entity.get(CONF_DEVICE_CLASS), ) device = W800rf32BinarySensor( device_id, entity.get(CONF_NAME), entity.get(CONF_DEVICE_CLASS), entity.get(CONF_OFF_DELAY), ) binary_sensors.append(device) add_entities(binary_sensors) class W800rf32BinarySensor(BinarySensorEntity): """A representation of a w800rf32 binary sensor.""" def __init__(self, device_id, name, device_class=None, off_delay=None): """Initialize the w800rf32 sensor.""" self._signal = W800RF32_DEVICE.format(device_id) self._name = name self._device_class = device_class self._off_delay = off_delay self._state = False self._delay_listener = None @callback def _off_delay_listener(self, now): """Switch device off after a delay.""" self._delay_listener = None self.update_state(False) @property def name(self): """Return the device name.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def device_class(self): """Return the sensor class.""" return self._device_class @property def is_on(self): """Return true if the sensor state is True.""" return self._state @callback def binary_sensor_update(self, event): """Call for control updates from the w800rf32 gateway.""" if not isinstance(event, w800.W800rf32Event): return dev_id = event.device command = event.command _LOGGER.debug( "BinarySensor update (Device ID: %s Command %s ...)", dev_id, command ) # Update the w800rf32 device state if command in ("On", "Off"): is_on = command == "On" self.update_state(is_on) if self.is_on and self._off_delay is not None and self._delay_listener is None: self._delay_listener = evt.async_track_point_in_time( self.hass, self._off_delay_listener, dt_util.utcnow() + self._off_delay ) def update_state(self, state): """Update the state of the device.""" self._state = state self.async_write_ha_state() async def async_added_to_hass(self): """Register update callback.""" async_dispatcher_connect(self.hass, self._signal, self.binary_sensor_update)
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/w800rf32/binary_sensor.py
"""Support for Freebox devices (Freebox v6 and Freebox mini 4K).""" from __future__ import annotations import logging from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import DATA_RATE_KILOBYTES_PER_SECOND from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.dt as dt_util from .const import ( CALL_SENSORS, CONNECTION_SENSORS, DISK_PARTITION_SENSORS, DOMAIN, SENSOR_DEVICE_CLASS, SENSOR_ICON, SENSOR_NAME, SENSOR_UNIT, TEMPERATURE_SENSOR_TEMPLATE, ) from .router import FreeboxRouter _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the sensors.""" router = hass.data[DOMAIN][entry.unique_id] entities = [] _LOGGER.debug( "%s - %s - %s temperature sensors", router.name, router.mac, len(router.sensors_temperature), ) for sensor_name in router.sensors_temperature: entities.append( FreeboxSensor( router, sensor_name, {**TEMPERATURE_SENSOR_TEMPLATE, SENSOR_NAME: f"Freebox {sensor_name}"}, ) ) for sensor_key in CONNECTION_SENSORS: entities.append( FreeboxSensor(router, sensor_key, CONNECTION_SENSORS[sensor_key]) ) for sensor_key in CALL_SENSORS: entities.append(FreeboxCallSensor(router, sensor_key, CALL_SENSORS[sensor_key])) _LOGGER.debug("%s - %s - %s disk(s)", router.name, router.mac, len(router.disks)) for disk in router.disks.values(): for partition in disk["partitions"]: for sensor_key in DISK_PARTITION_SENSORS: entities.append( FreeboxDiskSensor( router, disk, partition, sensor_key, DISK_PARTITION_SENSORS[sensor_key], ) ) async_add_entities(entities, True) class FreeboxSensor(SensorEntity): """Representation of a Freebox sensor.""" def __init__( self, router: FreeboxRouter, sensor_type: str, sensor: dict[str, any] ) -> None: """Initialize a Freebox sensor.""" self._state = None self._router = router self._sensor_type = sensor_type self._name = sensor[SENSOR_NAME] self._unit = sensor[SENSOR_UNIT] self._icon = sensor[SENSOR_ICON] self._device_class = sensor[SENSOR_DEVICE_CLASS] self._unique_id = f"{self._router.mac} {self._name}" @callback def async_update_state(self) -> None: """Update the Freebox sensor.""" state = self._router.sensors[self._sensor_type] if self._unit == DATA_RATE_KILOBYTES_PER_SECOND: self._state = round(state / 1000, 2) else: self._state = state @property def unique_id(self) -> str: """Return a unique ID.""" return self._unique_id @property def name(self) -> str: """Return the name.""" return self._name @property def state(self) -> str: """Return the state.""" return self._state @property def unit_of_measurement(self) -> str: """Return the unit.""" return self._unit @property def icon(self) -> str: """Return the icon.""" return self._icon @property def device_class(self) -> str: """Return the device_class.""" return self._device_class @property def device_info(self) -> dict[str, any]: """Return the device information.""" return self._router.device_info @property def should_poll(self) -> bool: """No polling needed.""" return False @callback def async_on_demand_update(self): """Update state.""" self.async_update_state() self.async_write_ha_state() async def async_added_to_hass(self): """Register state update callback.""" self.async_update_state() self.async_on_remove( async_dispatcher_connect( self.hass, self._router.signal_sensor_update, 
self.async_on_demand_update, ) ) class FreeboxCallSensor(FreeboxSensor): """Representation of a Freebox call sensor.""" def __init__( self, router: FreeboxRouter, sensor_type: str, sensor: dict[str, any] ) -> None: """Initialize a Freebox call sensor.""" super().__init__(router, sensor_type, sensor) self._call_list_for_type = [] @callback def async_update_state(self) -> None: """Update the Freebox call sensor.""" self._call_list_for_type = [] if self._router.call_list: for call in self._router.call_list: if not call["new"]: continue if call["type"] == self._sensor_type: self._call_list_for_type.append(call) self._state = len(self._call_list_for_type) @property def extra_state_attributes(self) -> dict[str, any]: """Return device specific state attributes.""" return { dt_util.utc_from_timestamp(call["datetime"]).isoformat(): call["name"] for call in self._call_list_for_type } class FreeboxDiskSensor(FreeboxSensor): """Representation of a Freebox disk sensor.""" def __init__( self, router: FreeboxRouter, disk: dict[str, any], partition: dict[str, any], sensor_type: str, sensor: dict[str, any], ) -> None: """Initialize a Freebox disk sensor.""" super().__init__(router, sensor_type, sensor) self._disk = disk self._partition = partition self._name = f"{partition['label']} {sensor[SENSOR_NAME]}" self._unique_id = f"{self._router.mac} {sensor_type} {self._disk['id']} {self._partition['id']}" @property def device_info(self) -> dict[str, any]: """Return the device information.""" return { "identifiers": {(DOMAIN, self._disk["id"])}, "name": f"Disk {self._disk['id']}", "model": self._disk["model"], "sw_version": self._disk["firmware"], "via_device": ( DOMAIN, self._router.mac, ), } @callback def async_update_state(self) -> None: """Update the Freebox disk sensor.""" self._state = round( self._partition["free_bytes"] * 100 / self._partition["total_bytes"], 2 )
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/freebox/sensor.py
"""Support for Spider switches.""" from homeassistant.components.switch import SwitchEntity from .const import DOMAIN async def async_setup_entry(hass, config, async_add_entities): """Initialize a Spider Power Plug.""" api = hass.data[DOMAIN][config.entry_id] async_add_entities( [ SpiderPowerPlug(api, entity) for entity in await hass.async_add_executor_job(api.get_power_plugs) ] ) class SpiderPowerPlug(SwitchEntity): """Representation of a Spider Power Plug.""" def __init__(self, api, power_plug): """Initialize the Spider Power Plug.""" self.api = api self.power_plug = power_plug @property def device_info(self): """Return the device_info of the device.""" return { "identifiers": {(DOMAIN, self.power_plug.id)}, "name": self.power_plug.name, "manufacturer": self.power_plug.manufacturer, "model": self.power_plug.model, } @property def unique_id(self): """Return the ID of this switch.""" return self.power_plug.id @property def name(self): """Return the name of the switch if any.""" return self.power_plug.name @property def current_power_w(self): """Return the current power usage in W.""" return round(self.power_plug.current_energy_consumption) @property def today_energy_kwh(self): """Return the current power usage in Kwh.""" return round(self.power_plug.today_energy_consumption / 1000, 2) @property def is_on(self): """Return true if switch is on. Standby is on.""" return self.power_plug.is_on @property def available(self): """Return true if switch is available.""" return self.power_plug.is_available def turn_on(self, **kwargs): """Turn device on.""" self.power_plug.turn_on() def turn_off(self, **kwargs): """Turn device off.""" self.power_plug.turn_off() def update(self): """Get the latest data.""" self.power_plug = self.api.get_power_plug(self.unique_id)
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/spider/switch.py
"""Asuswrt status sensors.""" from __future__ import annotations import logging from numbers import Number from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import DATA_GIGABYTES, DATA_RATE_MEGABITS_PER_SECOND from homeassistant.helpers.typing import HomeAssistantType from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, ) from .const import ( DATA_ASUSWRT, DOMAIN, SENSOR_CONNECTED_DEVICE, SENSOR_RX_BYTES, SENSOR_RX_RATES, SENSOR_TX_BYTES, SENSOR_TX_RATES, ) from .router import KEY_COORDINATOR, KEY_SENSORS, AsusWrtRouter DEFAULT_PREFIX = "Asuswrt" SENSOR_DEVICE_CLASS = "device_class" SENSOR_ICON = "icon" SENSOR_NAME = "name" SENSOR_UNIT = "unit" SENSOR_FACTOR = "factor" SENSOR_DEFAULT_ENABLED = "default_enabled" UNIT_DEVICES = "Devices" CONNECTION_SENSORS = { SENSOR_CONNECTED_DEVICE: { SENSOR_NAME: "Devices Connected", SENSOR_UNIT: UNIT_DEVICES, SENSOR_FACTOR: 0, SENSOR_ICON: "mdi:router-network", SENSOR_DEVICE_CLASS: None, SENSOR_DEFAULT_ENABLED: True, }, SENSOR_RX_RATES: { SENSOR_NAME: "Download Speed", SENSOR_UNIT: DATA_RATE_MEGABITS_PER_SECOND, SENSOR_FACTOR: 125000, SENSOR_ICON: "mdi:download-network", SENSOR_DEVICE_CLASS: None, }, SENSOR_TX_RATES: { SENSOR_NAME: "Upload Speed", SENSOR_UNIT: DATA_RATE_MEGABITS_PER_SECOND, SENSOR_FACTOR: 125000, SENSOR_ICON: "mdi:upload-network", SENSOR_DEVICE_CLASS: None, }, SENSOR_RX_BYTES: { SENSOR_NAME: "Download", SENSOR_UNIT: DATA_GIGABYTES, SENSOR_FACTOR: 1000000000, SENSOR_ICON: "mdi:download", SENSOR_DEVICE_CLASS: None, }, SENSOR_TX_BYTES: { SENSOR_NAME: "Upload", SENSOR_UNIT: DATA_GIGABYTES, SENSOR_FACTOR: 1000000000, SENSOR_ICON: "mdi:upload", SENSOR_DEVICE_CLASS: None, }, } _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the sensors.""" router: AsusWrtRouter = hass.data[DOMAIN][entry.entry_id][DATA_ASUSWRT] entities = [] for sensor_data in router.sensors_coordinator.values(): coordinator = sensor_data[KEY_COORDINATOR] sensors = sensor_data[KEY_SENSORS] for sensor_key in sensors: if sensor_key in CONNECTION_SENSORS: entities.append( AsusWrtSensor( coordinator, router, sensor_key, CONNECTION_SENSORS[sensor_key] ) ) async_add_entities(entities, True) class AsusWrtSensor(CoordinatorEntity, SensorEntity): """Representation of a AsusWrt sensor.""" def __init__( self, coordinator: DataUpdateCoordinator, router: AsusWrtRouter, sensor_type: str, sensor: dict[str, any], ) -> None: """Initialize a AsusWrt sensor.""" super().__init__(coordinator) self._router = router self._sensor_type = sensor_type self._name = f"{DEFAULT_PREFIX} {sensor[SENSOR_NAME]}" self._unique_id = f"{DOMAIN} {self._name}" self._unit = sensor[SENSOR_UNIT] self._factor = sensor[SENSOR_FACTOR] self._icon = sensor[SENSOR_ICON] self._device_class = sensor[SENSOR_DEVICE_CLASS] self._default_enabled = sensor.get(SENSOR_DEFAULT_ENABLED, False) @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" return self._default_enabled @property def state(self) -> str: """Return current state.""" state = self.coordinator.data.get(self._sensor_type) if state is None: return None if self._factor and isinstance(state, Number): return round(state / self._factor, 2) return state @property def unique_id(self) -> str: """Return a unique ID.""" return self._unique_id @property def 
name(self) -> str: """Return the name.""" return self._name @property def unit_of_measurement(self) -> str: """Return the unit.""" return self._unit @property def icon(self) -> str: """Return the icon.""" return self._icon @property def device_class(self) -> str: """Return the device_class.""" return self._device_class @property def extra_state_attributes(self) -> dict[str, any]: """Return the attributes.""" return {"hostname": self._router.host} @property def device_info(self) -> dict[str, any]: """Return the device information.""" return self._router.device_info
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/asuswrt/sensor.py
"""Support for exposing NX584 elements as sensors.""" import logging import threading import time from nx584 import client as nx584_client import requests import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASS_OPENING, DEVICE_CLASSES, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import CONF_HOST, CONF_PORT import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_EXCLUDE_ZONES = "exclude_zones" CONF_ZONE_TYPES = "zone_types" DEFAULT_HOST = "localhost" DEFAULT_PORT = "5007" DEFAULT_SSL = False ZONE_TYPES_SCHEMA = vol.Schema({cv.positive_int: vol.In(DEVICE_CLASSES)}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_EXCLUDE_ZONES, default=[]): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_ZONE_TYPES, default={}): ZONE_TYPES_SCHEMA, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the NX584 binary sensor platform.""" host = config.get(CONF_HOST) port = config.get(CONF_PORT) exclude = config.get(CONF_EXCLUDE_ZONES) zone_types = config.get(CONF_ZONE_TYPES) try: client = nx584_client.Client(f"http://{host}:{port}") zones = client.list_zones() except requests.exceptions.ConnectionError as ex: _LOGGER.error("Unable to connect to NX584: %s", str(ex)) return False version = [int(v) for v in client.get_version().split(".")] if version < [1, 1]: _LOGGER.error("NX584 is too old to use for sensors (>=0.2 required)") return False zone_sensors = { zone["number"]: NX584ZoneSensor( zone, zone_types.get(zone["number"], DEVICE_CLASS_OPENING) ) for zone in zones if zone["number"] not in exclude } if zone_sensors: add_entities(zone_sensors.values()) watcher = NX584Watcher(client, zone_sensors) watcher.start() else: _LOGGER.warning("No zones found on NX584") return True class NX584ZoneSensor(BinarySensorEntity): """Representation of a NX584 zone as a sensor.""" def __init__(self, zone, zone_type): """Initialize the nx594 binary sensor.""" self._zone = zone self._zone_type = zone_type @property def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" return self._zone_type @property def should_poll(self): """No polling needed.""" return False @property def name(self): """Return the name of the binary sensor.""" return self._zone["name"] @property def is_on(self): """Return true if the binary sensor is on.""" # True means "faulted" or "open" or "abnormal state" return self._zone["state"] @property def extra_state_attributes(self): """Return the state attributes.""" return {"zone_number": self._zone["number"]} class NX584Watcher(threading.Thread): """Event listener thread to process NX584 events.""" def __init__(self, client, zone_sensors): """Initialize NX584 watcher thread.""" super().__init__() self.daemon = True self._client = client self._zone_sensors = zone_sensors def _process_zone_event(self, event): zone = event["zone"] zone_sensor = self._zone_sensors.get(zone) # pylint: disable=protected-access if not zone_sensor: return zone_sensor._zone["state"] = event["zone_state"] zone_sensor.schedule_update_ha_state() def _process_events(self, events): for event in events: if event.get("type") == "zone_status": self._process_zone_event(event) def _run(self): """Throw away any existing events so we don't replay history.""" self._client.get_events() while True: events = self._client.get_events() if events: 
self._process_events(events) def run(self): """Run the watcher.""" while True: try: self._run() except requests.exceptions.ConnectionError: _LOGGER.error("Failed to reach NX584 server") time.sleep(10)
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/nx584/binary_sensor.py
"""Representation of Z-Wave binary sensors.""" from __future__ import annotations import logging from typing import Callable, TypedDict from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import CommandClass from homeassistant.components.binary_sensor import ( DEVICE_CLASS_BATTERY, DEVICE_CLASS_DOOR, DEVICE_CLASS_GAS, DEVICE_CLASS_HEAT, DEVICE_CLASS_LOCK, DEVICE_CLASS_MOISTURE, DEVICE_CLASS_MOTION, DEVICE_CLASS_PROBLEM, DEVICE_CLASS_SAFETY, DEVICE_CLASS_SMOKE, DEVICE_CLASS_SOUND, DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .const import DATA_CLIENT, DATA_UNSUBSCRIBE, DOMAIN from .discovery import ZwaveDiscoveryInfo from .entity import ZWaveBaseEntity LOGGER = logging.getLogger(__name__) NOTIFICATION_SMOKE_ALARM = 1 NOTIFICATION_CARBON_MONOOXIDE = 2 NOTIFICATION_CARBON_DIOXIDE = 3 NOTIFICATION_HEAT = 4 NOTIFICATION_WATER = 5 NOTIFICATION_ACCESS_CONTROL = 6 NOTIFICATION_HOME_SECURITY = 7 NOTIFICATION_POWER_MANAGEMENT = 8 NOTIFICATION_SYSTEM = 9 NOTIFICATION_EMERGENCY = 10 NOTIFICATION_CLOCK = 11 NOTIFICATION_APPLIANCE = 12 NOTIFICATION_HOME_HEALTH = 13 NOTIFICATION_SIREN = 14 NOTIFICATION_WATER_VALVE = 15 NOTIFICATION_WEATHER = 16 NOTIFICATION_IRRIGATION = 17 NOTIFICATION_GAS = 18 class NotificationSensorMapping(TypedDict, total=False): """Represent a notification sensor mapping dict type.""" type: int # required states: list[str] device_class: str enabled: bool # Mappings for Notification sensors # https://github.com/zwave-js/node-zwave-js/blob/master/packages/config/config/notifications.json NOTIFICATION_SENSOR_MAPPINGS: list[NotificationSensorMapping] = [ { # NotificationType 1: Smoke Alarm - State Id's 1 and 2 - Smoke detected "type": NOTIFICATION_SMOKE_ALARM, "states": ["1", "2"], "device_class": DEVICE_CLASS_SMOKE, }, { # NotificationType 1: Smoke Alarm - All other State Id's "type": NOTIFICATION_SMOKE_ALARM, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 2: Carbon Monoxide - State Id's 1 and 2 "type": NOTIFICATION_CARBON_MONOOXIDE, "states": ["1", "2"], "device_class": DEVICE_CLASS_GAS, }, { # NotificationType 2: Carbon Monoxide - All other State Id's "type": NOTIFICATION_CARBON_MONOOXIDE, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 3: Carbon Dioxide - State Id's 1 and 2 "type": NOTIFICATION_CARBON_DIOXIDE, "states": ["1", "2"], "device_class": DEVICE_CLASS_GAS, }, { # NotificationType 3: Carbon Dioxide - All other State Id's "type": NOTIFICATION_CARBON_DIOXIDE, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 4: Heat - State Id's 1, 2, 5, 6 (heat/underheat) "type": NOTIFICATION_HEAT, "states": ["1", "2", "5", "6"], "device_class": DEVICE_CLASS_HEAT, }, { # NotificationType 4: Heat - All other State Id's "type": NOTIFICATION_HEAT, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 5: Water - State Id's 1, 2, 3, 4 "type": NOTIFICATION_WATER, "states": ["1", "2", "3", "4"], "device_class": DEVICE_CLASS_MOISTURE, }, { # NotificationType 5: Water - All other State Id's "type": NOTIFICATION_WATER, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 6: Access Control - State Id's 1, 2, 3, 4 (Lock) "type": NOTIFICATION_ACCESS_CONTROL, "states": ["1", "2", "3", "4"], "device_class": DEVICE_CLASS_LOCK, }, { # NotificationType 6: Access Control - State Id 16 (door/window open) "type": 
NOTIFICATION_ACCESS_CONTROL, "states": ["22"], "device_class": DEVICE_CLASS_DOOR, }, { # NotificationType 6: Access Control - State Id 17 (door/window closed) "type": NOTIFICATION_ACCESS_CONTROL, "states": ["23"], "enabled": False, }, { # NotificationType 7: Home Security - State Id's 1, 2 (intrusion) "type": NOTIFICATION_HOME_SECURITY, "states": ["1", "2"], "device_class": DEVICE_CLASS_SAFETY, }, { # NotificationType 7: Home Security - State Id's 3, 4, 9 (tampering) "type": NOTIFICATION_HOME_SECURITY, "states": ["3", "4", "9"], "device_class": DEVICE_CLASS_SAFETY, }, { # NotificationType 7: Home Security - State Id's 5, 6 (glass breakage) "type": NOTIFICATION_HOME_SECURITY, "states": ["5", "6"], "device_class": DEVICE_CLASS_SAFETY, }, { # NotificationType 7: Home Security - State Id's 7, 8 (motion) "type": NOTIFICATION_HOME_SECURITY, "states": ["7", "8"], "device_class": DEVICE_CLASS_MOTION, }, { # NotificationType 9: System - State Id's 1, 2, 6, 7 "type": NOTIFICATION_SYSTEM, "states": ["1", "2", "6", "7"], "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 10: Emergency - State Id's 1, 2, 3 "type": NOTIFICATION_EMERGENCY, "states": ["1", "2", "3"], "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 14: Siren "type": NOTIFICATION_SIREN, "states": ["1"], "device_class": DEVICE_CLASS_SOUND, }, { # NotificationType 18: Gas "type": NOTIFICATION_GAS, "states": ["1", "2", "3", "4"], "device_class": DEVICE_CLASS_GAS, }, { # NotificationType 18: Gas "type": NOTIFICATION_GAS, "states": ["6"], "device_class": DEVICE_CLASS_PROBLEM, }, ] PROPERTY_DOOR_STATUS = "doorStatus" class PropertySensorMapping(TypedDict, total=False): """Represent a property sensor mapping dict type.""" property_name: str # required on_states: list[str] # required device_class: str enabled: bool # Mappings for property sensors PROPERTY_SENSOR_MAPPINGS: list[PropertySensorMapping] = [ { "property_name": PROPERTY_DOOR_STATUS, "on_states": ["open"], "device_class": DEVICE_CLASS_DOOR, "enabled": True, }, ] async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable ) -> None: """Set up Z-Wave binary sensor from config entry.""" client: ZwaveClient = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT] @callback def async_add_binary_sensor(info: ZwaveDiscoveryInfo) -> None: """Add Z-Wave Binary Sensor.""" entities: list[BinarySensorEntity] = [] if info.platform_hint == "notification": # Get all sensors from Notification CC states for state_key in info.primary_value.metadata.states: # ignore idle key (0) if state_key == "0": continue entities.append( ZWaveNotificationBinarySensor(config_entry, client, info, state_key) ) elif info.platform_hint == "property": entities.append(ZWavePropertyBinarySensor(config_entry, client, info)) else: # boolean sensor entities.append(ZWaveBooleanBinarySensor(config_entry, client, info)) async_add_entities(entities) hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append( async_dispatcher_connect( hass, f"{DOMAIN}_{config_entry.entry_id}_add_{BINARY_SENSOR_DOMAIN}", async_add_binary_sensor, ) ) class ZWaveBooleanBinarySensor(ZWaveBaseEntity, BinarySensorEntity): """Representation of a Z-Wave binary_sensor.""" def __init__( self, config_entry: ConfigEntry, client: ZwaveClient, info: ZwaveDiscoveryInfo, ) -> None: """Initialize a ZWaveBooleanBinarySensor entity.""" super().__init__(config_entry, client, info) self._name = self.generate_name(include_value_name=True) @property def is_on(self) -> bool | None: """Return if the 
sensor is on or off.""" if self.info.primary_value.value is None: return None return bool(self.info.primary_value.value) @property def device_class(self) -> str | None: """Return device class.""" if self.info.primary_value.command_class == CommandClass.BATTERY: return DEVICE_CLASS_BATTERY return None @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" # Legacy binary sensors are phased out (replaced by notification sensors) # Disable by default to not confuse users return bool( self.info.primary_value.command_class != CommandClass.SENSOR_BINARY or self.info.node.device_class.generic.key == 0x20 ) class ZWaveNotificationBinarySensor(ZWaveBaseEntity, BinarySensorEntity): """Representation of a Z-Wave binary_sensor from Notification CommandClass.""" def __init__( self, config_entry: ConfigEntry, client: ZwaveClient, info: ZwaveDiscoveryInfo, state_key: str, ) -> None: """Initialize a ZWaveNotificationBinarySensor entity.""" super().__init__(config_entry, client, info) self.state_key = state_key self._name = self.generate_name( include_value_name=True, alternate_value_name=self.info.primary_value.property_name, additional_info=[self.info.primary_value.metadata.states[self.state_key]], ) # check if we have a custom mapping for this value self._mapping_info = self._get_sensor_mapping() @property def is_on(self) -> bool | None: """Return if the sensor is on or off.""" if self.info.primary_value.value is None: return None return int(self.info.primary_value.value) == int(self.state_key) @property def device_class(self) -> str | None: """Return device class.""" return self._mapping_info.get("device_class") @property def unique_id(self) -> str: """Return unique id for this entity.""" return f"{super().unique_id}.{self.state_key}" @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" if not self._mapping_info: return True return self._mapping_info.get("enabled", True) @callback def _get_sensor_mapping(self) -> NotificationSensorMapping: """Try to get a device specific mapping for this sensor.""" for mapping in NOTIFICATION_SENSOR_MAPPINGS: if ( mapping["type"] != self.info.primary_value.metadata.cc_specific["notificationType"] ): continue if not mapping.get("states") or self.state_key in mapping["states"]: # match found return mapping return {} class ZWavePropertyBinarySensor(ZWaveBaseEntity, BinarySensorEntity): """Representation of a Z-Wave binary_sensor from a property.""" def __init__( self, config_entry: ConfigEntry, client: ZwaveClient, info: ZwaveDiscoveryInfo ) -> None: """Initialize a ZWavePropertyBinarySensor entity.""" super().__init__(config_entry, client, info) # check if we have a custom mapping for this value self._mapping_info = self._get_sensor_mapping() self._name = self.generate_name(include_value_name=True) @property def is_on(self) -> bool | None: """Return if the sensor is on or off.""" if self.info.primary_value.value is None: return None return self.info.primary_value.value in self._mapping_info["on_states"] @property def device_class(self) -> str | None: """Return device class.""" return self._mapping_info.get("device_class") @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" # We hide some more advanced sensors by default to not overwhelm users # unless explicitly stated in a mapping, assume 
disabled by default return self._mapping_info.get("enabled", False) @callback def _get_sensor_mapping(self) -> PropertySensorMapping: """Try to get a device specific mapping for this sensor.""" mapping_info = PropertySensorMapping() for mapping in PROPERTY_SENSOR_MAPPINGS: if mapping["property_name"] == self.info.primary_value.property_name: mapping_info = mapping.copy() break return mapping_info
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/zwave_js/binary_sensor.py
"""Support for Ness D8X/D16X devices.""" from collections import namedtuple import datetime from nessclient import ArmingState, Client import voluptuous as vol from homeassistant.components.binary_sensor import DEVICE_CLASSES from homeassistant.const import ( ATTR_CODE, ATTR_STATE, CONF_HOST, CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.helpers import config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send DOMAIN = "ness_alarm" DATA_NESS = "ness_alarm" CONF_DEVICE_PORT = "port" CONF_INFER_ARMING_STATE = "infer_arming_state" CONF_ZONES = "zones" CONF_ZONE_NAME = "name" CONF_ZONE_TYPE = "type" CONF_ZONE_ID = "id" ATTR_OUTPUT_ID = "output_id" DEFAULT_ZONES = [] DEFAULT_SCAN_INTERVAL = datetime.timedelta(minutes=1) DEFAULT_INFER_ARMING_STATE = False SIGNAL_ZONE_CHANGED = "ness_alarm.zone_changed" SIGNAL_ARMING_STATE_CHANGED = "ness_alarm.arming_state_changed" ZoneChangedData = namedtuple("ZoneChangedData", ["zone_id", "state"]) DEFAULT_ZONE_TYPE = "motion" ZONE_SCHEMA = vol.Schema( { vol.Required(CONF_ZONE_NAME): cv.string, vol.Required(CONF_ZONE_ID): cv.positive_int, vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): vol.In(DEVICE_CLASSES), } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_DEVICE_PORT): cv.port, vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL ): cv.positive_time_period, vol.Optional(CONF_ZONES, default=DEFAULT_ZONES): vol.All( cv.ensure_list, [ZONE_SCHEMA] ), vol.Optional( CONF_INFER_ARMING_STATE, default=DEFAULT_INFER_ARMING_STATE ): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) SERVICE_PANIC = "panic" SERVICE_AUX = "aux" SERVICE_SCHEMA_PANIC = vol.Schema({vol.Required(ATTR_CODE): cv.string}) SERVICE_SCHEMA_AUX = vol.Schema( { vol.Required(ATTR_OUTPUT_ID): cv.positive_int, vol.Optional(ATTR_STATE, default=True): cv.boolean, } ) async def async_setup(hass, config): """Set up the Ness Alarm platform.""" conf = config[DOMAIN] zones = conf[CONF_ZONES] host = conf[CONF_HOST] port = conf[CONF_DEVICE_PORT] scan_interval = conf[CONF_SCAN_INTERVAL] infer_arming_state = conf[CONF_INFER_ARMING_STATE] client = Client( host=host, port=port, loop=hass.loop, update_interval=scan_interval.total_seconds(), infer_arming_state=infer_arming_state, ) hass.data[DATA_NESS] = client async def _close(event): await client.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close) hass.async_create_task( async_load_platform(hass, "binary_sensor", DOMAIN, {CONF_ZONES: zones}, config) ) hass.async_create_task( async_load_platform(hass, "alarm_control_panel", DOMAIN, {}, config) ) def on_zone_change(zone_id: int, state: bool): """Receives and propagates zone state updates.""" async_dispatcher_send( hass, SIGNAL_ZONE_CHANGED, ZoneChangedData(zone_id=zone_id, state=state) ) def on_state_change(arming_state: ArmingState): """Receives and propagates arming state updates.""" async_dispatcher_send(hass, SIGNAL_ARMING_STATE_CHANGED, arming_state) client.on_zone_change(on_zone_change) client.on_state_change(on_state_change) # Force update for current arming status and current zone states hass.loop.create_task(client.keepalive()) hass.loop.create_task(client.update()) async def handle_panic(call): await client.panic(call.data[ATTR_CODE]) async def handle_aux(call): await client.aux(call.data[ATTR_OUTPUT_ID], call.data[ATTR_STATE]) hass.services.async_register( DOMAIN, SERVICE_PANIC, handle_panic, 
schema=SERVICE_SCHEMA_PANIC ) hass.services.async_register( DOMAIN, SERVICE_AUX, handle_aux, schema=SERVICE_SCHEMA_AUX ) return True
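As a rough illustration of what the CONFIG_SCHEMA above accepts, here is a standalone sketch using plain voluptuous validators in place of homeassistant's cv helpers; the host, port, and zone values are made up.

# Standalone sketch; vol.Coerce/vol.Range stand in for homeassistant's
# cv.string / cv.port / cv.positive_int helpers. Values are hypothetical.
import voluptuous as vol

ZONE_SCHEMA = vol.Schema(
    {
        vol.Required("name"): str,
        vol.Required("id"): vol.All(vol.Coerce(int), vol.Range(min=0)),
        vol.Optional("type", default="motion"): str,
    }
)

CONFIG_SCHEMA = vol.Schema(
    {
        vol.Required("host"): str,
        vol.Required("port"): vol.All(vol.Coerce(int), vol.Range(min=1, max=65535)),
        vol.Optional("zones", default=[]): [ZONE_SCHEMA],
    }
)

conf = CONFIG_SCHEMA(
    {"host": "alarm.local", "port": 2401, "zones": [{"name": "Hallway", "id": 1}]}
)
print(conf["zones"][0]["type"])  # -> "motion" (filled in by the default)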
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/ness_alarm/__init__.py
"""Sonos specific exceptions.""" from homeassistant.components.media_player.errors import BrowseError class UnknownMediaType(BrowseError): """Unknown media type."""
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/sonos/exception.py
"""Closures channels module for Zigbee Home Automation.""" import zigpy.zcl.clusters.closures as closures from homeassistant.core import callback from .. import registries from ..const import REPORT_CONFIG_IMMEDIATE, SIGNAL_ATTR_UPDATED from .base import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.DoorLock.cluster_id) class DoorLockChannel(ZigbeeChannel): """Door lock channel.""" _value_attribute = 0 REPORT_CONFIG = ({"attr": "lock_state", "config": REPORT_CONFIG_IMMEDIATE},) async def async_update(self): """Retrieve latest state.""" result = await self.get_attribute_value("lock_state", from_cache=True) if result is not None: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", 0, "lock_state", result ) @callback def cluster_command(self, tsn, command_id, args): """Handle a cluster command received on this cluster.""" if ( self._cluster.client_commands is None or self._cluster.client_commands.get(command_id) is None ): return command_name = self._cluster.client_commands.get(command_id, [command_id])[0] if command_name == "operation_event_notification": self.zha_send_event( command_name, { "source": args[0].name, "operation": args[1].name, "code_slot": (args[2] + 1), # start code slots at 1 }, ) @callback def attribute_updated(self, attrid, value): """Handle attribute update from lock cluster.""" attr_name = self.cluster.attributes.get(attrid, [attrid])[0] self.debug( "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value ) if attrid == self._value_attribute: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value ) async def async_set_user_code(self, code_slot: int, user_code: str) -> None: """Set the user code for the code slot.""" await self.set_pin_code( code_slot - 1, # start code slots at 1, Zigbee internals use 0 closures.DoorLock.UserStatus.Enabled, closures.DoorLock.UserType.Unrestricted, user_code, ) async def async_enable_user_code(self, code_slot: int) -> None: """Enable the code slot.""" await self.set_user_status(code_slot - 1, closures.DoorLock.UserStatus.Enabled) async def async_disable_user_code(self, code_slot: int) -> None: """Disable the code slot.""" await self.set_user_status(code_slot - 1, closures.DoorLock.UserStatus.Disabled) async def async_get_user_code(self, code_slot: int) -> int: """Get the user code from the code slot.""" result = await self.get_pin_code(code_slot - 1) return result async def async_clear_user_code(self, code_slot: int) -> None: """Clear the code slot.""" await self.clear_pin_code(code_slot - 1) async def async_clear_all_user_codes(self) -> None: """Clear all code slots.""" await self.clear_all_pin_codes() async def async_set_user_type(self, code_slot: int, user_type: str) -> None: """Set user type.""" await self.set_user_type(code_slot - 1, user_type) async def async_get_user_type(self, code_slot: int) -> str: """Get user type.""" result = await self.get_user_type(code_slot - 1) return result @registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.Shade.cluster_id) class Shade(ZigbeeChannel): """Shade channel.""" @registries.CLIENT_CHANNELS_REGISTRY.register(closures.WindowCovering.cluster_id) class WindowCoveringClient(ClientChannel): """Window client channel.""" @registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.WindowCovering.cluster_id) class WindowCovering(ZigbeeChannel): """Window channel.""" _value_attribute = 8 REPORT_CONFIG = ( {"attr": "current_position_lift_percentage", "config": REPORT_CONFIG_IMMEDIATE}, ) async def 
async_update(self): """Retrieve latest state.""" result = await self.get_attribute_value( "current_position_lift_percentage", from_cache=False ) self.debug("read current position: %s", result) if result is not None: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", 8, "current_position_lift_percentage", result, ) @callback def attribute_updated(self, attrid, value): """Handle attribute update from window_covering cluster.""" attr_name = self.cluster.attributes.get(attrid, [attrid])[0] self.debug( "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value ) if attrid == self._value_attribute: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value )
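The attribute_updated handlers above share one pattern: only the channel's primary attribute (attrid == _value_attribute) is forwarded as a signal. A stripped-down sketch of that gate, with hypothetical attribute ids and a plain callable standing in for the signal dispatcher:

# Stripped-down sketch of the attribute gate used by DoorLockChannel and
# WindowCovering above; the ids and the signal sink are hypothetical.
VALUE_ATTRIBUTE = 8  # current_position_lift_percentage on WindowCovering

ATTRIBUTES = {8: "current_position_lift_percentage", 7: "mode"}


def attribute_updated(attrid: int, value, send_signal) -> None:
    """Forward only the primary attribute; ignore everything else."""
    attr_name = ATTRIBUTES.get(attrid, str(attrid))
    if attrid == VALUE_ATTRIBUTE:
        send_signal(attrid, attr_name, value)


updates = []
attribute_updated(8, 55, lambda *args: updates.append(args))
attribute_updated(7, 1, lambda *args: updates.append(args))
print(updates)  # [(8, 'current_position_lift_percentage', 55)]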
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/zha/core/channels/closures.py
"""Get ride details and liveboard details for NMBS (Belgian railway).""" import logging from pyrail import iRail import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_NAME, CONF_SHOW_ON_MAP, TIME_MINUTES, ) import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "NMBS" DEFAULT_ICON = "mdi:train" DEFAULT_ICON_ALERT = "mdi:alert-octagon" CONF_STATION_FROM = "station_from" CONF_STATION_TO = "station_to" CONF_STATION_LIVE = "station_live" CONF_EXCLUDE_VIAS = "exclude_vias" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_STATION_FROM): cv.string, vol.Required(CONF_STATION_TO): cv.string, vol.Optional(CONF_STATION_LIVE): cv.string, vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean, } ) def get_time_until(departure_time=None): """Calculate the time between now and a train's departure time.""" if departure_time is None: return 0 delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now() return round(delta.total_seconds() / 60) def get_delay_in_minutes(delay=0): """Get the delay in minutes from a delay in seconds.""" return round(int(delay) / 60) def get_ride_duration(departure_time, arrival_time, delay=0): """Calculate the total travel time in minutes.""" duration = dt_util.utc_from_timestamp( int(arrival_time) ) - dt_util.utc_from_timestamp(int(departure_time)) duration_time = int(round(duration.total_seconds() / 60)) return duration_time + get_delay_in_minutes(delay) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the NMBS sensor with iRail API.""" api_client = iRail() name = config[CONF_NAME] show_on_map = config[CONF_SHOW_ON_MAP] station_from = config[CONF_STATION_FROM] station_to = config[CONF_STATION_TO] station_live = config.get(CONF_STATION_LIVE) excl_vias = config[CONF_EXCLUDE_VIAS] sensors = [ NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias) ] if station_live is not None: sensors.append( NMBSLiveBoard(api_client, station_live, station_from, station_to) ) add_entities(sensors, True) class NMBSLiveBoard(SensorEntity): """Get the next train from a station's liveboard.""" def __init__(self, api_client, live_station, station_from, station_to): """Initialize the sensor for getting liveboard data.""" self._station = live_station self._api_client = api_client self._station_from = station_from self._station_to = station_to self._attrs = {} self._state = None @property def name(self): """Return the sensor default name.""" return f"NMBS Live ({self._station})" @property def unique_id(self): """Return a unique ID.""" unique_id = f"{self._station}_{self._station_from}_{self._station_to}" return f"nmbs_live_{unique_id}" @property def icon(self): """Return the default icon or an alert icon if delays.""" if self._attrs and int(self._attrs["delay"]) > 0: return DEFAULT_ICON_ALERT return DEFAULT_ICON @property def state(self): """Return sensor state.""" return self._state @property def extra_state_attributes(self): """Return the sensor attributes if data is available.""" if self._state is None or not self._attrs: return None delay = get_delay_in_minutes(self._attrs["delay"]) departure = get_time_until(self._attrs["time"]) attrs = { "departure": f"In {departure} minutes", 
"departure_minutes": departure, "extra_train": int(self._attrs["isExtra"]) > 0, "vehicle_id": self._attrs["vehicle"], "monitored_station": self._station, ATTR_ATTRIBUTION: "https://api.irail.be/", } if delay > 0: attrs["delay"] = f"{delay} minutes" attrs["delay_minutes"] = delay return attrs def update(self): """Set the state equal to the next departure.""" liveboard = self._api_client.get_liveboard(self._station) if liveboard is None or not liveboard["departures"]: return next_departure = liveboard["departures"]["departure"][0] self._attrs = next_departure self._state = ( f"Track {next_departure['platform']} - {next_departure['station']}" ) class NMBSSensor(SensorEntity): """Get the the total travel time for a given connection.""" def __init__( self, api_client, name, show_on_map, station_from, station_to, excl_vias ): """Initialize the NMBS connection sensor.""" self._name = name self._show_on_map = show_on_map self._api_client = api_client self._station_from = station_from self._station_to = station_to self._excl_vias = excl_vias self._attrs = {} self._state = None @property def name(self): """Return the name of the sensor.""" return self._name @property def unit_of_measurement(self): """Return the unit of measurement.""" return TIME_MINUTES @property def icon(self): """Return the sensor default icon or an alert icon if any delay.""" if self._attrs: delay = get_delay_in_minutes(self._attrs["departure"]["delay"]) if delay > 0: return "mdi:alert-octagon" return "mdi:train" @property def extra_state_attributes(self): """Return sensor attributes if data is available.""" if self._state is None or not self._attrs: return None delay = get_delay_in_minutes(self._attrs["departure"]["delay"]) departure = get_time_until(self._attrs["departure"]["time"]) attrs = { "departure": f"In {departure} minutes", "departure_minutes": departure, "destination": self._station_to, "direction": self._attrs["departure"]["direction"]["name"], "platform_arriving": self._attrs["arrival"]["platform"], "platform_departing": self._attrs["departure"]["platform"], "vehicle_id": self._attrs["departure"]["vehicle"], ATTR_ATTRIBUTION: "https://api.irail.be/", } if self._show_on_map and self.station_coordinates: attrs[ATTR_LATITUDE] = self.station_coordinates[0] attrs[ATTR_LONGITUDE] = self.station_coordinates[1] if self.is_via_connection and not self._excl_vias: via = self._attrs["vias"]["via"][0] attrs["via"] = via["station"] attrs["via_arrival_platform"] = via["arrival"]["platform"] attrs["via_transfer_platform"] = via["departure"]["platform"] attrs["via_transfer_time"] = get_delay_in_minutes( via["timeBetween"] ) + get_delay_in_minutes(via["departure"]["delay"]) if delay > 0: attrs["delay"] = f"{delay} minutes" attrs["delay_minutes"] = delay return attrs @property def state(self): """Return the state of the device.""" return self._state @property def station_coordinates(self): """Get the lat, long coordinates for station.""" if self._state is None or not self._attrs: return [] latitude = float(self._attrs["departure"]["stationinfo"]["locationY"]) longitude = float(self._attrs["departure"]["stationinfo"]["locationX"]) return [latitude, longitude] @property def is_via_connection(self): """Return whether the connection goes through another station.""" if not self._attrs: return False return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0 def update(self): """Set the state to the duration of a connection.""" connections = self._api_client.get_connections( self._station_from, self._station_to ) if connections 
is None or not connections["connection"]: return if int(connections["connection"][0]["departure"]["left"]) > 0: next_connection = connections["connection"][1] else: next_connection = connections["connection"][0] self._attrs = next_connection if self._excl_vias and self.is_via_connection: _LOGGER.debug( "Skipping update of NMBSSensor because this connection is a via" ) return duration = get_ride_duration( next_connection["departure"]["time"], next_connection["arrival"]["time"], next_connection["departure"]["delay"], ) self._state = duration
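The time helpers near the top of this sensor module are plain minute arithmetic. A self-contained restatement follows, with homeassistant's dt_util replaced by the standard-library datetime module; the timestamps and the 180-second delay are made up.

# Self-contained restatement of get_delay_in_minutes / get_ride_duration;
# dt_util is replaced by datetime and the input values are hypothetical.
from datetime import datetime, timezone


def get_delay_in_minutes(delay=0):
    """Convert an API delay in seconds to whole minutes."""
    return round(int(delay) / 60)


def get_ride_duration(departure_time, arrival_time, delay=0):
    """Total travel time in minutes, including the reported delay."""
    duration = datetime.fromtimestamp(
        int(arrival_time), tz=timezone.utc
    ) - datetime.fromtimestamp(int(departure_time), tz=timezone.utc)
    return int(round(duration.total_seconds() / 60)) + get_delay_in_minutes(delay)


# A 2520 s (42 min) ride plus a 180 s delay reported by the API:
print(get_ride_duration(1_600_000_000, 1_600_002_520, 180))  # 42 + 3 = 45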
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/nmbs/sensor.py
"""Support for Netgear Arlo IP cameras.""" import logging from haffmpeg.camera import CameraMjpeg import voluptuous as vol from homeassistant.components.camera import PLATFORM_SCHEMA, Camera from homeassistant.components.ffmpeg import DATA_FFMPEG from homeassistant.const import ATTR_BATTERY_LEVEL from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import DATA_ARLO, DEFAULT_BRAND, SIGNAL_UPDATE_ARLO _LOGGER = logging.getLogger(__name__) ARLO_MODE_ARMED = "armed" ARLO_MODE_DISARMED = "disarmed" ATTR_BRIGHTNESS = "brightness" ATTR_FLIPPED = "flipped" ATTR_MIRRORED = "mirrored" ATTR_MOTION = "motion_detection_sensitivity" ATTR_POWERSAVE = "power_save_mode" ATTR_SIGNAL_STRENGTH = "signal_strength" ATTR_UNSEEN_VIDEOS = "unseen_videos" ATTR_LAST_REFRESH = "last_refresh" CONF_FFMPEG_ARGUMENTS = "ffmpeg_arguments" DEFAULT_ARGUMENTS = "-pred 1" POWERSAVE_MODE_MAPPING = {1: "best_battery_life", 2: "optimized", 3: "best_video"} PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_FFMPEG_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up an Arlo IP Camera.""" arlo = hass.data[DATA_ARLO] cameras = [] for camera in arlo.cameras: cameras.append(ArloCam(hass, camera, config)) add_entities(cameras) class ArloCam(Camera): """An implementation of a Netgear Arlo IP camera.""" def __init__(self, hass, camera, device_info): """Initialize an Arlo camera.""" super().__init__() self._camera = camera self._name = self._camera.name self._motion_status = False self._ffmpeg = hass.data[DATA_FFMPEG] self._ffmpeg_arguments = device_info.get(CONF_FFMPEG_ARGUMENTS) self._last_refresh = None self.attrs = {} def camera_image(self): """Return a still image response from the camera.""" return self._camera.last_image_from_cache async def async_added_to_hass(self): """Register callbacks.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_UPDATE_ARLO, self.async_write_ha_state ) ) async def handle_async_mjpeg_stream(self, request): """Generate an HTTP MJPEG stream from the camera.""" video = await self.hass.async_add_executor_job( getattr, self._camera, "last_video" ) if not video: error_msg = ( f"Video not found for {self.name}. " f"Is it older than {self._camera.min_days_vdo_cache} days?" 
) _LOGGER.error(error_msg) return stream = CameraMjpeg(self._ffmpeg.binary) await stream.open_camera(video.video_url, extra_cmd=self._ffmpeg_arguments) try: stream_reader = await stream.get_reader() return await async_aiohttp_proxy_stream( self.hass, request, stream_reader, self._ffmpeg.ffmpeg_stream_content_type, ) finally: await stream.close() @property def name(self): """Return the name of this camera.""" return self._name @property def extra_state_attributes(self): """Return the state attributes.""" return { name: value for name, value in ( (ATTR_BATTERY_LEVEL, self._camera.battery_level), (ATTR_BRIGHTNESS, self._camera.brightness), (ATTR_FLIPPED, self._camera.flip_state), (ATTR_MIRRORED, self._camera.mirror_state), (ATTR_MOTION, self._camera.motion_detection_sensitivity), ( ATTR_POWERSAVE, POWERSAVE_MODE_MAPPING.get(self._camera.powersave_mode), ), (ATTR_SIGNAL_STRENGTH, self._camera.signal_strength), (ATTR_UNSEEN_VIDEOS, self._camera.unseen_videos), ) if value is not None } @property def model(self): """Return the camera model.""" return self._camera.model_id @property def brand(self): """Return the camera brand.""" return DEFAULT_BRAND @property def motion_detection_enabled(self): """Return the camera motion detection status.""" return self._motion_status def set_base_station_mode(self, mode): """Set the mode in the base station.""" # Get the list of base stations identified by the library base_stations = self.hass.data[DATA_ARLO].base_stations # Some Arlo cameras do not have a base station, # so check whether a base station was detected first; # if so, choose the primary base station # and set the mode on it if base_stations: primary_base_station = base_stations[0] primary_base_station.mode = mode def enable_motion_detection(self): """Enable motion detection in the base station (Arm).""" self._motion_status = True self.set_base_station_mode(ARLO_MODE_ARMED) def disable_motion_detection(self): """Disable motion detection in the base station (Disarm).""" self._motion_status = False self.set_base_station_mode(ARLO_MODE_DISARMED)
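The extra_state_attributes property above builds its dict with a comprehension over (name, value) pairs so that attributes the camera does not report are silently dropped. The same pattern in isolation, with dummy values:

# The None-dropping attribute pattern from ArloCam.extra_state_attributes,
# shown standalone; the values are dummies.
battery_level = 87
brightness = None  # not reported by this camera

attrs = {
    name: value
    for name, value in (
        ("battery_level", battery_level),
        ("brightness", brightness),
    )
    if value is not None
}
print(attrs)  # {'battery_level': 87}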
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/arlo/camera.py
"""Support for Z-Wave fans.""" import math from homeassistant.components.fan import ( DOMAIN as FAN_DOMAIN, SUPPORT_SET_SPEED, FanEntity, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.util.percentage import ( int_states_in_range, percentage_to_ranged_value, ranged_value_to_percentage, ) from .const import DATA_UNSUBSCRIBE, DOMAIN from .entity import ZWaveDeviceEntity SUPPORTED_FEATURES = SUPPORT_SET_SPEED SPEED_RANGE = (1, 99) # off is not included async def async_setup_entry(hass, config_entry, async_add_entities): """Set up Z-Wave Fan from Config Entry.""" @callback def async_add_fan(values): """Add Z-Wave Fan.""" fan = ZwaveFan(values) async_add_entities([fan]) hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append( async_dispatcher_connect(hass, f"{DOMAIN}_new_{FAN_DOMAIN}", async_add_fan) ) class ZwaveFan(ZWaveDeviceEntity, FanEntity): """Representation of a Z-Wave fan.""" async def async_set_percentage(self, percentage): """Set the speed percentage of the fan.""" if percentage is None: # Value 255 tells device to return to previous value zwave_speed = 255 elif percentage == 0: zwave_speed = 0 else: zwave_speed = math.ceil(percentage_to_ranged_value(SPEED_RANGE, percentage)) self.values.primary.send_value(zwave_speed) async def async_turn_on( self, speed=None, percentage=None, preset_mode=None, **kwargs ): """Turn the device on.""" await self.async_set_percentage(percentage) async def async_turn_off(self, **kwargs): """Turn the device off.""" self.values.primary.send_value(0) @property def is_on(self): """Return true if device is on (speed above 0).""" return self.values.primary.value > 0 @property def percentage(self): """Return the current speed. The Z-Wave speed value is a byte 0-255. 255 means previous value. The normal range of the speed is 0-99. 0 means off. """ return ranged_value_to_percentage(SPEED_RANGE, self.values.primary.value) @property def speed_count(self) -> int: """Return the number of speeds the fan supports.""" return int_states_in_range(SPEED_RANGE) @property def supported_features(self): """Flag supported features.""" return SUPPORTED_FEATURES
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/ozw/fan.py
"""Config flow for Ring integration.""" import logging from oauthlib.oauth2 import AccessDeniedError, MissingTokenError from ring_doorbell import Auth import voluptuous as vol from homeassistant import config_entries, const, core, exceptions from . import DOMAIN _LOGGER = logging.getLogger(__name__) async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect.""" auth = Auth(f"HomeAssistant/{const.__version__}") try: token = await hass.async_add_executor_job( auth.fetch_token, data["username"], data["password"], data.get("2fa"), ) except MissingTokenError as err: raise Require2FA from err except AccessDeniedError as err: raise InvalidAuth from err return token class RingConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Ring.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL user_pass = None async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: try: token = await validate_input(self.hass, user_input) await self.async_set_unique_id(user_input["username"]) return self.async_create_entry( title=user_input["username"], data={"username": user_input["username"], "token": token}, ) except Require2FA: self.user_pass = user_input return await self.async_step_2fa() except InvalidAuth: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return self.async_show_form( step_id="user", data_schema=vol.Schema({"username": str, "password": str}), errors=errors, ) async def async_step_2fa(self, user_input=None): """Handle 2fa step.""" if user_input: return await self.async_step_user({**self.user_pass, **user_input}) return self.async_show_form( step_id="2fa", data_schema=vol.Schema({"2fa": str}), ) class Require2FA(exceptions.HomeAssistantError): """Error to indicate we require 2FA.""" class InvalidAuth(exceptions.HomeAssistantError): """Error to indicate there is invalid auth."""
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/ring/config_flow.py
"""Describe logbook events.""" from homeassistant.components.logbook import LazyEventPartialState from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME from homeassistant.core import HomeAssistant, callback from . import ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_TRIGGERED @callback def async_describe_events(hass: HomeAssistant, async_describe_event): # type: ignore """Describe logbook events.""" @callback def async_describe_logbook_event(event: LazyEventPartialState): # type: ignore """Describe a logbook event.""" data = event.data message = "has been triggered" if ATTR_SOURCE in data: message = f"{message} by {data[ATTR_SOURCE]}" return { "name": data.get(ATTR_NAME), "message": message, "source": data.get(ATTR_SOURCE), "entity_id": data.get(ATTR_ENTITY_ID), "context_id": event.context_id, } async_describe_event( DOMAIN, EVENT_AUTOMATION_TRIGGERED, async_describe_logbook_event )
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/automation/logbook.py
"""Interface implementation for cloud client.""" from __future__ import annotations import asyncio import logging from pathlib import Path from typing import Any import aiohttp from hass_nabucasa.client import CloudClient as Interface from homeassistant.components.alexa import ( errors as alexa_errors, smart_home as alexa_sh, ) from homeassistant.components.google_assistant import const as gc, smart_home as ga from homeassistant.const import HTTP_OK from homeassistant.core import Context, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later from homeassistant.util.aiohttp import MockRequest from . import alexa_config, google_config, utils from .const import DISPATCHER_REMOTE_UPDATE, DOMAIN from .prefs import CloudPreferences class CloudClient(Interface): """Interface class for Home Assistant Cloud.""" def __init__( self, hass: HomeAssistant, prefs: CloudPreferences, websession: aiohttp.ClientSession, alexa_user_config: dict[str, Any], google_user_config: dict[str, Any], ): """Initialize client interface to Cloud.""" self._hass = hass self._prefs = prefs self._websession = websession self.google_user_config = google_user_config self.alexa_user_config = alexa_user_config self._alexa_config = None self._google_config = None @property def base_path(self) -> Path: """Return path to base dir.""" return Path(self._hass.config.config_dir) @property def prefs(self) -> CloudPreferences: """Return Cloud preferences.""" return self._prefs @property def loop(self) -> asyncio.BaseEventLoop: """Return client loop.""" return self._hass.loop @property def websession(self) -> aiohttp.ClientSession: """Return client session for aiohttp.""" return self._websession @property def aiohttp_runner(self) -> aiohttp.web.AppRunner: """Return client webinterface aiohttp application.""" return self._hass.http.runner @property def cloudhooks(self) -> dict[str, dict[str, str]]: """Return list of cloudhooks.""" return self._prefs.cloudhooks @property def remote_autostart(self) -> bool: """Return true if we want start a remote connection.""" return self._prefs.remote_enabled async def get_alexa_config(self) -> alexa_config.AlexaConfig: """Return Alexa config.""" if self._alexa_config is None: assert self.cloud is not None cloud_user = await self._prefs.get_cloud_user() self._alexa_config = alexa_config.AlexaConfig( self._hass, self.alexa_user_config, cloud_user, self._prefs, self.cloud ) return self._alexa_config async def get_google_config(self) -> google_config.CloudGoogleConfig: """Return Google config.""" if not self._google_config: assert self.cloud is not None cloud_user = await self._prefs.get_cloud_user() self._google_config = google_config.CloudGoogleConfig( self._hass, self.google_user_config, cloud_user, self._prefs, self.cloud ) await self._google_config.async_initialize() return self._google_config async def logged_in(self) -> None: """When user logs in.""" is_new_user = await self.prefs.async_set_username(self.cloud.username) async def enable_alexa(_): """Enable Alexa.""" aconf = await self.get_alexa_config() try: await aconf.async_enable_proactive_mode() except aiohttp.ClientError as err: # If no internet available yet if self._hass.is_running: logging.getLogger(__package__).warning( "Unable to activate Alexa Report State: %s. 
Retrying in 30 seconds", err, ) async_call_later(self._hass, 30, enable_alexa) except alexa_errors.NoTokenAvailable: pass async def enable_google(_): """Enable Google.""" gconf = await self.get_google_config() gconf.async_enable_local_sdk() if gconf.should_report_state: gconf.async_enable_report_state() if is_new_user: await gconf.async_sync_entities(gconf.agent_user_id) tasks = [] if self._prefs.alexa_enabled and self._prefs.alexa_report_state: tasks.append(enable_alexa) if self._prefs.google_enabled: tasks.append(enable_google) if tasks: await asyncio.gather(*[task(None) for task in tasks]) async def cleanups(self) -> None: """Cleanup some stuff after logout.""" await self.prefs.async_set_username(None) self._google_config = None @callback def user_message(self, identifier: str, title: str, message: str) -> None: """Create a message for user to UI.""" self._hass.components.persistent_notification.async_create( message, title, identifier ) @callback def dispatcher_message(self, identifier: str, data: Any = None) -> None: """Match cloud notification to dispatcher.""" if identifier.startswith("remote_"): async_dispatcher_send(self._hass, DISPATCHER_REMOTE_UPDATE, data) async def async_alexa_message(self, payload: dict[Any, Any]) -> dict[Any, Any]: """Process cloud alexa message to client.""" cloud_user = await self._prefs.get_cloud_user() aconfig = await self.get_alexa_config() return await alexa_sh.async_handle_message( self._hass, aconfig, payload, context=Context(user_id=cloud_user), enabled=self._prefs.alexa_enabled, ) async def async_google_message(self, payload: dict[Any, Any]) -> dict[Any, Any]: """Process cloud google message to client.""" if not self._prefs.google_enabled: return ga.turned_off_response(payload) gconf = await self.get_google_config() return await ga.async_handle_message( self._hass, gconf, gconf.cloud_user, payload, gc.SOURCE_CLOUD ) async def async_webhook_message(self, payload: dict[Any, Any]) -> dict[Any, Any]: """Process cloud webhook message to client.""" cloudhook_id = payload["cloudhook_id"] found = None for cloudhook in self._prefs.cloudhooks.values(): if cloudhook["cloudhook_id"] == cloudhook_id: found = cloudhook break if found is None: return {"status": HTTP_OK} request = MockRequest( content=payload["body"].encode("utf-8"), headers=payload["headers"], method=payload["method"], query_string=payload["query"], mock_source=DOMAIN, ) response = await self._hass.components.webhook.async_handle_webhook( found["webhook_id"], request ) response_dict = utils.aiohttp_serialize_response(response) body = response_dict.get("body") return { "body": body, "status": response_dict["status"], "headers": {"Content-Type": response.content_type}, } async def async_cloudhooks_update(self, data: dict[str, dict[str, str]]) -> None: """Update local list of cloudhooks.""" await self._prefs.async_update(cloudhooks=data)
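Webhook dispatch in async_webhook_message hinges on a linear scan of the stored cloudhooks keyed by cloudhook_id. A standalone sketch of just that lookup (hook data hypothetical, structure mirroring the loop above):

cloudhooks = {
    "wh1": {"cloudhook_id": "ch-111", "webhook_id": "wh1"},
    "wh2": {"cloudhook_id": "ch-222", "webhook_id": "wh2"},
}


def find_cloudhook(cloudhook_id: str):
    """Return the stored hook matching cloudhook_id, or None."""
    for cloudhook in cloudhooks.values():
        if cloudhook["cloudhook_id"] == cloudhook_id:
            return cloudhook
    return None


assert find_cloudhook("ch-222")["webhook_id"] == "wh2"
assert find_cloudhook("ch-999") is None  # unknown hooks answer with a bare 200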
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/cloud/client.py
"""Config flow for Elk-M1 Control integration.""" import asyncio import logging from urllib.parse import urlparse import elkm1_lib as elkm1 import voluptuous as vol from homeassistant import config_entries, exceptions from homeassistant.const import ( CONF_ADDRESS, CONF_HOST, CONF_PASSWORD, CONF_PREFIX, CONF_PROTOCOL, CONF_TEMPERATURE_UNIT, CONF_USERNAME, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.util import slugify from . import async_wait_for_elk_to_sync from .const import CONF_AUTO_CONFIGURE, DOMAIN _LOGGER = logging.getLogger(__name__) PROTOCOL_MAP = {"secure": "elks://", "non-secure": "elk://", "serial": "serial://"} DATA_SCHEMA = vol.Schema( { vol.Required(CONF_PROTOCOL, default="secure"): vol.In( ["secure", "non-secure", "serial"] ), vol.Required(CONF_ADDRESS): str, vol.Optional(CONF_USERNAME, default=""): str, vol.Optional(CONF_PASSWORD, default=""): str, vol.Optional(CONF_PREFIX, default=""): str, vol.Optional(CONF_TEMPERATURE_UNIT, default=TEMP_FAHRENHEIT): vol.In( [TEMP_FAHRENHEIT, TEMP_CELSIUS] ), } ) VALIDATE_TIMEOUT = 35 async def validate_input(data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ userid = data.get(CONF_USERNAME) password = data.get(CONF_PASSWORD) prefix = data[CONF_PREFIX] url = _make_url_from_data(data) requires_password = url.startswith("elks://") if requires_password and (not userid or not password): raise InvalidAuth elk = elkm1.Elk( {"url": url, "userid": userid, "password": password, "element_list": ["panel"]} ) elk.connect() if not await async_wait_for_elk_to_sync(elk, VALIDATE_TIMEOUT, url): raise InvalidAuth device_name = data[CONF_PREFIX] if data[CONF_PREFIX] else "ElkM1" # Return info that you want to store in the config entry. 
return {"title": device_name, CONF_HOST: url, CONF_PREFIX: slugify(prefix)} def _make_url_from_data(data): host = data.get(CONF_HOST) if host: return host protocol = PROTOCOL_MAP[data[CONF_PROTOCOL]] address = data[CONF_ADDRESS] return f"{protocol}{address}" class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Elk-M1 Control.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH def __init__(self): """Initialize the elkm1 config flow.""" self.importing = False async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: if self._url_already_configured(_make_url_from_data(user_input)): return self.async_abort(reason="address_already_configured") try: info = await validate_input(user_input) except asyncio.TimeoutError: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" if "base" not in errors: await self.async_set_unique_id(user_input[CONF_PREFIX]) self._abort_if_unique_id_configured() if self.importing: return self.async_create_entry(title=info["title"], data=user_input) return self.async_create_entry( title=info["title"], data={ CONF_HOST: info[CONF_HOST], CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_AUTO_CONFIGURE: True, CONF_TEMPERATURE_UNIT: user_input[CONF_TEMPERATURE_UNIT], CONF_PREFIX: info[CONF_PREFIX], }, ) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) async def async_step_import(self, user_input): """Handle import.""" self.importing = True return await self.async_step_user(user_input) def _url_already_configured(self, url): """See if we already have a elkm1 matching user input configured.""" existing_hosts = { urlparse(entry.data[CONF_HOST]).hostname for entry in self._async_current_entries() } return urlparse(url).hostname in existing_hosts class InvalidAuth(exceptions.HomeAssistantError): """Error to indicate there is invalid auth."""
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/elkm1/config_flow.py
"""Support for HDMI CEC devices as switches.""" import logging from homeassistant.components.switch import DOMAIN, SwitchEntity from homeassistant.const import STATE_OFF, STATE_ON, STATE_STANDBY from . import ATTR_NEW, CecEntity _LOGGER = logging.getLogger(__name__) ENTITY_ID_FORMAT = DOMAIN + ".{}" def setup_platform(hass, config, add_entities, discovery_info=None): """Find and return HDMI devices as switches.""" if ATTR_NEW in discovery_info: _LOGGER.info("Setting up HDMI devices %s", discovery_info[ATTR_NEW]) entities = [] for device in discovery_info[ATTR_NEW]: hdmi_device = hass.data.get(device) entities.append(CecSwitchEntity(hdmi_device, hdmi_device.logical_address)) add_entities(entities, True) class CecSwitchEntity(CecEntity, SwitchEntity): """Representation of a HDMI device as a Switch.""" def __init__(self, device, logical) -> None: """Initialize the HDMI device.""" CecEntity.__init__(self, device, logical) self.entity_id = f"{DOMAIN}.hdmi_{hex(self._logical_address)[2:]}" def turn_on(self, **kwargs) -> None: """Turn device on.""" self._device.turn_on() self._state = STATE_ON self.schedule_update_ha_state(force_refresh=False) def turn_off(self, **kwargs) -> None: """Turn device off.""" self._device.turn_off() self._state = STATE_OFF self.schedule_update_ha_state(force_refresh=False) def toggle(self, **kwargs): """Toggle the entity.""" self._device.toggle() if self._state == STATE_ON: self._state = STATE_OFF else: self._state = STATE_ON self.schedule_update_ha_state(force_refresh=False) @property def is_on(self) -> bool: """Return True if entity is on.""" return self._state == STATE_ON @property def is_standby(self): """Return true if device is in standby.""" return self._state == STATE_OFF or self._state == STATE_STANDBY @property def state(self) -> str: """Return the cached state of device.""" return self._state
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/hdmi_cec/switch.py
"""Support for SMS notification services.""" import logging import gammu # pylint: disable=import-error import voluptuous as vol from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService from homeassistant.const import CONF_NAME, CONF_RECIPIENT import homeassistant.helpers.config_validation as cv from .const import DOMAIN, SMS_GATEWAY _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_RECIPIENT): cv.string, vol.Optional(CONF_NAME): cv.string} ) def get_service(hass, config, discovery_info=None): """Get the SMS notification service.""" if SMS_GATEWAY not in hass.data[DOMAIN]: _LOGGER.error("SMS gateway not found, cannot initialize service") return gateway = hass.data[DOMAIN][SMS_GATEWAY] if discovery_info is None: number = config[CONF_RECIPIENT] else: number = discovery_info[CONF_RECIPIENT] return SMSNotificationService(gateway, number) class SMSNotificationService(BaseNotificationService): """Implement the notification service for SMS.""" def __init__(self, gateway, number): """Initialize the service.""" self.gateway = gateway self.number = number async def async_send_message(self, message="", **kwargs): """Send SMS message.""" smsinfo = { "Class": -1, "Unicode": False, "Entries": [{"ID": "ConcatenatedTextLong", "Buffer": message}], } try: # Encode messages encoded = gammu.EncodeSMS(smsinfo) except gammu.GSMError as exc: _LOGGER.error("Encoding message %s failed: %s", message, exc) return # Send messages for encoded_message in encoded: # Fill in numbers encoded_message["SMSC"] = {"Location": 1} encoded_message["Number"] = self.number try: # Actually send the message await self.gateway.send_sms_async(encoded_message) except gammu.GSMError as exc: _LOGGER.error("Sending to %s failed: %s", self.number, exc)
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/sms/notify.py
"""Support for Fibaro binary sensors.""" from homeassistant.components.binary_sensor import ( DEVICE_CLASS_DOOR, DEVICE_CLASS_MOTION, DEVICE_CLASS_SMOKE, DEVICE_CLASS_WINDOW, DOMAIN, BinarySensorEntity, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_ICON from . import FIBARO_DEVICES, FibaroDevice SENSOR_TYPES = { "com.fibaro.floodSensor": ["Flood", "mdi:water", "flood"], "com.fibaro.motionSensor": ["Motion", "mdi:run", DEVICE_CLASS_MOTION], "com.fibaro.doorSensor": ["Door", "mdi:window-open", DEVICE_CLASS_DOOR], "com.fibaro.windowSensor": ["Window", "mdi:window-open", DEVICE_CLASS_WINDOW], "com.fibaro.smokeSensor": ["Smoke", "mdi:smoking", DEVICE_CLASS_SMOKE], "com.fibaro.FGMS001": ["Motion", "mdi:run", DEVICE_CLASS_MOTION], "com.fibaro.heatDetector": ["Heat", "mdi:fire", "heat"], } def setup_platform(hass, config, add_entities, discovery_info=None): """Perform the setup for Fibaro controller devices.""" if discovery_info is None: return add_entities( [ FibaroBinarySensor(device) for device in hass.data[FIBARO_DEVICES]["binary_sensor"] ], True, ) class FibaroBinarySensor(FibaroDevice, BinarySensorEntity): """Representation of a Fibaro Binary Sensor.""" def __init__(self, fibaro_device): """Initialize the binary_sensor.""" self._state = None super().__init__(fibaro_device) self.entity_id = f"{DOMAIN}.{self.ha_id}" stype = None devconf = fibaro_device.device_config if fibaro_device.type in SENSOR_TYPES: stype = fibaro_device.type elif fibaro_device.baseType in SENSOR_TYPES: stype = fibaro_device.baseType if stype: self._device_class = SENSOR_TYPES[stype][2] self._icon = SENSOR_TYPES[stype][1] else: self._device_class = None self._icon = None # device_config overrides: self._device_class = devconf.get(CONF_DEVICE_CLASS, self._device_class) self._icon = devconf.get(CONF_ICON, self._icon) @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def device_class(self): """Return the device class of the sensor.""" return self._device_class @property def is_on(self): """Return true if sensor is on.""" return self._state def update(self): """Get the latest data and update the state.""" self._state = self.current_binary_state
"""The tests for mqtt camera component.""" import json from unittest.mock import patch import pytest from homeassistant.components import camera from homeassistant.setup import async_setup_component from .test_common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, help_test_discovery_broken, help_test_discovery_removal, help_test_discovery_update, help_test_discovery_update_attr, help_test_discovery_update_unchanged, help_test_entity_debug_info_message, help_test_entity_device_info_remove, help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_setting_attribute_via_mqtt_json_message, help_test_setting_attribute_with_template, help_test_unique_id, help_test_update_with_json_attrs_bad_JSON, help_test_update_with_json_attrs_not_dict, ) from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" await async_setup_component( hass, "camera", {"camera": {"platform": "mqtt", "topic": topic, "name": "Test Camera"}}, ) await hass.async_block_till_done() url = hass.states.get("camera.test_camera").attributes["entity_picture"] async_fire_mqtt_message(hass, topic, "beer") client = await aiohttp_client(hass.http.app) resp = await client.get(url) assert resp.status == 200 body = await resp.text() assert body == "beer" async def test_availability_when_connection_lost(hass, mqtt_mock): """Test availability after MQTT disconnection.""" await help_test_availability_when_connection_lost( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_default_availability_payload(hass, mqtt_mock): """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_custom_availability_payload(hass, mqtt_mock): """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_setting_attribute_with_template(hass, mqtt_mock): """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog): """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_JSON( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def 
test_discovery_update_attr(hass, mqtt_mock, caplog): """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ { "platform": "mqtt", "name": "Test 1", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, { "platform": "mqtt", "name": "Test 2", "topic": "test-topic", "unique_id": "TOTALLY_UNIQUE", }, ] } await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[camera.DOMAIN]) await help_test_discovery_removal(hass, mqtt_mock, caplog, camera.DOMAIN, data) async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_update( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_discovery_update_unchanged_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' with patch( "homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' await help_test_discovery_broken( hass, mqtt_mock, caplog, camera.DOMAIN, data1, data2 ) async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_connection( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT camera device registry integration.""" await help_test_entity_device_info_with_identifier( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" await help_test_entity_device_info_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_device_info_remove(hass, mqtt_mock): """Test device registry remove.""" await help_test_entity_device_info_remove( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_id_update_subscriptions(hass, mqtt_mock): """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) async def test_entity_id_update_discovery_update(hass, mqtt_mock): """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG ) async def test_entity_debug_info_message(hass, mqtt_mock): """Test MQTT debug info.""" await help_test_entity_debug_info_message( hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG, "test_topic", b"ON" )
adrienbrault/home-assistant
tests/components/mqtt/test_camera.py
homeassistant/components/fibaro/binary_sensor.py
from abc import ABCMeta, abstractmethod
from typing import Optional, Iterable

from ontobio.model.similarity import SimResult


class SimilarityEngine(metaclass=ABCMeta):
    """
    Interface for similarity engines, methods for search and compare

    Differs from ontobio.sim.api.interfaces.SimApi in that a similarity
    engine is type specific, eg phenotype, go term and handles individuals
    appropriately
    """

    @abstractmethod
    def compare(self,
                reference_ids: Iterable,
                query_profiles: Iterable[Iterable],
                method: Optional) -> SimResult:
        """
        Given two lists of entities (classes, individuals),
        resolves them to some type (phenotypes, go terms, etc)
        and returns their similarity
        """
        pass

    @abstractmethod
    def search(self,
               id_list: Iterable,
               negated_ids: Iterable,
               limit: Optional[int],
               taxon_filter: Optional,
               category_filter: Optional,
               method: Optional) -> SimResult:
        """
        Given an input iterable of classes or individuals,
        resolves to target classes (phenotypes, go terms, etc)
        and provides a ranking of similar profiles
        """
        pass
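A minimal sketch, hypothetical and not part of ontobio, of what satisfying this interface looks like: a type-specific engine subclasses SimilarityEngine and implements both abstract methods. The class name and the commented steps are assumptions for illustration only.

from typing import Iterable, Optional

from ontobio.model.similarity import SimResult
from ontobio.sim.sim_engine import SimilarityEngine


class ToyPhenotypeEngine(SimilarityEngine):
    """Hypothetical engine that resolves inputs to phenotype classes."""

    def compare(self, reference_ids: Iterable,
                query_profiles: Iterable[Iterable],
                method: Optional = None) -> SimResult:
        # 1. resolve reference_ids and each query profile to phenotype classes
        # 2. score each query profile against the reference profile
        # 3. wrap the scores in a SimResult
        raise NotImplementedError

    def search(self, id_list: Iterable, negated_ids: Iterable,
               limit: Optional[int] = None, taxon_filter: Optional = None,
               category_filter: Optional = None,
               method: Optional = None) -> SimResult:
        # resolve id_list, then rank stored profiles by similarity
        raise NotImplementedError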
import pytest

from ontobio import ontol
from ontobio import ontol_factory


def test_missing_node_is_none():
    ontology = ontol_factory.OntologyFactory().create("tests/resources/goslim_generic.json")
    assert ontology.node("GO:0") is None


def test_merge_copies_logical_definitions():
    pombe_ontology = ontol_factory.OntologyFactory().create("tests/resources/goslim_pombe.json")
    assert len(pombe_ontology.all_logical_definitions) == 0
    nucleus_ontology = ontol_factory.OntologyFactory().create("tests/resources/nucleus.json")
    assert len(nucleus_ontology.all_logical_definitions) == 2

    # Test logical definition copy in merge
    pombe_ontology.merge([nucleus_ontology])
    assert len(pombe_ontology.all_logical_definitions) == 2


def test_merge_copies_property_chain_axioms():
    nucleus_ontology = ontol_factory.OntologyFactory().create("tests/resources/nucleus.json")
    assert len(nucleus_ontology.all_property_chain_axioms) == 0
    goslim_ontology = ontol_factory.OntologyFactory().create("tests/resources/goslim_generic.json")
    assert len(goslim_ontology.all_property_chain_axioms) == 2

    # Test property chain axiom copy in merge
    nucleus_ontology.merge([goslim_ontology])
    assert len(nucleus_ontology.all_property_chain_axioms) == 2


def test_ontology_synonyms():
    ontology = ontol_factory.OntologyFactory().create("tests/resources/nucleus_new.json")
    syn = ontology.synonyms("GO:0005634")
    assert syn[0].__dict__ == ontol.Synonym(
        "GO:0005634", val="cell nucleus", pred="hasExactSynonym",
        lextype=None, xrefs=[], ontology=None, confidence=1.0,
        synonymType="http://purl.obolibrary.org/obo/go-test#systematic_synonym").__dict__
biolink/ontobio
tests/test_ontol.py
ontobio/sim/sim_engine.py
""" Classes for representing ontologies backed by a SPARQL endpoint ``` Ontology RemoteSparqlOntology EagerRemoteSparqlOntology LazyRemoteSparqlOntology ``` """ import networkx as nx import logging import ontobio.ontol from ontobio.ontol import Ontology, Synonym, TextDefinition from ontobio.sparql.sparql_ontol_utils import get_digraph, get_named_graph, get_xref_graph, run_sparql, fetchall_syns, fetchall_textdefs, fetchall_labels, fetchall_obs, OIO_SYNS from prefixcommons.curie_util import contract_uri, expand_uri, get_prefixes logger = logging.getLogger(__name__) class RemoteSparqlOntology(Ontology): """ Local or remote ontology """ def extract_subset(self, subset): """ Find all nodes in a subset. We assume the oboInOwl encoding of subsets, and subset IDs are IRIs """ # note subsets have an unusual encoding query = """ prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#> SELECT ?c WHERE {{ GRAPH <{g}> {{ ?c oboInOwl:inSubset ?s FILTER regex(?s,'#{s}$','i') }} }} """.format(s=subset, g=self.graph_name) bindings = run_sparql(query) return [r['c']['value'] for r in bindings] def subsets(self): """ Find all subsets for an ontology """ # note subsets have an unusual encoding query = """ prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#> SELECT DISTINCT ?s WHERE {{ GRAPH <{g}> {{ ?c oboInOwl:inSubset ?s }} }} """.format(g=self.graph_name) bindings = run_sparql(query) return [r['s']['value'] for r in bindings] def text_definition(self, nid): logger.info("lookup defs for {}".format(nid)) if self.all_text_definitions_cache is None: self.all_text_definitions() return super().text_definition(nid) # Override def all_text_definitions(self): logger.debug("Fetching all textdefs...") if self.all_text_definitions_cache is None: vals = fetchall_textdefs(self.graph_name) tds = [TextDefinition(c,v) for (c,v) in vals] for td in tds: self.add_text_definition(td) self.all_text_definitions_cache = tds # TODO: check if still used return self.all_text_definitions_cache def is_obsolete(self, nid): logger.info("lookup obs for {}".format(nid)) if self.all_obsoletes_cache is None: self.all_obsoletes() return super().is_obsolete(nid) def all_obsoletes(self): logger.debug("Fetching all obsoletes...") if self.all_obsoletes_cache is None: obsnodes = fetchall_obs(self.graph_name) for n in obsnodes: self.set_obsolete(n) self.all_obsoletes_cache = obsnodes # TODO: check if still used return self.all_obsoletes_cache def synonyms(self, nid, **args): logger.info("lookup syns for {}".format(nid)) if self.all_synonyms_cache is None: self.all_synonyms() return super().synonyms(nid, **args) # Override def all_synonyms(self, include_label=False): logger.debug("Fetching all syns...") # TODO: include_label in cache if self.all_synonyms_cache is None: syntups = fetchall_syns(self.graph_name) syns = [Synonym(t[0],pred=t[1], val=t[2]) for t in syntups] for syn in syns: self.add_synonym(syn) if include_label: lsyns = [Synonym(x, pred='label', val=self.label(x)) for x in self.nodes()] syns = syns + lsyns self.all_synonyms_cache = syns # TODO: check if still used return self.all_synonyms_cache # Override def subontology(self, nodes=None, **args): # ensure caches populated self.all_synonyms() self.all_text_definitions() return super().subontology(nodes, **args) # Override def resolve_names(self, names, is_remote=False, synonyms=False, **args): logger.debug("resolving via {}".format(self)) if not is_remote: # TODO: ensure synonyms present return super().resolve_names(names, synonyms, **args) else: results = set() for 
name in names: results.update( self._search(name, 'rdfs:label', **args) ) if synonyms: for pred in OIO_SYNS.values(): results.update( self._search(name, pred, **args) ) logger.info("REMOTE RESULTS="+str(results)) return list(results) def _search(self, searchterm, pred, **args): """ Search for things using labels """ # TODO: DRY with sparql_ontol_utils searchterm = searchterm.replace('%','.*') namedGraph = get_named_graph(self.handle) query = """ prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#> SELECT ?c WHERE {{ GRAPH <{g}> {{ ?c {pred} ?l FILTER regex(?l,'{s}','i') }} }} """.format(pred=pred, s=searchterm, g=namedGraph) bindings = run_sparql(query) return [r['c']['value'] for r in bindings] def sparql(self, select='*', body=None, inject_prefixes=None, single_column=False): """ Execute a SPARQL query. The query is specified using `select` and `body` parameters. The argument for the Named Graph is injected into the query. The select parameter should be either '*' or a list of vars (not prefixed with '?'). - If '*' is passed, then the result is a list of dicts, { $var: {value: $val } } - If a list of vars is passed, then the result is a list of lists - Unless single_column=True, in which case the results are a simple list of values from the first var The inject_prefixes argument can be used to inject a list of prefixes - these are expanded using the prefixcommons library """ if inject_prefixes is None: inject_prefixes = [] namedGraph = get_named_graph(self.handle) cols = [] select_val = None if select is None or select=='*': if not single_column: cols=None select_val='*' else: if isinstance(cols,list): cols = [select] else: cols = select select_val = ", ".join(['?'+c for c in cols]) prefixes = "" if inject_prefixes is not None: plist = ["prefix {}: <{}> ".format(p,expand_uri(p+":")) for p in inject_prefixes if p != "" and p is not None] prefixes = "\n".join(plist) query = """ {prefixes} SELECT {s} WHERE {{ GRAPH <{g}> {{ {b} }} }} """.format(prefixes=prefixes, s=select_val, b=body, g=namedGraph) bindings = run_sparql(query) if len(bindings) == 0: return [] if cols is None: return bindings else: if single_column: c = list(bindings[0].keys())[0] return [r[c]['value'] for r in bindings] else: return [r[c]['value'] for c in cols for r in bindings] class EagerRemoteSparqlOntology(RemoteSparqlOntology): """ Local or remote ontology """ def __init__(self, handle=None): """ initializes based on an ontology name """ self.id = get_named_graph(handle) self.handle = handle logger.info("Creating eager-remote-sparql from "+str(handle)) g = get_digraph(handle, None, True) logger.info("Graph:"+str(g)) if len(g.nodes()) == 0 and len(g.edges()) == 0: logger.error("Empty graph for '{}' - did you use the correct id?". format(handle)) self.graph = g self.graph_name = get_named_graph(handle) self.xref_graph = get_xref_graph(handle) self.all_logical_definitions = [] self.all_synonyms_cache = None self.all_text_definitions_cache = None self.all_obsoletes_cache = None logger.info("Graph: {} LDs: {}".format(self.graph, self.all_logical_definitions)) def __str__(self): return "h:{} g:{}".format(self.handle, self.graph) class LazyRemoteSparqlOntology(RemoteSparqlOntology): """ Local or remote ontology """ def __init__(self): self.all_logical_definitions = [] ## TODO
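A short usage sketch of the `sparql` method described in the docstring above. It is illustrative only: the handle "go" is an example, the query hits the configured remote endpoint, and the named graph is injected for you, so only the body pattern is supplied.

from ontobio.sparql.sparql_ontology import EagerRemoteSparqlOntology

ont = EagerRemoteSparqlOntology(handle="go")

# single_column=True returns a flat list of values for the first var
ids = ont.sparql(
    select=["c"],
    body="?c rdfs:subClassOf ?d",
    inject_prefixes=["rdfs"],
    single_column=True,
)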
biolink/ontobio
tests/test_ontol.py
ontobio/sparql/sparql_ontology.py
"""Support for the Tuya scenes.""" from homeassistant.components.scene import DOMAIN, Scene from . import DATA_TUYA, TuyaDevice ENTITY_ID_FORMAT = DOMAIN + '.{}' def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Tuya scenes.""" if discovery_info is None: return tuya = hass.data[DATA_TUYA] dev_ids = discovery_info.get('dev_ids') devices = [] for dev_id in dev_ids: device = tuya.get_device_by_id(dev_id) if device is None: continue devices.append(TuyaScene(device)) add_entities(devices) class TuyaScene(TuyaDevice, Scene): """Tuya Scene.""" def __init__(self, tuya): """Init Tuya scene.""" super().__init__(tuya) self.entity_id = ENTITY_ID_FORMAT.format(tuya.object_id()) def activate(self): """Activate the scene.""" self.tuya.activate()
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/tuya/scene.py
""" Virtual gateway for Zigbee Home Automation. For more details about this component, please refer to the documentation at https://home-assistant.io/components/zha/ """ import asyncio import collections import itertools import logging import os import traceback from homeassistant.components.system_log import LogEntry, _figure_out_source from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.entity_component import EntityComponent from ..api import async_get_device_info from .channels import MAINS_POWERED, ZDOChannel from .const import ( ADD_DEVICE_RELAY_LOGGERS, ATTR_MANUFACTURER, BELLOWS, CONF_BAUDRATE, CONF_DATABASE, CONF_RADIO_TYPE, CONF_USB_PATH, CONTROLLER, CURRENT, DATA_ZHA, DATA_ZHA_BRIDGE_ID, DATA_ZHA_CORE_COMPONENT, DATA_ZHA_GATEWAY, DEBUG_LEVELS, DEFAULT_BAUDRATE, DEFAULT_DATABASE_NAME, DEVICE_FULL_INIT, DEVICE_INFO, DEVICE_JOINED, DEVICE_REMOVED, DOMAIN, IEEE, LOG_ENTRY, LOG_OUTPUT, MODEL, NWK, ORIGINAL, RADIO, RADIO_DESCRIPTION, RAW_INIT, SIGNAL_REMOVE, SIGNATURE, TYPE, ZHA, ZHA_GW_MSG, ZIGPY, ZIGPY_DECONZ, ZIGPY_XBEE) from .device import DeviceStatus, ZHADevice from .discovery import ( async_create_device_entity, async_dispatch_discovery_info, async_process_endpoint) from .patches import apply_application_controller_patch from .registries import RADIO_TYPES from .store import async_get_registry _LOGGER = logging.getLogger(__name__) EntityReference = collections.namedtuple( 'EntityReference', 'reference_id zha_device cluster_channels device_info') class ZHAGateway: """Gateway that handles events that happen on the ZHA Zigbee network.""" def __init__(self, hass, config): """Initialize the gateway.""" self._hass = hass self._config = config self._component = EntityComponent(_LOGGER, DOMAIN, hass) self._devices = {} self._device_registry = collections.defaultdict(list) self.zha_storage = None self.application_controller = None self.radio_description = None hass.data[DATA_ZHA][DATA_ZHA_CORE_COMPONENT] = self._component hass.data[DATA_ZHA][DATA_ZHA_GATEWAY] = self self._log_levels = { ORIGINAL: async_capture_log_levels(), CURRENT: async_capture_log_levels() } self.debug_enabled = False self._log_relay_handler = LogRelayHandler(hass, self) async def async_initialize(self, config_entry): """Initialize controller and connect radio.""" self.zha_storage = await async_get_registry(self._hass) usb_path = config_entry.data.get(CONF_USB_PATH) baudrate = self._config.get(CONF_BAUDRATE, DEFAULT_BAUDRATE) radio_type = config_entry.data.get(CONF_RADIO_TYPE) radio_details = RADIO_TYPES[radio_type][RADIO]() radio = radio_details[RADIO] self.radio_description = RADIO_TYPES[radio_type][RADIO_DESCRIPTION] await radio.connect(usb_path, baudrate) if CONF_DATABASE in self._config: database = self._config[CONF_DATABASE] else: database = os.path.join( self._hass.config.config_dir, DEFAULT_DATABASE_NAME) self.application_controller = radio_details[CONTROLLER]( radio, database) apply_application_controller_patch(self) self.application_controller.add_listener(self) await self.application_controller.startup(auto_form=True) self._hass.data[DATA_ZHA][DATA_ZHA_BRIDGE_ID] = str( self.application_controller.ieee) init_tasks = [] for device in self.application_controller.devices.values(): init_tasks.append(self.async_device_initialized(device, False)) await asyncio.gather(*init_tasks) def device_joined(self, device): """Handle device joined. 
At this point, no information about the device is known other than its address """ async_dispatcher_send( self._hass, ZHA_GW_MSG, { TYPE: DEVICE_JOINED, NWK: device.nwk, IEEE: str(device.ieee) } ) def raw_device_initialized(self, device): """Handle a device initialization without quirks loaded.""" endpoint_ids = device.endpoints.keys() ept_id = next((ept_id for ept_id in endpoint_ids if ept_id != 0), None) manufacturer = 'Unknown' model = 'Unknown' if ept_id is not None: manufacturer = device.endpoints[ept_id].manufacturer model = device.endpoints[ept_id].model async_dispatcher_send( self._hass, ZHA_GW_MSG, { TYPE: RAW_INIT, NWK: device.nwk, IEEE: str(device.ieee), MODEL: model, ATTR_MANUFACTURER: manufacturer, SIGNATURE: device.get_signature() } ) def device_initialized(self, device): """Handle device joined and basic information discovered.""" self._hass.async_create_task( self.async_device_initialized(device, True)) def device_left(self, device): """Handle device leaving the network.""" pass def device_removed(self, device): """Handle device being removed from the network.""" zha_device = self._devices.pop(device.ieee, None) self._device_registry.pop(device.ieee, None) if zha_device is not None: device_info = async_get_device_info(self._hass, zha_device) self._hass.async_create_task(zha_device.async_unsub_dispatcher()) async_dispatcher_send( self._hass, "{}_{}".format(SIGNAL_REMOVE, str(zha_device.ieee)) ) if device_info is not None: async_dispatcher_send( self._hass, ZHA_GW_MSG, { TYPE: DEVICE_REMOVED, DEVICE_INFO: device_info } ) def get_device(self, ieee): """Return ZHADevice for given ieee.""" return self._devices.get(ieee) def get_entity_reference(self, entity_id): """Return entity reference for given entity_id if found.""" for entity_reference in itertools.chain.from_iterable( self.device_registry.values()): if entity_id == entity_reference.reference_id: return entity_reference @property def devices(self): """Return devices.""" return self._devices @property def device_registry(self): """Return entities by ieee.""" return self._device_registry def register_entity_reference( self, ieee, reference_id, zha_device, cluster_channels, device_info): """Record the creation of a hass entity associated with ieee.""" self._device_registry[ieee].append( EntityReference( reference_id=reference_id, zha_device=zha_device, cluster_channels=cluster_channels, device_info=device_info ) ) @callback def async_enable_debug_mode(self): """Enable debug mode for ZHA.""" self._log_levels[ORIGINAL] = async_capture_log_levels() async_set_logger_levels(DEBUG_LEVELS) self._log_levels[CURRENT] = async_capture_log_levels() for logger_name in ADD_DEVICE_RELAY_LOGGERS: logging.getLogger(logger_name).addHandler(self._log_relay_handler) self.debug_enabled = True @callback def async_disable_debug_mode(self): """Disable debug mode for ZHA.""" async_set_logger_levels(self._log_levels[ORIGINAL]) self._log_levels[CURRENT] = async_capture_log_levels() for logger_name in ADD_DEVICE_RELAY_LOGGERS: logging.getLogger(logger_name).removeHandler( self._log_relay_handler) self.debug_enabled = False @callback def _async_get_or_create_device(self, zigpy_device, is_new_join): """Get or create a ZHA device.""" zha_device = self._devices.get(zigpy_device.ieee) if zha_device is None: zha_device = ZHADevice(self._hass, zigpy_device, self) self._devices[zigpy_device.ieee] = zha_device if not is_new_join: entry = self.zha_storage.async_get_or_create(zha_device) zha_device.async_update_last_seen(entry.last_seen) 
zha_device.set_power_source(entry.power_source) return zha_device @callback def async_device_became_available( self, sender, is_reply, profile, cluster, src_ep, dst_ep, tsn, command_id, args): """Handle tasks when a device becomes available.""" self.async_update_device(sender) @callback def async_update_device(self, sender): """Update device that has just become available.""" if sender.ieee in self.devices: device = self.devices[sender.ieee] # avoid a race condition during new joins if device.status is DeviceStatus.INITIALIZED: device.update_available(True) async def async_update_device_storage(self): """Update the devices in the store.""" for device in self.devices.values(): self.zha_storage.async_update(device) await self.zha_storage.async_save() async def async_device_initialized(self, device, is_new_join): """Handle device joined and basic information discovered (async).""" zha_device = self._async_get_or_create_device(device, is_new_join) discovery_infos = [] for endpoint_id, endpoint in device.endpoints.items(): async_process_endpoint( self._hass, self._config, endpoint_id, endpoint, discovery_infos, device, zha_device, is_new_join ) if endpoint_id != 0: for cluster in endpoint.in_clusters.values(): cluster.bind_only = False for cluster in endpoint.out_clusters.values(): cluster.bind_only = True if is_new_join: # configure the device await zha_device.async_configure() zha_device.update_available(True) elif zha_device.power_source is not None\ and zha_device.power_source == MAINS_POWERED: # the device isn't a battery powered device so we should be able # to update it now _LOGGER.debug( "attempting to request fresh state for %s %s", zha_device.name, "with power source: {}".format( ZDOChannel.POWER_SOURCES.get(zha_device.power_source) ) ) await zha_device.async_initialize(from_cache=False) else: await zha_device.async_initialize(from_cache=True) for discovery_info in discovery_infos: async_dispatch_discovery_info( self._hass, is_new_join, discovery_info ) device_entity = async_create_device_entity(zha_device) await self._component.async_add_entities([device_entity]) if is_new_join: device_info = async_get_device_info(self._hass, zha_device) async_dispatcher_send( self._hass, ZHA_GW_MSG, { TYPE: DEVICE_FULL_INIT, DEVICE_INFO: device_info } ) async def shutdown(self): """Stop ZHA Controller Application.""" _LOGGER.debug("Shutting down ZHA ControllerApplication") await self.application_controller.shutdown() @callback def async_capture_log_levels(): """Capture current logger levels for ZHA.""" return { BELLOWS: logging.getLogger(BELLOWS).getEffectiveLevel(), ZHA: logging.getLogger(ZHA).getEffectiveLevel(), ZIGPY: logging.getLogger(ZIGPY).getEffectiveLevel(), ZIGPY_XBEE: logging.getLogger(ZIGPY_XBEE).getEffectiveLevel(), ZIGPY_DECONZ: logging.getLogger(ZIGPY_DECONZ).getEffectiveLevel(), } @callback def async_set_logger_levels(levels): """Set logger levels for ZHA.""" logging.getLogger(BELLOWS).setLevel(levels[BELLOWS]) logging.getLogger(ZHA).setLevel(levels[ZHA]) logging.getLogger(ZIGPY).setLevel(levels[ZIGPY]) logging.getLogger(ZIGPY_XBEE).setLevel(levels[ZIGPY_XBEE]) logging.getLogger(ZIGPY_DECONZ).setLevel(levels[ZIGPY_DECONZ]) class LogRelayHandler(logging.Handler): """Log handler for error messages.""" def __init__(self, hass, gateway): """Initialize a new LogErrorHandler.""" super().__init__() self.hass = hass self.gateway = gateway def emit(self, record): """Relay log message via dispatcher.""" stack = [] if record.levelno >= logging.WARN: if not record.exc_info: stack = [f for f, _, 
_, _ in traceback.extract_stack()] entry = LogEntry(record, stack, _figure_out_source(record, stack, self.hass)) async_dispatcher_send( self.hass, ZHA_GW_MSG, { TYPE: LOG_OUTPUT, LOG_ENTRY: entry.to_dict() } )
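A minimal stdlib sketch of the snapshot/restore pattern that async_enable_debug_mode and async_disable_debug_mode implement above: capture the effective levels before raising them to DEBUG, then restore the snapshot when done. The logger names here are illustrative, not the gateway's constants.

import logging

LOGGERS = ("bellows", "zigpy", "homeassistant.components.zha")


def capture_levels():
    """Snapshot the effective level of each logger of interest."""
    return {name: logging.getLogger(name).getEffectiveLevel()
            for name in LOGGERS}


def set_levels(levels):
    """Apply a {logger_name: level} mapping."""
    for name, level in levels.items():
        logging.getLogger(name).setLevel(level)


original = capture_levels()                          # snapshot
set_levels({name: logging.DEBUG for name in LOGGERS})  # enable debug
# ... debug session runs here ...
set_levels(original)                                 # restore the snapshot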
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
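# A minimal sketch (not part of the test file above) of the event-injection
# pattern these tests rely on: build an event from its hex packet string and
# hand it to the first registered subscriber, just as the rfxtrx component
# would on a real receive. Only the helper name inject_packet is invented
# here; every other name appears in the tests above.
from homeassistant.components import rfxtrx as rfxtrx_core

def inject_packet(packet_hex):
    """Simulate a received RFXtrx packet (hypothetical helper)."""
    event = rfxtrx_core.get_rfx_object(packet_hex)
    event.data = bytearray.fromhex(packet_hex)
    rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)

# With automatic_add enabled, inject_packet('0b1100120118cdea02020070')
# registers a new light in rfxtrx_core.RFX_DEVICES, which is exactly what
# test_discover_light asserts above.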
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/zha/core/gateway.py
"""Provide functionality to TTS.""" import asyncio import ctypes import functools as ft import hashlib import io import logging import mimetypes import os import re from aiohttp import web import voluptuous as vol from homeassistant.components.http import HomeAssistantView from homeassistant.components.media_player.const import ( ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, MEDIA_TYPE_MUSIC, SERVICE_PLAY_MEDIA) from homeassistant.components.media_player.const import DOMAIN as DOMAIN_MP from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, CONF_PLATFORM from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_per_platform import homeassistant.helpers.config_validation as cv from homeassistant.setup import async_prepare_setup_platform _LOGGER = logging.getLogger(__name__) ATTR_CACHE = 'cache' ATTR_LANGUAGE = 'language' ATTR_MESSAGE = 'message' ATTR_OPTIONS = 'options' ATTR_PLATFORM = 'platform' CONF_BASE_URL = 'base_url' CONF_CACHE = 'cache' CONF_CACHE_DIR = 'cache_dir' CONF_LANG = 'language' CONF_SERVICE_NAME = 'service_name' CONF_TIME_MEMORY = 'time_memory' DEFAULT_CACHE = True DEFAULT_CACHE_DIR = 'tts' DEFAULT_TIME_MEMORY = 300 DOMAIN = 'tts' MEM_CACHE_FILENAME = 'filename' MEM_CACHE_VOICE = 'voice' SERVICE_CLEAR_CACHE = 'clear_cache' SERVICE_SAY = 'say' _RE_VOICE_FILE = re.compile( r"([a-f0-9]{40})_([^_]+)_([^_]+)_([a-z_]+)\.[a-z0-9]{3,4}") KEY_PATTERN = '{0}_{1}_{2}_{3}' def _deprecated_platform(value): """Validate if platform is deprecated.""" if value == 'google': raise vol.Invalid( 'google tts service has been renamed to google_translate,' ' please update your configuration.') return value PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({ vol.Required(CONF_PLATFORM): vol.All(cv.string, _deprecated_platform), vol.Optional(CONF_CACHE, default=DEFAULT_CACHE): cv.boolean, vol.Optional(CONF_CACHE_DIR, default=DEFAULT_CACHE_DIR): cv.string, vol.Optional(CONF_TIME_MEMORY, default=DEFAULT_TIME_MEMORY): vol.All(vol.Coerce(int), vol.Range(min=60, max=57600)), vol.Optional(CONF_BASE_URL): cv.string, vol.Optional(CONF_SERVICE_NAME): cv.string, }) PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE.extend(PLATFORM_SCHEMA.schema) SCHEMA_SERVICE_SAY = vol.Schema({ vol.Required(ATTR_MESSAGE): cv.string, vol.Optional(ATTR_CACHE): cv.boolean, vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, vol.Optional(ATTR_LANGUAGE): cv.string, vol.Optional(ATTR_OPTIONS): dict, }) SCHEMA_SERVICE_CLEAR_CACHE = vol.Schema({}) async def async_setup(hass, config): """Set up TTS.""" tts = SpeechManager(hass) try: conf = config[DOMAIN][0] if config.get(DOMAIN, []) else {} use_cache = conf.get(CONF_CACHE, DEFAULT_CACHE) cache_dir = conf.get(CONF_CACHE_DIR, DEFAULT_CACHE_DIR) time_memory = conf.get(CONF_TIME_MEMORY, DEFAULT_TIME_MEMORY) base_url = conf.get(CONF_BASE_URL) or hass.config.api.base_url await tts.async_init_cache(use_cache, cache_dir, time_memory, base_url) except (HomeAssistantError, KeyError) as err: _LOGGER.error("Error on cache init %s", err) return False hass.http.register_view(TextToSpeechView(tts)) hass.http.register_view(TextToSpeechUrlView(tts)) async def async_setup_platform(p_type, p_config, disc_info=None): """Set up a TTS platform.""" platform = await async_prepare_setup_platform( hass, config, DOMAIN, p_type) if platform is None: return try: if hasattr(platform, 'async_get_engine'): provider = await platform.async_get_engine( hass, p_config) else: provider = await hass.async_add_job( platform.get_engine, hass, 
p_config) if provider is None: _LOGGER.error("Error setting up platform %s", p_type) return tts.async_register_engine(p_type, provider, p_config) except Exception: # pylint: disable=broad-except _LOGGER.exception("Error setting up platform: %s", p_type) return async def async_say_handle(service): """Service handle for say.""" entity_ids = service.data.get(ATTR_ENTITY_ID, ENTITY_MATCH_ALL) message = service.data.get(ATTR_MESSAGE) cache = service.data.get(ATTR_CACHE) language = service.data.get(ATTR_LANGUAGE) options = service.data.get(ATTR_OPTIONS) try: url = await tts.async_get_url( p_type, message, cache=cache, language=language, options=options ) except HomeAssistantError as err: _LOGGER.error("Error on init TTS: %s", err) return data = { ATTR_MEDIA_CONTENT_ID: url, ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC, ATTR_ENTITY_ID: entity_ids, } await hass.services.async_call( DOMAIN_MP, SERVICE_PLAY_MEDIA, data, blocking=True) service_name = p_config.get(CONF_SERVICE_NAME, "{}_{}".format( p_type, SERVICE_SAY)) hass.services.async_register( DOMAIN, service_name, async_say_handle, schema=SCHEMA_SERVICE_SAY) setup_tasks = [async_setup_platform(p_type, p_config) for p_type, p_config in config_per_platform(config, DOMAIN)] if setup_tasks: await asyncio.wait(setup_tasks, loop=hass.loop) async def async_clear_cache_handle(service): """Handle clear cache service call.""" await tts.async_clear_cache() hass.services.async_register( DOMAIN, SERVICE_CLEAR_CACHE, async_clear_cache_handle, schema=SCHEMA_SERVICE_CLEAR_CACHE) return True class SpeechManager: """Representation of a speech store.""" def __init__(self, hass): """Initialize a speech store.""" self.hass = hass self.providers = {} self.use_cache = DEFAULT_CACHE self.cache_dir = DEFAULT_CACHE_DIR self.time_memory = DEFAULT_TIME_MEMORY self.base_url = None self.file_cache = {} self.mem_cache = {} async def async_init_cache(self, use_cache, cache_dir, time_memory, base_url): """Init config folder and load file cache.""" self.use_cache = use_cache self.time_memory = time_memory self.base_url = base_url def init_tts_cache_dir(cache_dir): """Init cache folder.""" if not os.path.isabs(cache_dir): cache_dir = self.hass.config.path(cache_dir) if not os.path.isdir(cache_dir): _LOGGER.info("Create cache dir %s.", cache_dir) os.mkdir(cache_dir) return cache_dir try: self.cache_dir = await self.hass.async_add_job( init_tts_cache_dir, cache_dir) except OSError as err: raise HomeAssistantError("Can't init cache dir {}".format(err)) def get_cache_files(): """Return a dict of given engine files.""" cache = {} folder_data = os.listdir(self.cache_dir) for file_data in folder_data: record = _RE_VOICE_FILE.match(file_data) if record: key = KEY_PATTERN.format( record.group(1), record.group(2), record.group(3), record.group(4) ) cache[key.lower()] = file_data.lower() return cache try: cache_files = await self.hass.async_add_job(get_cache_files) except OSError as err: raise HomeAssistantError("Can't read cache dir {}".format(err)) if cache_files: self.file_cache.update(cache_files) async def async_clear_cache(self): """Read file cache and delete files.""" self.mem_cache = {} def remove_files(): """Remove files from filesystem.""" for _, filename in self.file_cache.items(): try: os.remove(os.path.join(self.cache_dir, filename)) except OSError as err: _LOGGER.warning( "Can't remove cache file '%s': %s", filename, err) await self.hass.async_add_job(remove_files) self.file_cache = {} @callback def async_register_engine(self, engine, provider, config): """Register a TTS 
provider.""" provider.hass = self.hass if provider.name is None: provider.name = engine self.providers[engine] = provider async def async_get_url(self, engine, message, cache=None, language=None, options=None): """Get URL for play message. This method is a coroutine. """ provider = self.providers[engine] msg_hash = hashlib.sha1(bytes(message, 'utf-8')).hexdigest() use_cache = cache if cache is not None else self.use_cache # Languages language = language or provider.default_language if language is None or \ language not in provider.supported_languages: raise HomeAssistantError("Not supported language {0}".format( language)) # Options if provider.default_options and options: merged_options = provider.default_options.copy() merged_options.update(options) options = merged_options options = options or provider.default_options if options is not None: invalid_opts = [opt_name for opt_name in options.keys() if opt_name not in (provider.supported_options or [])] if invalid_opts: raise HomeAssistantError( "Invalid options found: {}".format(invalid_opts)) options_key = ctypes.c_size_t(hash(frozenset(options))).value else: options_key = '-' key = KEY_PATTERN.format( msg_hash, language, options_key, engine).lower() # Is speech already in memory if key in self.mem_cache: filename = self.mem_cache[key][MEM_CACHE_FILENAME] # Is file store in file cache elif use_cache and key in self.file_cache: filename = self.file_cache[key] self.hass.async_create_task(self.async_file_to_mem(key)) # Load speech from provider into memory else: filename = await self.async_get_tts_audio( engine, key, message, use_cache, language, options) return "{}/api/tts_proxy/{}".format(self.base_url, filename) async def async_get_tts_audio( self, engine, key, message, cache, language, options): """Receive TTS and store for view in cache. This method is a coroutine. """ provider = self.providers[engine] extension, data = await provider.async_get_tts_audio( message, language, options) if data is None or extension is None: raise HomeAssistantError( "No TTS from {} for '{}'".format(engine, message)) # Create file infos filename = ("{}.{}".format(key, extension)).lower() data = self.write_tags( filename, data, provider, message, language, options) # Save to memory self._async_store_to_memcache(key, filename, data) if cache: self.hass.async_create_task( self.async_save_tts_audio(key, filename, data)) return filename async def async_save_tts_audio(self, key, filename, data): """Store voice data to file and file_cache. This method is a coroutine. """ voice_file = os.path.join(self.cache_dir, filename) def save_speech(): """Store speech to filesystem.""" with open(voice_file, 'wb') as speech: speech.write(data) try: await self.hass.async_add_job(save_speech) self.file_cache[key] = filename except OSError: _LOGGER.error("Can't write %s", filename) async def async_file_to_mem(self, key): """Load voice from file cache into memory. This method is a coroutine. 
""" filename = self.file_cache.get(key) if not filename: raise HomeAssistantError("Key {} not in file cache!".format(key)) voice_file = os.path.join(self.cache_dir, filename) def load_speech(): """Load a speech from filesystem.""" with open(voice_file, 'rb') as speech: return speech.read() try: data = await self.hass.async_add_job(load_speech) except OSError: del self.file_cache[key] raise HomeAssistantError("Can't read {}".format(voice_file)) self._async_store_to_memcache(key, filename, data) @callback def _async_store_to_memcache(self, key, filename, data): """Store data to memcache and set timer to remove it.""" self.mem_cache[key] = { MEM_CACHE_FILENAME: filename, MEM_CACHE_VOICE: data, } @callback def async_remove_from_mem(): """Cleanup memcache.""" self.mem_cache.pop(key) self.hass.loop.call_later(self.time_memory, async_remove_from_mem) async def async_read_tts(self, filename): """Read a voice file and return binary. This method is a coroutine. """ record = _RE_VOICE_FILE.match(filename.lower()) if not record: raise HomeAssistantError("Wrong tts file format!") key = KEY_PATTERN.format( record.group(1), record.group(2), record.group(3), record.group(4)) if key not in self.mem_cache: if key not in self.file_cache: raise HomeAssistantError("{} not in cache!".format(key)) await self.async_file_to_mem(key) content, _ = mimetypes.guess_type(filename) return (content, self.mem_cache[key][MEM_CACHE_VOICE]) @staticmethod def write_tags(filename, data, provider, message, language, options): """Write ID3 tags to file. Async friendly. """ import mutagen data_bytes = io.BytesIO(data) data_bytes.name = filename data_bytes.seek(0) album = provider.name artist = language if options is not None: if options.get('voice') is not None: artist = options.get('voice') try: tts_file = mutagen.File(data_bytes, easy=True) if tts_file is not None: tts_file['artist'] = artist tts_file['album'] = album tts_file['title'] = message tts_file.save(data_bytes) except mutagen.MutagenError as err: _LOGGER.error("ID3 tag error: %s", err) return data_bytes.getvalue() class Provider: """Represent a single TTS provider.""" hass = None name = None @property def default_language(self): """Return the default language.""" return None @property def supported_languages(self): """Return a list of supported languages.""" return None @property def supported_options(self): """Return a list of supported options like voice, emotionen.""" return None @property def default_options(self): """Return a dict include default options.""" return None def get_tts_audio(self, message, language, options=None): """Load tts audio file from provider.""" raise NotImplementedError() def async_get_tts_audio(self, message, language, options=None): """Load tts audio file from provider. Return a tuple of file extension and data as bytes. This method must be run in the event loop and returns a coroutine. 
""" return self.hass.async_add_job( ft.partial(self.get_tts_audio, message, language, options=options)) class TextToSpeechUrlView(HomeAssistantView): """TTS view to get a url to a generated speech file.""" requires_auth = True url = '/api/tts_get_url' name = 'api:tts:geturl' def __init__(self, tts): """Initialize a tts view.""" self.tts = tts async def post(self, request): """Generate speech and provide url.""" try: data = await request.json() except ValueError: return self.json_message('Invalid JSON specified', 400) if not data.get(ATTR_PLATFORM) and data.get(ATTR_MESSAGE): return self.json_message('Must specify platform and message', 400) p_type = data[ATTR_PLATFORM] message = data[ATTR_MESSAGE] cache = data.get(ATTR_CACHE) language = data.get(ATTR_LANGUAGE) options = data.get(ATTR_OPTIONS) try: url = await self.tts.async_get_url( p_type, message, cache=cache, language=language, options=options ) resp = self.json({'url': url}, 200) except HomeAssistantError as err: _LOGGER.error("Error on init tts: %s", err) resp = self.json({'error': err}, 400) return resp class TextToSpeechView(HomeAssistantView): """TTS view to serve a speech audio.""" requires_auth = False url = '/api/tts_proxy/{filename}' name = 'api:tts:speech' def __init__(self, tts): """Initialize a tts view.""" self.tts = tts async def get(self, request, filename): """Start a get request.""" try: content, data = await self.tts.async_read_tts(filename) except HomeAssistantError as err: _LOGGER.error("Error on load tts: %s", err) return web.Response(status=404) return web.Response(body=data, content_type=content)
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/tts/__init__.py
"""Support for Spider thermostats.""" import logging from homeassistant.components.climate import ClimateDevice from homeassistant.components.climate.const import ( STATE_COOL, STATE_HEAT, STATE_IDLE, SUPPORT_FAN_MODE, SUPPORT_OPERATION_MODE, SUPPORT_TARGET_TEMPERATURE) from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS from . import DOMAIN as SPIDER_DOMAIN FAN_LIST = [ 'Auto', 'Low', 'Medium', 'High', 'Boost 10', 'Boost 20', 'Boost 30', ] OPERATION_LIST = [ STATE_HEAT, STATE_COOL, ] HA_STATE_TO_SPIDER = { STATE_COOL: 'Cool', STATE_HEAT: 'Heat', STATE_IDLE: 'Idle', } SPIDER_STATE_TO_HA = {value: key for key, value in HA_STATE_TO_SPIDER.items()} _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Spider thermostat.""" if discovery_info is None: return devices = [SpiderThermostat(hass.data[SPIDER_DOMAIN]['controller'], device) for device in hass.data[SPIDER_DOMAIN]['thermostats']] add_entities(devices, True) class SpiderThermostat(ClimateDevice): """Representation of a thermostat.""" def __init__(self, api, thermostat): """Initialize the thermostat.""" self.api = api self.thermostat = thermostat @property def supported_features(self): """Return the list of supported features.""" supports = SUPPORT_TARGET_TEMPERATURE if self.thermostat.has_operation_mode: supports |= SUPPORT_OPERATION_MODE if self.thermostat.has_fan_mode: supports |= SUPPORT_FAN_MODE return supports @property def unique_id(self): """Return the id of the thermostat, if any.""" return self.thermostat.id @property def name(self): """Return the name of the thermostat, if any.""" return self.thermostat.name @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self.thermostat.current_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self.thermostat.target_temperature @property def target_temperature_step(self): """Return the supported step of target temperature.""" return self.thermostat.temperature_steps @property def min_temp(self): """Return the minimum temperature.""" return self.thermostat.minimum_temperature @property def max_temp(self): """Return the maximum temperature.""" return self.thermostat.maximum_temperature @property def current_operation(self): """Return current operation ie. heat, cool, idle.""" return SPIDER_STATE_TO_HA[self.thermostat.operation_mode] @property def operation_list(self): """Return the list of available operation modes.""" return OPERATION_LIST def set_temperature(self, **kwargs): """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if temperature is None: return self.thermostat.set_temperature(temperature) def set_operation_mode(self, operation_mode): """Set new target operation mode.""" self.thermostat.set_operation_mode( HA_STATE_TO_SPIDER.get(operation_mode)) @property def current_fan_mode(self): """Return the fan setting.""" return self.thermostat.current_fan_speed def set_fan_mode(self, fan_mode): """Set fan mode.""" self.thermostat.set_fan_speed(fan_mode) @property def fan_list(self): """List of available fan modes.""" return FAN_LIST def update(self): """Get the latest data.""" self.thermostat = self.api.get_thermostat(self.unique_id)
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/spider/climate.py
"""Support for Honeywell Round Connected and Honeywell Evohome thermostats.""" import logging import datetime import requests import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.climate import ClimateDevice, PLATFORM_SCHEMA from homeassistant.components.climate.const import ( ATTR_FAN_MODE, ATTR_FAN_LIST, ATTR_OPERATION_MODE, ATTR_OPERATION_LIST, SUPPORT_TARGET_TEMPERATURE, SUPPORT_AWAY_MODE, SUPPORT_OPERATION_MODE) from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, TEMP_CELSIUS, TEMP_FAHRENHEIT, ATTR_TEMPERATURE, CONF_REGION) _LOGGER = logging.getLogger(__name__) ATTR_FAN = 'fan' ATTR_SYSTEM_MODE = 'system_mode' ATTR_CURRENT_OPERATION = 'equipment_output_status' CONF_AWAY_TEMPERATURE = 'away_temperature' CONF_COOL_AWAY_TEMPERATURE = 'away_cool_temperature' CONF_HEAT_AWAY_TEMPERATURE = 'away_heat_temperature' DEFAULT_AWAY_TEMPERATURE = 16 DEFAULT_COOL_AWAY_TEMPERATURE = 30 DEFAULT_HEAT_AWAY_TEMPERATURE = 16 DEFAULT_REGION = 'eu' REGIONS = ['eu', 'us'] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_AWAY_TEMPERATURE, default=DEFAULT_AWAY_TEMPERATURE): vol.Coerce(float), vol.Optional(CONF_COOL_AWAY_TEMPERATURE, default=DEFAULT_COOL_AWAY_TEMPERATURE): vol.Coerce(float), vol.Optional(CONF_HEAT_AWAY_TEMPERATURE, default=DEFAULT_HEAT_AWAY_TEMPERATURE): vol.Coerce(float), vol.Optional(CONF_REGION, default=DEFAULT_REGION): vol.In(REGIONS), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Honeywell thermostat.""" username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) region = config.get(CONF_REGION) if region == 'us': return _setup_us(username, password, config, add_entities) return _setup_round(username, password, config, add_entities) def _setup_round(username, password, config, add_entities): """Set up the rounding function.""" from evohomeclient import EvohomeClient away_temp = config.get(CONF_AWAY_TEMPERATURE) evo_api = EvohomeClient(username, password) try: zones = evo_api.temperatures(force_refresh=True) for i, zone in enumerate(zones): add_entities( [RoundThermostat(evo_api, zone['id'], i == 0, away_temp)], True ) except requests.exceptions.RequestException as err: _LOGGER.error( "Connection error logging into the honeywell evohome web service, " "hint: %s", err) return False return True # config will be used later def _setup_us(username, password, config, add_entities): """Set up the user.""" import somecomfort try: client = somecomfort.SomeComfort(username, password) except somecomfort.AuthError: _LOGGER.error("Failed to login to honeywell account %s", username) return False except somecomfort.SomeComfortError as ex: _LOGGER.error("Failed to initialize honeywell client: %s", str(ex)) return False dev_id = config.get('thermostat') loc_id = config.get('location') cool_away_temp = config.get(CONF_COOL_AWAY_TEMPERATURE) heat_away_temp = config.get(CONF_HEAT_AWAY_TEMPERATURE) add_entities([HoneywellUSThermostat(client, device, cool_away_temp, heat_away_temp, username, password) for location in client.locations_by_id.values() for device in location.devices_by_id.values() if ((not loc_id or location.locationid == loc_id) and (not dev_id or device.deviceid == dev_id))]) return True class RoundThermostat(ClimateDevice): """Representation of a Honeywell Round Connected thermostat.""" def __init__(self, client, zone_id, master, away_temp): """Initialize the thermostat.""" self.client = 
client self._current_temperature = None self._target_temperature = None self._name = 'round connected' self._id = zone_id self._master = master self._is_dhw = False self._away_temp = away_temp self._away = False @property def supported_features(self): """Return the list of supported features.""" supported = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_AWAY_MODE) if hasattr(self.client, ATTR_SYSTEM_MODE): supported |= SUPPORT_OPERATION_MODE return supported @property def name(self): """Return the name of the honeywell, if any.""" return self._name @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" if self._is_dhw: return None return self._target_temperature def set_temperature(self, **kwargs): """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if temperature is None: return self.client.set_temperature(self._name, temperature) @property def current_operation(self) -> str: """Get the current operation of the system.""" return getattr(self.client, ATTR_SYSTEM_MODE, None) @property def is_away_mode_on(self): """Return true if away mode is on.""" return self._away def set_operation_mode(self, operation_mode: str) -> None: """Set the HVAC mode for the thermostat.""" if hasattr(self.client, ATTR_SYSTEM_MODE): self.client.system_mode = operation_mode def turn_away_mode_on(self): """Turn away on. Honeywell does have a proprietary away mode, but it doesn't really work the way it should. For example: If you set a temperature manually it doesn't get overwritten when away mode is switched on. """ self._away = True self.client.set_temperature(self._name, self._away_temp) def turn_away_mode_off(self): """Turn away off.""" self._away = False self.client.cancel_temp_override(self._name) def update(self): """Get the latest date.""" try: # Only refresh if this is the "master" device, # others will pick up the cache for val in self.client.temperatures(force_refresh=self._master): if val['id'] == self._id: data = val except KeyError: _LOGGER.error("Update failed from Honeywell server") self.client.user_data = None return except StopIteration: _LOGGER.error("Did not receive any temperature data from the " "evohomeclient API") return self._current_temperature = data['temp'] self._target_temperature = data['setpoint'] if data['thermostat'] == 'DOMESTIC_HOT_WATER': self._name = 'Hot Water' self._is_dhw = True else: self._name = data['name'] self._is_dhw = False # The underlying library doesn't expose the thermostat's mode # but we can pull it out of the big dictionary of information. 
device = self.client.devices[self._id] self.client.system_mode = device[ 'thermostat']['changeableValues']['mode'] class HoneywellUSThermostat(ClimateDevice): """Representation of a Honeywell US Thermostat.""" def __init__(self, client, device, cool_away_temp, heat_away_temp, username, password): """Initialize the thermostat.""" self._client = client self._device = device self._cool_away_temp = cool_away_temp self._heat_away_temp = heat_away_temp self._away = False self._username = username self._password = password @property def supported_features(self): """Return the list of supported features.""" supported = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_AWAY_MODE) if hasattr(self._device, ATTR_SYSTEM_MODE): supported |= SUPPORT_OPERATION_MODE return supported @property def is_fan_on(self): """Return true if fan is on.""" return self._device.fan_running @property def name(self): """Return the name of the honeywell, if any.""" return self._device.name @property def temperature_unit(self): """Return the unit of measurement.""" return (TEMP_CELSIUS if self._device.temperature_unit == 'C' else TEMP_FAHRENHEIT) @property def current_temperature(self): """Return the current temperature.""" return self._device.current_temperature @property def current_humidity(self): """Return the current humidity.""" return self._device.current_humidity @property def target_temperature(self): """Return the temperature we try to reach.""" if self._device.system_mode == 'cool': return self._device.setpoint_cool return self._device.setpoint_heat @property def current_operation(self) -> str: """Return current operation ie. heat, cool, idle.""" oper = getattr(self._device, ATTR_CURRENT_OPERATION, None) if oper == "off": oper = "idle" return oper def set_temperature(self, **kwargs): """Set target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if temperature is None: return import somecomfort try: # Get current mode mode = self._device.system_mode # Set hold if this is not the case if getattr(self._device, "hold_{}".format(mode)) is False: # Get next period key next_period_key = '{}NextPeriod'.format(mode.capitalize()) # Get next period raw value next_period = self._device.raw_ui_data.get(next_period_key) # Get next period time hour, minute = divmod(next_period * 15, 60) # Set hold time setattr(self._device, "hold_{}".format(mode), datetime.time(hour, minute)) # Set temperature setattr(self._device, "setpoint_{}".format(mode), temperature) except somecomfort.SomeComfortError: _LOGGER.error("Temperature %.1f out of range", temperature) @property def device_state_attributes(self): """Return the device specific state attributes.""" import somecomfort data = { ATTR_FAN: (self.is_fan_on and 'running' or 'idle'), ATTR_FAN_MODE: self._device.fan_mode, ATTR_OPERATION_MODE: self._device.system_mode, } data[ATTR_FAN_LIST] = somecomfort.FAN_MODES data[ATTR_OPERATION_LIST] = somecomfort.SYSTEM_MODES return data @property def is_away_mode_on(self): """Return true if away mode is on.""" return self._away def turn_away_mode_on(self): """Turn away on. Somecomfort does have a proprietary away mode, but it doesn't really work the way it should. For example: If you set a temperature manually it doesn't get overwritten when away mode is switched on. 
""" self._away = True import somecomfort try: # Get current mode mode = self._device.system_mode except somecomfort.SomeComfortError: _LOGGER.error('Can not get system mode') return try: # Set permanent hold setattr(self._device, "hold_{}".format(mode), True) # Set temperature setattr(self._device, "setpoint_{}".format(mode), getattr(self, "_{}_away_temp".format(mode))) except somecomfort.SomeComfortError: _LOGGER.error('Temperature %.1f out of range', getattr(self, "_{}_away_temp".format(mode))) def turn_away_mode_off(self): """Turn away off.""" self._away = False import somecomfort try: # Disabling all hold modes self._device.hold_cool = False self._device.hold_heat = False except somecomfort.SomeComfortError: _LOGGER.error('Can not stop hold mode') def set_operation_mode(self, operation_mode: str) -> None: """Set the system mode (Cool, Heat, etc).""" if hasattr(self._device, ATTR_SYSTEM_MODE): self._device.system_mode = operation_mode def update(self): """Update the state.""" import somecomfort retries = 3 while retries > 0: try: self._device.refresh() break except (somecomfort.client.APIRateLimited, OSError, requests.exceptions.ReadTimeout) as exp: retries -= 1 if retries == 0: raise exp if not self._retry(): raise exp _LOGGER.error( "SomeComfort update failed, Retrying - Error: %s", exp) def _retry(self): """Recreate a new somecomfort client. When we got an error, the best way to be sure that the next query will succeed, is to recreate a new somecomfort client. """ import somecomfort try: self._client = somecomfort.SomeComfort( self._username, self._password) except somecomfort.AuthError: _LOGGER.error("Failed to login to honeywell account %s", self._username) return False except somecomfort.SomeComfortError as ex: _LOGGER.error("Failed to initialize honeywell client: %s", str(ex)) return False devices = [device for location in self._client.locations_by_id.values() for device in location.devices_by_id.values() if device.name == self._device.name] if len(devices) != 1: _LOGGER.error("Failed to find device %s", self._device.name) return False self._device = devices[0] return True
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/honeywell/climate.py
"""Support for Iperf3 network measurement tool.""" import logging from datetime import timedelta import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_PORT, \ CONF_HOST, CONF_PROTOCOL, CONF_HOSTS, CONF_SCAN_INTERVAL from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.event import async_track_time_interval DOMAIN = 'iperf3' DATA_UPDATED = '{}_data_updated'.format(DOMAIN) _LOGGER = logging.getLogger(__name__) CONF_DURATION = 'duration' CONF_PARALLEL = 'parallel' CONF_MANUAL = 'manual' DEFAULT_DURATION = 10 DEFAULT_PORT = 5201 DEFAULT_PARALLEL = 1 DEFAULT_PROTOCOL = 'tcp' DEFAULT_INTERVAL = timedelta(minutes=60) ATTR_DOWNLOAD = 'download' ATTR_UPLOAD = 'upload' ATTR_VERSION = 'Version' ATTR_HOST = 'host' UNIT_OF_MEASUREMENT = 'Mbit/s' SENSOR_TYPES = { ATTR_DOWNLOAD: [ATTR_DOWNLOAD.capitalize(), UNIT_OF_MEASUREMENT], ATTR_UPLOAD: [ATTR_UPLOAD.capitalize(), UNIT_OF_MEASUREMENT], } PROTOCOLS = ['tcp', 'udp'] HOST_CONFIG_SCHEMA = vol.Schema({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_DURATION, default=DEFAULT_DURATION): vol.Range(5, 10), vol.Optional(CONF_PARALLEL, default=DEFAULT_PARALLEL): vol.Range(1, 20), vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.In(PROTOCOLS), }) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Required(CONF_HOSTS): vol.All( cv.ensure_list, [HOST_CONFIG_SCHEMA] ), vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All(cv.ensure_list, [vol.In(list(SENSOR_TYPES))]), vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL): vol.All( cv.time_period, cv.positive_timedelta ), vol.Optional(CONF_MANUAL, default=False): cv.boolean, }) }, extra=vol.ALLOW_EXTRA) SERVICE_SCHEMA = vol.Schema({ vol.Optional(ATTR_HOST, default=None): cv.string, }) async def async_setup(hass, config): """Set up the iperf3 component.""" import iperf3 hass.data[DOMAIN] = {} conf = config[DOMAIN] for host in conf[CONF_HOSTS]: host_name = host[CONF_HOST] client = iperf3.Client() client.duration = host[CONF_DURATION] client.server_hostname = host_name client.port = host[CONF_PORT] client.num_streams = host[CONF_PARALLEL] client.protocol = host[CONF_PROTOCOL] client.verbose = False data = hass.data[DOMAIN][host_name] = Iperf3Data(hass, client) if not conf[CONF_MANUAL]: async_track_time_interval( hass, data.update, conf[CONF_SCAN_INTERVAL] ) def update(call): """Service call to manually update the data.""" called_host = call.data[ATTR_HOST] if called_host in hass.data[DOMAIN]: hass.data[DOMAIN][called_host].update() else: for iperf3_host in hass.data[DOMAIN].values(): iperf3_host.update() hass.services.async_register( DOMAIN, 'speedtest', update, schema=SERVICE_SCHEMA ) hass.async_create_task( async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, conf[CONF_MONITORED_CONDITIONS], config ) ) return True class Iperf3Data: """Get the latest data from iperf3.""" def __init__(self, hass, client): """Initialize the data object.""" self._hass = hass self._client = client self.data = { ATTR_DOWNLOAD: None, ATTR_UPLOAD: None, ATTR_VERSION: None } @property def protocol(self): """Return the protocol used for this connection.""" return self._client.protocol @property def host(self): """Return the host connected to.""" return self._client.server_hostname @property def 
port(self): """Return the port on the host connected to.""" return self._client.port def update(self, now=None): """Get the latest data from iperf3.""" if self.protocol == 'udp': # UDP only have 1 way attribute result = self._run_test(ATTR_DOWNLOAD) self.data[ATTR_DOWNLOAD] = self.data[ATTR_UPLOAD] = getattr( result, 'Mbps', None) self.data[ATTR_VERSION] = getattr(result, 'version', None) else: result = self._run_test(ATTR_DOWNLOAD) self.data[ATTR_DOWNLOAD] = getattr( result, 'received_Mbps', None) self.data[ATTR_VERSION] = getattr(result, 'version', None) self.data[ATTR_UPLOAD] = getattr( self._run_test(ATTR_UPLOAD), 'sent_Mbps', None) dispatcher_send(self._hass, DATA_UPDATED, self.host) def _run_test(self, test_type): """Run and return the iperf3 data.""" self._client.reverse = test_type == ATTR_DOWNLOAD try: result = self._client.run() except (AttributeError, OSError, ValueError) as error: _LOGGER.error("Iperf3 error: %s", error) return None if result is not None and \ hasattr(result, 'error') and \ result.error is not None: _LOGGER.error("Iperf3 error: %s", result.error) return None return result
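The per-host schema above does the heavy lifting of filling in defaults and range-checking values. A standalone sketch that mirrors HOST_CONFIG_SCHEMA with plain voluptuous validators in place of the hass-specific ones (cv.string, cv.port); the host name is a made-up example:

import voluptuous as vol

HOST_SCHEMA = vol.Schema({
    vol.Required('host'): str,
    vol.Optional('port', default=5201): vol.All(int, vol.Range(1, 65535)),
    vol.Optional('duration', default=10): vol.Range(5, 10),
    vol.Optional('parallel', default=1): vol.Range(1, 20),
    vol.Optional('protocol', default='tcp'): vol.In(['tcp', 'udp']),
})

# Every omitted optional key comes back filled with its default.
print(HOST_SCHEMA({'host': 'iperf.example.org'}))

try:
    HOST_SCHEMA({'host': 'iperf.example.org', 'duration': 60})
except vol.Invalid as err:
    print(err)  # a duration outside the 5-10 second range is rejected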
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/iperf3/__init__.py
"""Support for Hydrawise sprinkler sensors.""" import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_MONITORED_CONDITIONS import homeassistant.helpers.config_validation as cv from . import ( DATA_HYDRAWISE, DEVICE_MAP, DEVICE_MAP_INDEX, SENSORS, HydrawiseEntity) _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSORS): vol.All(cv.ensure_list, [vol.In(SENSORS)]), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a sensor for a Hydrawise device.""" hydrawise = hass.data[DATA_HYDRAWISE].data sensors = [] for sensor_type in config.get(CONF_MONITORED_CONDITIONS): for zone in hydrawise.relays: sensors.append(HydrawiseSensor(zone, sensor_type)) add_entities(sensors, True) class HydrawiseSensor(HydrawiseEntity): """A sensor implementation for Hydrawise device.""" @property def state(self): """Return the state of the sensor.""" return self._state def update(self): """Get the latest data and updates the states.""" mydata = self.hass.data[DATA_HYDRAWISE].data _LOGGER.debug("Updating Hydrawise sensor: %s", self._name) if self._sensor_type == 'watering_time': if not mydata.running: self._state = 0 else: if int(mydata.running[0]['relay']) == self.data['relay']: self._state = int(mydata.running[0]['time_left']/60) else: self._state = 0 else: # _sensor_type == 'next_cycle' for relay in mydata.relays: if relay['relay'] == self.data['relay']: if relay['nicetime'] == 'Not scheduled': self._state = 'not_scheduled' else: self._state = relay['nicetime'].split(',')[0] + \ ' ' + relay['nicetime'].split(' ')[3] @property def icon(self): """Icon to use in the frontend, if any.""" return DEVICE_MAP[self._sensor_type][ DEVICE_MAP_INDEX.index('ICON_INDEX')]
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/hydrawise/sensor.py
"""Support for MQTT switches.""" import logging import voluptuous as vol from homeassistant.components import mqtt, switch from homeassistant.components.switch import SwitchDevice from homeassistant.const import ( CONF_DEVICE, CONF_ICON, CONF_NAME, CONF_OPTIMISTIC, CONF_PAYLOAD_OFF, CONF_PAYLOAD_ON, CONF_VALUE_TEMPLATE, STATE_ON) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, HomeAssistantType from . import ( ATTR_DISCOVERY_HASH, CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC, CONF_UNIQUE_ID, MqttAttributes, MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo, subscription) from .discovery import MQTT_DISCOVERY_NEW, clear_discovery_hash _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'MQTT Switch' DEFAULT_PAYLOAD_ON = 'ON' DEFAULT_PAYLOAD_OFF = 'OFF' DEFAULT_OPTIMISTIC = False CONF_STATE_ON = "state_on" CONF_STATE_OFF = "state_off" PLATFORM_SCHEMA = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend({ vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA, vol.Optional(CONF_ICON): cv.icon, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean, vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string, vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string, vol.Optional(CONF_STATE_OFF): cv.string, vol.Optional(CONF_STATE_ON): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, }).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema).extend( mqtt.MQTT_JSON_ATTRS_SCHEMA.schema) async def async_setup_platform(hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None): """Set up MQTT switch through configuration.yaml.""" await _async_setup_entity(config, async_add_entities, discovery_info) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up MQTT switch dynamically through MQTT discovery.""" async def async_discover(discovery_payload): """Discover and add a MQTT switch.""" try: discovery_hash = discovery_payload.pop(ATTR_DISCOVERY_HASH) config = PLATFORM_SCHEMA(discovery_payload) await _async_setup_entity(config, async_add_entities, config_entry, discovery_hash) except Exception: if discovery_hash: clear_discovery_hash(hass, discovery_hash) raise async_dispatcher_connect( hass, MQTT_DISCOVERY_NEW.format(switch.DOMAIN, 'mqtt'), async_discover) async def _async_setup_entity(config, async_add_entities, config_entry=None, discovery_hash=None): """Set up the MQTT switch.""" async_add_entities([MqttSwitch(config, config_entry, discovery_hash)]) # pylint: disable=too-many-ancestors class MqttSwitch(MqttAttributes, MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo, SwitchDevice, RestoreEntity): """Representation of a switch that can be toggled using MQTT.""" def __init__(self, config, config_entry, discovery_hash): """Initialize the MQTT switch.""" self._state = False self._sub_state = None self._state_on = None self._state_off = None self._optimistic = None self._unique_id = config.get(CONF_UNIQUE_ID) # Load config self._setup_from_config(config) device_config = config.get(CONF_DEVICE) MqttAttributes.__init__(self, config) MqttAvailability.__init__(self, config) MqttDiscoveryUpdate.__init__(self, discovery_hash, self.discovery_update) MqttEntityDeviceInfo.__init__(self, device_config, config_entry) async 
def async_added_to_hass(self): """Subscribe to MQTT events.""" await super().async_added_to_hass() await self._subscribe_topics() async def discovery_update(self, discovery_payload): """Handle updated discovery message.""" config = PLATFORM_SCHEMA(discovery_payload) self._setup_from_config(config) await self.attributes_discovery_update(config) await self.availability_discovery_update(config) await self.device_info_discovery_update(config) await self._subscribe_topics() self.async_write_ha_state() def _setup_from_config(self, config): """(Re)Setup the entity.""" self._config = config state_on = config.get(CONF_STATE_ON) self._state_on = state_on if state_on else config[CONF_PAYLOAD_ON] state_off = config.get(CONF_STATE_OFF) self._state_off = state_off if state_off else \ config[CONF_PAYLOAD_OFF] self._optimistic = config[CONF_OPTIMISTIC] async def _subscribe_topics(self): """(Re)Subscribe to topics.""" template = self._config.get(CONF_VALUE_TEMPLATE) if template is not None: template.hass = self.hass @callback def state_message_received(msg): """Handle new MQTT state messages.""" payload = msg.payload if template is not None: payload = template.async_render_with_possible_json_value( payload) if payload == self._state_on: self._state = True elif payload == self._state_off: self._state = False self.async_write_ha_state() if self._config.get(CONF_STATE_TOPIC) is None: # Force into optimistic mode. self._optimistic = True else: self._sub_state = await subscription.async_subscribe_topics( self.hass, self._sub_state, {CONF_STATE_TOPIC: {'topic': self._config.get(CONF_STATE_TOPIC), 'msg_callback': state_message_received, 'qos': self._config[CONF_QOS]}}) if self._optimistic: last_state = await self.async_get_last_state() if last_state: self._state = last_state.state == STATE_ON async def async_will_remove_from_hass(self): """Unsubscribe when removed.""" self._sub_state = await subscription.async_unsubscribe_topics( self.hass, self._sub_state) await MqttAttributes.async_will_remove_from_hass(self) await MqttAvailability.async_will_remove_from_hass(self) @property def should_poll(self): """Return the polling state.""" return False @property def name(self): """Return the name of the switch.""" return self._config[CONF_NAME] @property def is_on(self): """Return true if device is on.""" return self._state @property def assumed_state(self): """Return true if we do optimistic updates.""" return self._optimistic @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def icon(self): """Return the icon.""" return self._config.get(CONF_ICON) async def async_turn_on(self, **kwargs): """Turn the device on. This method is a coroutine. """ mqtt.async_publish( self.hass, self._config[CONF_COMMAND_TOPIC], self._config[CONF_PAYLOAD_ON], self._config[CONF_QOS], self._config[CONF_RETAIN]) if self._optimistic: # Optimistically assume that switch has changed state. self._state = True self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the device off. This method is a coroutine. """ mqtt.async_publish( self.hass, self._config[CONF_COMMAND_TOPIC], self._config[CONF_PAYLOAD_OFF], self._config[CONF_QOS], self._config[CONF_RETAIN]) if self._optimistic: # Optimistically assume that switch has changed state. self._state = False self.async_write_ha_state()
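A standalone sketch of the payload-matching rule combined from _setup_from_config() and state_message_received() above: state_on/state_off override what is matched on the state topic and fall back to payload_on/payload_off when unset, while an unrecognized payload leaves the previous state untouched (represented here by None):

def resolve_state(payload, config):
    state_on = config.get('state_on') or config['payload_on']
    state_off = config.get('state_off') or config['payload_off']
    if payload == state_on:
        return True
    if payload == state_off:
        return False
    return None  # unrecognized payload: keep the current state

config = {'payload_on': 'ON', 'payload_off': 'OFF', 'state_on': '1'}
print(resolve_state('1', config))    # True  (state_on overrides payload_on)
print(resolve_state('OFF', config))  # False (state_off falls back)
print(resolve_state('ON', config))   # None  ('ON' is only the command payload)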
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/mqtt/switch.py
"""Support for NuHeat thermostats.""" from datetime import timedelta import logging import voluptuous as vol from homeassistant.components.climate import ClimateDevice from homeassistant.components.climate.const import ( DOMAIN, STATE_AUTO, STATE_HEAT, STATE_IDLE, SUPPORT_HOLD_MODE, SUPPORT_OPERATION_MODE, SUPPORT_TARGET_TEMPERATURE) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT) import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle from . import DOMAIN as NUHEAT_DOMAIN _LOGGER = logging.getLogger(__name__) ICON = "mdi:thermometer" MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5) # Hold modes MODE_AUTO = STATE_AUTO # Run device schedule MODE_HOLD_TEMPERATURE = "temperature" MODE_TEMPORARY_HOLD = "temporary_temperature" OPERATION_LIST = [STATE_HEAT, STATE_IDLE] SCHEDULE_HOLD = 3 SCHEDULE_RUN = 1 SCHEDULE_TEMPORARY_HOLD = 2 SERVICE_RESUME_PROGRAM = "nuheat_resume_program" RESUME_PROGRAM_SCHEMA = vol.Schema({ vol.Optional(ATTR_ENTITY_ID): cv.entity_ids }) SUPPORT_FLAGS = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_HOLD_MODE | SUPPORT_OPERATION_MODE) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the NuHeat thermostat(s).""" if discovery_info is None: return temperature_unit = hass.config.units.temperature_unit api, serial_numbers = hass.data[NUHEAT_DOMAIN] thermostats = [ NuHeatThermostat(api, serial_number, temperature_unit) for serial_number in serial_numbers ] add_entities(thermostats, True) def resume_program_set_service(service): """Resume the program on the target thermostats.""" entity_id = service.data.get(ATTR_ENTITY_ID) if entity_id: target_thermostats = [device for device in thermostats if device.entity_id in entity_id] else: target_thermostats = thermostats for thermostat in target_thermostats: thermostat.resume_program() thermostat.schedule_update_ha_state(True) hass.services.register( DOMAIN, SERVICE_RESUME_PROGRAM, resume_program_set_service, schema=RESUME_PROGRAM_SCHEMA) class NuHeatThermostat(ClimateDevice): """Representation of a NuHeat Thermostat.""" def __init__(self, api, serial_number, temperature_unit): """Initialize the thermostat.""" self._thermostat = api.get_thermostat(serial_number) self._temperature_unit = temperature_unit self._force_update = False @property def name(self): """Return the name of the thermostat.""" return self._thermostat.room @property def icon(self): """Return the icon to use in the frontend.""" return ICON @property def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS @property def temperature_unit(self): """Return the unit of measurement.""" if self._temperature_unit == "C": return TEMP_CELSIUS return TEMP_FAHRENHEIT @property def current_temperature(self): """Return the current temperature.""" if self._temperature_unit == "C": return self._thermostat.celsius return self._thermostat.fahrenheit @property def current_operation(self): """Return current operation. ie. 
heat, idle.""" if self._thermostat.heating: return STATE_HEAT return STATE_IDLE @property def min_temp(self): """Return the minimum supported temperature for the thermostat.""" if self._temperature_unit == "C": return self._thermostat.min_celsius return self._thermostat.min_fahrenheit @property def max_temp(self): """Return the maximum supported temperature for the thermostat.""" if self._temperature_unit == "C": return self._thermostat.max_celsius return self._thermostat.max_fahrenheit @property def target_temperature(self): """Return the currently programmed temperature.""" if self._temperature_unit == "C": return self._thermostat.target_celsius return self._thermostat.target_fahrenheit @property def current_hold_mode(self): """Return current hold mode.""" schedule_mode = self._thermostat.schedule_mode if schedule_mode == SCHEDULE_RUN: return MODE_AUTO if schedule_mode == SCHEDULE_HOLD: return MODE_HOLD_TEMPERATURE if schedule_mode == SCHEDULE_TEMPORARY_HOLD: return MODE_TEMPORARY_HOLD return MODE_AUTO @property def operation_list(self): """Return list of possible operation modes.""" return OPERATION_LIST def resume_program(self): """Resume the thermostat's programmed schedule.""" self._thermostat.resume_schedule() self._force_update = True def set_hold_mode(self, hold_mode): """Update the hold mode of the thermostat.""" if hold_mode == MODE_AUTO: schedule_mode = SCHEDULE_RUN if hold_mode == MODE_HOLD_TEMPERATURE: schedule_mode = SCHEDULE_HOLD if hold_mode == MODE_TEMPORARY_HOLD: schedule_mode = SCHEDULE_TEMPORARY_HOLD self._thermostat.schedule_mode = schedule_mode self._force_update = True def set_temperature(self, **kwargs): """Set a new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if self._temperature_unit == "C": self._thermostat.target_celsius = temperature else: self._thermostat.target_fahrenheit = temperature _LOGGER.debug( "Setting NuHeat thermostat temperature to %s %s", temperature, self.temperature_unit) self._force_update = True def update(self): """Get the latest state from the thermostat.""" if self._force_update: self._throttled_update(no_throttle=True) self._force_update = False else: self._throttled_update() @Throttle(MIN_TIME_BETWEEN_UPDATES) def _throttled_update(self, **kwargs): """Get the latest state from the thermostat with a throttle.""" self._thermostat.get_data()
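A standalone sketch of the hold-mode mapping used by current_hold_mode and set_hold_mode above: Home Assistant hold modes map onto NuHeat's numeric schedule modes (1 = run schedule, 2 = temporary hold, 3 = permanent hold), and the reverse lookup falls back to auto for any unknown value, mirroring the final return in current_hold_mode:

SCHEDULE_RUN, SCHEDULE_TEMPORARY_HOLD, SCHEDULE_HOLD = 1, 2, 3

TO_SCHEDULE = {
    'auto': SCHEDULE_RUN,
    'temperature': SCHEDULE_HOLD,
    'temporary_temperature': SCHEDULE_TEMPORARY_HOLD,
}
FROM_SCHEDULE = {v: k for k, v in TO_SCHEDULE.items()}

print(TO_SCHEDULE['temperature'])     # 3
print(FROM_SCHEDULE.get(2))           # temporary_temperature
print(FROM_SCHEDULE.get(99, 'auto'))  # auto, the fallback in the code above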
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/nuheat/climate.py
"""Allow users to set and activate scenes.""" from collections import namedtuple import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_STATE, CONF_ENTITIES, CONF_NAME, CONF_PLATFORM, STATE_OFF, STATE_ON) from homeassistant.core import State import homeassistant.helpers.config_validation as cv from homeassistant.helpers.state import HASS_DOMAIN, async_reproduce_state from homeassistant.components.scene import STATES, Scene PLATFORM_SCHEMA = vol.Schema({ vol.Required(CONF_PLATFORM): HASS_DOMAIN, vol.Required(STATES): vol.All( cv.ensure_list, [ { vol.Required(CONF_NAME): cv.string, vol.Required(CONF_ENTITIES): { cv.entity_id: vol.Any(str, bool, dict) }, } ] ), }, extra=vol.ALLOW_EXTRA) SCENECONFIG = namedtuple('SceneConfig', [CONF_NAME, STATES]) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up home assistant scene entries.""" scene_config = config.get(STATES) async_add_entities(HomeAssistantScene( hass, _process_config(scene)) for scene in scene_config) return True def _process_config(scene_config): """Process passed in config into a format to work with. Async friendly. """ name = scene_config.get(CONF_NAME) states = {} c_entities = dict(scene_config.get(CONF_ENTITIES, {})) for entity_id in c_entities: if isinstance(c_entities[entity_id], dict): entity_attrs = c_entities[entity_id].copy() state = entity_attrs.pop(ATTR_STATE, None) attributes = entity_attrs else: state = c_entities[entity_id] attributes = {} # YAML translates 'on' to a boolean # http://yaml.org/type/bool.html if isinstance(state, bool): state = STATE_ON if state else STATE_OFF else: state = str(state) states[entity_id.lower()] = State(entity_id, state, attributes) return SCENECONFIG(name, states) class HomeAssistantScene(Scene): """A scene is a group of entities and the states we want them to be.""" def __init__(self, hass, scene_config): """Initialize the scene.""" self.hass = hass self.scene_config = scene_config @property def name(self): """Return the name of the scene.""" return self.scene_config.name @property def device_state_attributes(self): """Return the scene state attributes.""" return { ATTR_ENTITY_ID: list(self.scene_config.states.keys()), } async def async_activate(self): """Activate scene. Try to get entities into requested state.""" await async_reproduce_state( self.hass, self.scene_config.states.values(), True)
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/homeassistant/scene.py
"""Family Hub camera for Samsung Refrigerators.""" import logging import voluptuous as vol from homeassistant.components.camera import Camera, PLATFORM_SCHEMA from homeassistant.const import CONF_IP_ADDRESS, CONF_NAME from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'FamilyHub Camera' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_IP_ADDRESS): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, }) async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up the Family Hub Camera.""" from pyfamilyhublocal import FamilyHubCam address = config.get(CONF_IP_ADDRESS) name = config.get(CONF_NAME) session = async_get_clientsession(hass) family_hub_cam = FamilyHubCam(address, hass.loop, session) async_add_entities([FamilyHubCamera(name, family_hub_cam)], True) class FamilyHubCamera(Camera): """The representation of a Family Hub camera.""" def __init__(self, name, family_hub_cam): """Initialize camera component.""" super().__init__() self._name = name self.family_hub_cam = family_hub_cam async def async_camera_image(self): """Return a still image response.""" return await self.family_hub_cam.async_get_cam_image() @property def name(self): """Return the name of this camera.""" return self._name
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/familyhub/camera.py
"""Support for the TrackR platform.""" import logging import voluptuous as vol from homeassistant.components.device_tracker import PLATFORM_SCHEMA from homeassistant.const import CONF_USERNAME, CONF_PASSWORD import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import track_utc_time_change from homeassistant.util import slugify _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string }) def setup_scanner(hass, config: dict, see, discovery_info=None): """Validate the configuration and return a TrackR scanner.""" TrackRDeviceScanner(hass, config, see) return True class TrackRDeviceScanner: """A class representing a TrackR device.""" def __init__(self, hass, config: dict, see) -> None: """Initialize the TrackR device scanner.""" from pytrackr.api import trackrApiInterface self.hass = hass self.api = trackrApiInterface( config.get(CONF_USERNAME), config.get(CONF_PASSWORD)) self.see = see self.devices = self.api.get_trackrs() self._update_info() track_utc_time_change( self.hass, self._update_info, second=range(0, 60, 30)) def _update_info(self, now=None) -> None: """Update the device info.""" _LOGGER.debug("Updating devices %s", now) # Update self.devices to collect new devices added # to the users account. self.devices = self.api.get_trackrs() for trackr in self.devices: trackr.update_state() trackr_id = trackr.tracker_id() trackr_device_id = trackr.id() lost = trackr.lost() dev_id = slugify(trackr.name()) if dev_id is None: dev_id = trackr_id location = trackr.last_known_location() lat = location['latitude'] lon = location['longitude'] attrs = { 'last_updated': trackr.last_updated(), 'last_seen': trackr.last_time_seen(), 'trackr_id': trackr_id, 'id': trackr_device_id, 'lost': lost, 'battery_level': trackr.battery_level() } self.see( dev_id=dev_id, gps=(lat, lon), attributes=attrs )
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/trackr/device_tracker.py
"""Support to manage a shopping list.""" import asyncio import logging import uuid import voluptuous as vol from homeassistant.const import HTTP_NOT_FOUND, HTTP_BAD_REQUEST from homeassistant.core import callback from homeassistant.components import http from homeassistant.components.http.data_validator import ( RequestDataValidator) from homeassistant.helpers import intent import homeassistant.helpers.config_validation as cv from homeassistant.util.json import load_json, save_json from homeassistant.components import websocket_api ATTR_NAME = 'name' DOMAIN = 'shopping_list' _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema({DOMAIN: {}}, extra=vol.ALLOW_EXTRA) EVENT = 'shopping_list_updated' INTENT_ADD_ITEM = 'HassShoppingListAddItem' INTENT_LAST_ITEMS = 'HassShoppingListLastItems' ITEM_UPDATE_SCHEMA = vol.Schema({ 'complete': bool, ATTR_NAME: str, }) PERSISTENCE = '.shopping_list.json' SERVICE_ADD_ITEM = 'add_item' SERVICE_COMPLETE_ITEM = 'complete_item' SERVICE_ITEM_SCHEMA = vol.Schema({ vol.Required(ATTR_NAME): vol.Any(None, cv.string) }) WS_TYPE_SHOPPING_LIST_ITEMS = 'shopping_list/items' WS_TYPE_SHOPPING_LIST_ADD_ITEM = 'shopping_list/items/add' WS_TYPE_SHOPPING_LIST_UPDATE_ITEM = 'shopping_list/items/update' WS_TYPE_SHOPPING_LIST_CLEAR_ITEMS = 'shopping_list/items/clear' SCHEMA_WEBSOCKET_ITEMS = \ websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({ vol.Required('type'): WS_TYPE_SHOPPING_LIST_ITEMS }) SCHEMA_WEBSOCKET_ADD_ITEM = \ websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({ vol.Required('type'): WS_TYPE_SHOPPING_LIST_ADD_ITEM, vol.Required('name'): str }) SCHEMA_WEBSOCKET_UPDATE_ITEM = \ websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({ vol.Required('type'): WS_TYPE_SHOPPING_LIST_UPDATE_ITEM, vol.Required('item_id'): str, vol.Optional('name'): str, vol.Optional('complete'): bool }) SCHEMA_WEBSOCKET_CLEAR_ITEMS = \ websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({ vol.Required('type'): WS_TYPE_SHOPPING_LIST_CLEAR_ITEMS }) @asyncio.coroutine def async_setup(hass, config): """Initialize the shopping list.""" @asyncio.coroutine def add_item_service(call): """Add an item with `name`.""" data = hass.data[DOMAIN] name = call.data.get(ATTR_NAME) if name is not None: data.async_add(name) @asyncio.coroutine def complete_item_service(call): """Mark the item provided via `name` as completed.""" data = hass.data[DOMAIN] name = call.data.get(ATTR_NAME) if name is None: return try: item = [item for item in data.items if item['name'] == name][0] except IndexError: _LOGGER.error("Removing of item failed: %s cannot be found", name) else: data.async_update(item['id'], {'name': name, 'complete': True}) data = hass.data[DOMAIN] = ShoppingData(hass) yield from data.async_load() intent.async_register(hass, AddItemIntent()) intent.async_register(hass, ListTopItemsIntent()) hass.services.async_register( DOMAIN, SERVICE_ADD_ITEM, add_item_service, schema=SERVICE_ITEM_SCHEMA ) hass.services.async_register( DOMAIN, SERVICE_COMPLETE_ITEM, complete_item_service, schema=SERVICE_ITEM_SCHEMA ) hass.http.register_view(ShoppingListView) hass.http.register_view(CreateShoppingListItemView) hass.http.register_view(UpdateShoppingListItemView) hass.http.register_view(ClearCompletedItemsView) hass.components.conversation.async_register(INTENT_ADD_ITEM, [ 'Add [the] [a] [an] {item} to my shopping list', ]) hass.components.conversation.async_register(INTENT_LAST_ITEMS, [ 'What is on my shopping list' ]) yield from hass.components.frontend.async_register_built_in_panel( 'shopping-list', 'shopping_list', 
'mdi:cart') hass.components.websocket_api.async_register_command( WS_TYPE_SHOPPING_LIST_ITEMS, websocket_handle_items, SCHEMA_WEBSOCKET_ITEMS) hass.components.websocket_api.async_register_command( WS_TYPE_SHOPPING_LIST_ADD_ITEM, websocket_handle_add, SCHEMA_WEBSOCKET_ADD_ITEM) hass.components.websocket_api.async_register_command( WS_TYPE_SHOPPING_LIST_UPDATE_ITEM, websocket_handle_update, SCHEMA_WEBSOCKET_UPDATE_ITEM) hass.components.websocket_api.async_register_command( WS_TYPE_SHOPPING_LIST_CLEAR_ITEMS, websocket_handle_clear, SCHEMA_WEBSOCKET_CLEAR_ITEMS) return True class ShoppingData: """Class to hold shopping list data.""" def __init__(self, hass): """Initialize the shopping list.""" self.hass = hass self.items = [] @callback def async_add(self, name): """Add a shopping list item.""" item = { 'name': name, 'id': uuid.uuid4().hex, 'complete': False } self.items.append(item) self.hass.async_add_job(self.save) return item @callback def async_update(self, item_id, info): """Update a shopping list item.""" item = next((itm for itm in self.items if itm['id'] == item_id), None) if item is None: raise KeyError info = ITEM_UPDATE_SCHEMA(info) item.update(info) self.hass.async_add_job(self.save) return item @callback def async_clear_completed(self): """Clear completed items.""" self.items = [itm for itm in self.items if not itm['complete']] self.hass.async_add_job(self.save) @asyncio.coroutine def async_load(self): """Load items.""" def load(): """Load the items synchronously.""" return load_json(self.hass.config.path(PERSISTENCE), default=[]) self.items = yield from self.hass.async_add_job(load) def save(self): """Save the items.""" save_json(self.hass.config.path(PERSISTENCE), self.items) class AddItemIntent(intent.IntentHandler): """Handle AddItem intents.""" intent_type = INTENT_ADD_ITEM slot_schema = { 'item': cv.string } @asyncio.coroutine def async_handle(self, intent_obj): """Handle the intent.""" slots = self.async_validate_slots(intent_obj.slots) item = slots['item']['value'] intent_obj.hass.data[DOMAIN].async_add(item) response = intent_obj.create_response() response.async_set_speech( "I've added {} to your shopping list".format(item)) intent_obj.hass.bus.async_fire(EVENT) return response class ListTopItemsIntent(intent.IntentHandler): """Handle AddItem intents.""" intent_type = INTENT_LAST_ITEMS slot_schema = { 'item': cv.string } @asyncio.coroutine def async_handle(self, intent_obj): """Handle the intent.""" items = intent_obj.hass.data[DOMAIN].items[-5:] response = intent_obj.create_response() if not items: response.async_set_speech( "There are no items on your shopping list") else: response.async_set_speech( "These are the top {} items on your shopping list: {}".format( min(len(items), 5), ', '.join(itm['name'] for itm in reversed(items)))) return response class ShoppingListView(http.HomeAssistantView): """View to retrieve shopping list content.""" url = '/api/shopping_list' name = "api:shopping_list" @callback def get(self, request): """Retrieve shopping list items.""" return self.json(request.app['hass'].data[DOMAIN].items) class UpdateShoppingListItemView(http.HomeAssistantView): """View to retrieve shopping list content.""" url = '/api/shopping_list/item/{item_id}' name = "api:shopping_list:item:id" async def post(self, request, item_id): """Update a shopping list item.""" data = await request.json() try: item = request.app['hass'].data[DOMAIN].async_update(item_id, data) request.app['hass'].bus.async_fire(EVENT) return self.json(item) except KeyError: return 
self.json_message('Item not found', HTTP_NOT_FOUND) except vol.Invalid: return self.json_message('Item not found', HTTP_BAD_REQUEST) class CreateShoppingListItemView(http.HomeAssistantView): """View to retrieve shopping list content.""" url = '/api/shopping_list/item' name = "api:shopping_list:item" @RequestDataValidator(vol.Schema({ vol.Required('name'): str, })) @asyncio.coroutine def post(self, request, data): """Create a new shopping list item.""" item = request.app['hass'].data[DOMAIN].async_add(data['name']) request.app['hass'].bus.async_fire(EVENT) return self.json(item) class ClearCompletedItemsView(http.HomeAssistantView): """View to retrieve shopping list content.""" url = '/api/shopping_list/clear_completed' name = "api:shopping_list:clear_completed" @callback def post(self, request): """Retrieve if API is running.""" hass = request.app['hass'] hass.data[DOMAIN].async_clear_completed() hass.bus.async_fire(EVENT) return self.json_message('Cleared completed items.') @callback def websocket_handle_items(hass, connection, msg): """Handle get shopping_list items.""" connection.send_message(websocket_api.result_message( msg['id'], hass.data[DOMAIN].items)) @callback def websocket_handle_add(hass, connection, msg): """Handle add item to shopping_list.""" item = hass.data[DOMAIN].async_add(msg['name']) hass.bus.async_fire(EVENT) connection.send_message(websocket_api.result_message( msg['id'], item)) @websocket_api.async_response async def websocket_handle_update(hass, connection, msg): """Handle update shopping_list item.""" msg_id = msg.pop('id') item_id = msg.pop('item_id') msg.pop('type') data = msg try: item = hass.data[DOMAIN].async_update(item_id, data) hass.bus.async_fire(EVENT) connection.send_message(websocket_api.result_message( msg_id, item)) except KeyError: connection.send_message(websocket_api.error_message( msg_id, 'item_not_found', 'Item not found')) @callback def websocket_handle_clear(hass, connection, msg): """Handle clearing shopping_list items.""" hass.data[DOMAIN].async_clear_completed() hass.bus.async_fire(EVENT) connection.send_message(websocket_api.result_message(msg['id']))
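A sketch of exercising the HTTP views registered above from outside Home Assistant using the requests library; the host, port, and token are placeholders.

import requests

BASE = 'http://localhost:8123/api/shopping_list'
HEADERS = {'Authorization': 'Bearer LONG_LIVED_ACCESS_TOKEN'}

# Create an item, mark it complete, clear completed items, list the rest.
item = requests.post(BASE + '/item', json={'name': 'milk'},
                     headers=HEADERS).json()
requests.post('{}/item/{}'.format(BASE, item['id']),
              json={'complete': True}, headers=HEADERS)
requests.post(BASE + '/clear_completed', headers=HEADERS)
print(requests.get(BASE, headers=HEADERS).json())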
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/shopping_list/__init__.py
"""Support for Blink system camera.""" import logging from homeassistant.components.camera import Camera from . import BLINK_DATA, DEFAULT_BRAND _LOGGER = logging.getLogger(__name__) ATTR_VIDEO_CLIP = 'video' ATTR_IMAGE = 'image' def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a Blink Camera.""" if discovery_info is None: return data = hass.data[BLINK_DATA] devs = [] for name, camera in data.cameras.items(): devs.append(BlinkCamera(data, name, camera)) add_entities(devs) class BlinkCamera(Camera): """An implementation of a Blink Camera.""" def __init__(self, data, name, camera): """Initialize a camera.""" super().__init__() self.data = data self._name = "{} {}".format(BLINK_DATA, name) self._camera = camera self._unique_id = "{}-camera".format(camera.serial) self.response = None self.current_image = None self.last_image = None _LOGGER.debug("Initialized blink camera %s", self._name) @property def name(self): """Return the camera name.""" return self._name @property def unique_id(self): """Return the unique camera id.""" return self._unique_id @property def device_state_attributes(self): """Return the camera attributes.""" return self._camera.attributes def enable_motion_detection(self): """Enable motion detection for the camera.""" self._camera.set_motion_detect(True) def disable_motion_detection(self): """Disable motion detection for the camera.""" self._camera.set_motion_detect(False) @property def motion_detection_enabled(self): """Return the state of the camera.""" return self._camera.motion_enabled @property def brand(self): """Return the camera brand.""" return DEFAULT_BRAND def camera_image(self): """Return a still image response from the camera.""" return self._camera.image_from_cache.content
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100

        entity.turn_on(brightness=10)
        assert entity.is_on
        assert entity.brightness == 10

        entity.turn_on(brightness=255)
        assert entity.is_on
        assert entity.brightness == 255

        entity.turn_off()

        entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id
        entity_hass = self.hass.states.get(entity_id)
        assert 'Test' == entity_hass.name
        assert 'off' == entity_hass.state

        entity.turn_on()
        entity_hass = self.hass.states.get(entity_id)
        assert 'on' == entity_hass.state

        entity.turn_off()
        entity_hass = self.hass.states.get(entity_id)
        assert 'off' == entity_hass.state

        entity.turn_on(brightness=100)
        entity_hass = self.hass.states.get(entity_id)
        assert 'on' == entity_hass.state

        entity.turn_on(brightness=10)
        entity_hass = self.hass.states.get(entity_id)
        assert 'on' == entity_hass.state

        entity.turn_on(brightness=255)
        entity_hass = self.hass.states.get(entity_id)
        assert 'on' == entity_hass.state

    def test_several_lights(self):
        """Test with 3 lights."""
        assert setup_component(self.hass, 'light', {
            'light': {'platform': 'rfxtrx',
                      'signal_repetitions': 3,
                      'devices':
                          {'0b1100cd0213c7f230010f71': {'name': 'Test'},
                           '0b1100100118cdea02010f70': {'name': 'Bath'},
                           '0b1100101118cdea02010f70': {'name': 'Living'}}}})

        assert 3 == len(rfxtrx_core.RFX_DEVICES)
        device_num = 0
        for device_id in rfxtrx_core.RFX_DEVICES:
            entity = rfxtrx_core.RFX_DEVICES[device_id]
            assert entity.signal_repetitions == 3
            if entity.name == 'Living':
                device_num = device_num + 1
                assert 'off' == entity.state
                assert '<Entity Living: off>' == str(entity)
            elif entity.name == 'Bath':
                device_num = device_num + 1
                assert 'off' == entity.state
                assert '<Entity Bath: off>' == str(entity)
            elif entity.name == 'Test':
                device_num = device_num + 1
                assert 'off' == entity.state
                assert '<Entity Test: off>' == str(entity)

        assert 3 == device_num

    def test_discover_light(self):
        """Test with discovery of lights."""
        assert setup_component(self.hass, 'light', {
            'light': {'platform': 'rfxtrx',
                      'automatic_add': True,
                      'devices': {}}})

        event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070')
        event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p')
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        entity = rfxtrx_core.RFX_DEVICES['0e611622']
        assert 1 == len(rfxtrx_core.RFX_DEVICES)
        assert '<Entity 0b11009e00e6116202020070: on>' == str(entity)

        event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070')
        event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p')
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 1 == len(rfxtrx_core.RFX_DEVICES)

        event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070')
        event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd,
                                0xea, 0x02, 0x02, 0x00, 0x70])
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        entity = rfxtrx_core.RFX_DEVICES['118cdea2']
        assert 2 == len(rfxtrx_core.RFX_DEVICES)
        assert '<Entity 0b1100120118cdea02020070: on>' == str(entity)

        # Trying to add a sensor
        event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279')
        event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y')
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 2 == len(rfxtrx_core.RFX_DEVICES)

        # Trying to add a switch
        event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70')
        event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd,
                                0xea, 0x01, 0x01, 0x0f, 0x70])
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 2 == len(rfxtrx_core.RFX_DEVICES)

        # Trying to add a rollershutter
        event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060')
        event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94,
                                0xAB, 0x02, 0x0E, 0x00, 0x60])
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 2 == len(rfxtrx_core.RFX_DEVICES)

    def test_discover_light_noautoadd(self):
        """Test discovery of lights when automatic_add is False."""
        assert setup_component(self.hass, 'light', {
            'light': {'platform': 'rfxtrx',
                      'automatic_add': False,
                      'devices': {}}})

        event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070')
        event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd,
                                0xea, 0x02, 0x02, 0x00, 0x70])
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 0 == len(rfxtrx_core.RFX_DEVICES)

        event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070')
        event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd,
                                0xea, 0x02, 0x01, 0x00, 0x70])
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 0 == len(rfxtrx_core.RFX_DEVICES)

        event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070')
        event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd,
                                0xea, 0x02, 0x02, 0x00, 0x70])
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 0 == len(rfxtrx_core.RFX_DEVICES)

        # Trying to add a sensor
        event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279')
        event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y')
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 0 == len(rfxtrx_core.RFX_DEVICES)

        # Trying to add a switch
        event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70')
        event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd,
                                0xea, 0x01, 0x01, 0x0f, 0x70])
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 0 == len(rfxtrx_core.RFX_DEVICES)

        # Trying to add a rollershutter
        event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060')
        event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94,
                                0xAB, 0x02, 0x0E, 0x00, 0x60])
        rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)
        assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/blink/camera.py
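The tests above never touch real hardware: they synthesize RFXtrx packets with get_rfx_object and push them through the component's subscriber list, which is the same delivery path real events take. A minimal, self-contained sketch of that dispatch pattern follows; every name in it is hypothetical, not the Home Assistant rfxtrx API:

# Sketch of the subscriber-dispatch pattern the tests drive.
# All names here are hypothetical, not the Home Assistant rfxtrx API.
RECEIVED_EVT_SUBSCRIBERS = []
DEVICES = {}


def subscribe(callback):
    """Register a platform callback for incoming RFXtrx events."""
    RECEIVED_EVT_SUBSCRIBERS.append(callback)


def fire(event):
    """Deliver one received event to every registered platform."""
    for callback in RECEIVED_EVT_SUBSCRIBERS:
        callback(event)


def auto_add_light(event):
    """With automatic_add, register an unknown device on first sight."""
    DEVICES.setdefault(event["device_id"], {"state": "off"})


subscribe(auto_add_light)
fire({"device_id": "118cdea2"})
assert "118cdea2" in DEVICES

The tests call the subscriber directly (rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event)), which is the same path with the loop unrolled.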
"""Support for the Lovelace UI.""" from functools import wraps import logging import os import time import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.exceptions import HomeAssistantError from homeassistant.util.yaml import load_yaml _LOGGER = logging.getLogger(__name__) DOMAIN = 'lovelace' STORAGE_KEY = DOMAIN STORAGE_VERSION = 1 CONF_MODE = 'mode' MODE_YAML = 'yaml' MODE_STORAGE = 'storage' CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Optional(CONF_MODE, default=MODE_STORAGE): vol.All(vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])), }), }, extra=vol.ALLOW_EXTRA) LOVELACE_CONFIG_FILE = 'ui-lovelace.yaml' WS_TYPE_GET_LOVELACE_UI = 'lovelace/config' WS_TYPE_SAVE_CONFIG = 'lovelace/config/save' SCHEMA_GET_LOVELACE_UI = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({ vol.Required('type'): WS_TYPE_GET_LOVELACE_UI, vol.Optional('force', default=False): bool, }) SCHEMA_SAVE_CONFIG = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({ vol.Required('type'): WS_TYPE_SAVE_CONFIG, vol.Required('config'): vol.Any(str, dict), }) class ConfigNotFound(HomeAssistantError): """When no config available.""" async def async_setup(hass, config): """Set up the Lovelace commands.""" # Pass in default to `get` because defaults not set if loaded as dep mode = config.get(DOMAIN, {}).get(CONF_MODE, MODE_STORAGE) await hass.components.frontend.async_register_built_in_panel( DOMAIN, config={ 'mode': mode }) if mode == MODE_YAML: hass.data[DOMAIN] = LovelaceYAML(hass) else: hass.data[DOMAIN] = LovelaceStorage(hass) hass.components.websocket_api.async_register_command( WS_TYPE_GET_LOVELACE_UI, websocket_lovelace_config, SCHEMA_GET_LOVELACE_UI) hass.components.websocket_api.async_register_command( WS_TYPE_SAVE_CONFIG, websocket_lovelace_save_config, SCHEMA_SAVE_CONFIG) hass.components.system_health.async_register_info( DOMAIN, system_health_info) return True class LovelaceStorage: """Class to handle Storage based Lovelace config.""" def __init__(self, hass): """Initialize Lovelace config based on storage helper.""" self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) self._data = None async def async_get_info(self): """Return the YAML storage mode.""" if self._data is None: await self._load() if self._data['config'] is None: return { 'mode': 'auto-gen' } return _config_info('storage', self._data['config']) async def async_load(self, force): """Load config.""" if self._data is None: await self._load() config = self._data['config'] if config is None: raise ConfigNotFound return config async def async_save(self, config): """Save config.""" if self._data is None: await self._load() self._data['config'] = config await self._store.async_save(self._data) async def _load(self): """Load the config.""" data = await self._store.async_load() self._data = data if data else {'config': None} class LovelaceYAML: """Class to handle YAML-based Lovelace config.""" def __init__(self, hass): """Initialize the YAML config.""" self.hass = hass self._cache = None async def async_get_info(self): """Return the YAML storage mode.""" try: config = await self.async_load(False) except ConfigNotFound: return { 'mode': 'yaml', 'error': '{} not found'.format( self.hass.config.path(LOVELACE_CONFIG_FILE)) } return _config_info('yaml', config) async def async_load(self, force): """Load config.""" return await self.hass.async_add_executor_job(self._load_config, force) def _load_config(self, force): """Load the actual config.""" fname = self.hass.config.path(LOVELACE_CONFIG_FILE) # Check 
for a cached version of the config if not force and self._cache is not None: config, last_update = self._cache modtime = os.path.getmtime(fname) if config and last_update > modtime: return config try: config = load_yaml(fname) except FileNotFoundError: raise ConfigNotFound from None self._cache = (config, time.time()) return config async def async_save(self, config): """Save config.""" raise HomeAssistantError('Not supported') def handle_yaml_errors(func): """Handle error with WebSocket calls.""" @wraps(func) async def send_with_error_handling(hass, connection, msg): error = None try: result = await func(hass, connection, msg) message = websocket_api.result_message( msg['id'], result ) except ConfigNotFound: error = 'config_not_found', 'No config found.' except HomeAssistantError as err: error = 'error', str(err) if error is not None: message = websocket_api.error_message(msg['id'], *error) connection.send_message(message) return send_with_error_handling @websocket_api.async_response @handle_yaml_errors async def websocket_lovelace_config(hass, connection, msg): """Send Lovelace UI config over WebSocket configuration.""" return await hass.data[DOMAIN].async_load(msg['force']) @websocket_api.async_response @handle_yaml_errors async def websocket_lovelace_save_config(hass, connection, msg): """Save Lovelace UI configuration.""" await hass.data[DOMAIN].async_save(msg['config']) async def system_health_info(hass): """Get info for the info page.""" return await hass.data[DOMAIN].async_get_info() def _config_info(mode, config): """Generate info about the config.""" return { 'mode': mode, 'resources': len(config.get('resources', [])), 'views': len(config.get('views', [])) }
"""The tests for the Rfxtrx light platform.""" import unittest import pytest from homeassistant.setup import setup_component from homeassistant.components import rfxtrx as rfxtrx_core from tests.common import get_test_home_assistant, mock_component @pytest.mark.skipif("os.environ.get('RFXTRX') != 'RUN'") class TestLightRfxtrx(unittest.TestCase): """Test the Rfxtrx light platform.""" def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, 'rfxtrx') def tearDown(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} if rfxtrx_core.RFXOBJECT: rfxtrx_core.RFXOBJECT.close_connection() self.hass.stop() def test_valid_config(self): """Test configuration.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx_core.ATTR_FIREEVENT: True}}}}) assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', 'signal_repetitions': 3}}}}) def test_invalid_config(self): """Test configuration.""" assert not setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'invalid_key': 'afda', 'devices': {'213c7f216': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51', rfxtrx_core.ATTR_FIREEVENT: True}}}}) def test_default_config(self): """Test with 0 switches.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {}}}) assert 0 == len(rfxtrx_core.RFX_DEVICES) def test_old_config(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'123efab1': { 'name': 'Test', 'packetid': '0b1100cd0213c7f210010f51'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 def test_one_light(self): """Test with 1 light.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test'}}}}) import RFXtrx as rfxtrxmod rfxtrx_core.RFXOBJECT =\ rfxtrxmod.Core("", transport_protocol=rfxtrxmod.DummyTransport) assert 1 == len(rfxtrx_core.RFX_DEVICES) entity = rfxtrx_core.RFX_DEVICES['213c7f216'] assert 'Test' == entity.name assert 'off' == entity.state assert entity.assumed_state assert entity.signal_repetitions == 1 assert not entity.should_fire_event assert not entity.should_poll assert not entity.is_on entity.turn_on() assert entity.is_on assert entity.brightness == 255 entity.turn_off() assert not entity.is_on assert entity.brightness == 0 entity.turn_on(brightness=100) assert entity.is_on assert entity.brightness == 
100 entity.turn_on(brightness=10) assert entity.is_on assert entity.brightness == 10 entity.turn_on(brightness=255) assert entity.is_on assert entity.brightness == 255 entity.turn_off() entity_id = rfxtrx_core.RFX_DEVICES['213c7f216'].entity_id entity_hass = self.hass.states.get(entity_id) assert 'Test' == entity_hass.name assert 'off' == entity_hass.state entity.turn_on() entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_off() entity_hass = self.hass.states.get(entity_id) assert 'off' == entity_hass.state entity.turn_on(brightness=100) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=10) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state entity.turn_on(brightness=255) entity_hass = self.hass.states.get(entity_id) assert 'on' == entity_hass.state def test_several_lights(self): """Test with 3 lights.""" assert setup_component(self.hass, 'light', { 'light': { 'platform': 'rfxtrx', 'signal_repetitions': 3, 'devices': { '0b1100cd0213c7f230010f71': { 'name': 'Test'}, '0b1100100118cdea02010f70': { 'name': 'Bath'}, '0b1100101118cdea02010f70': { 'name': 'Living'}}}}) assert 3 == len(rfxtrx_core.RFX_DEVICES) device_num = 0 for id in rfxtrx_core.RFX_DEVICES: entity = rfxtrx_core.RFX_DEVICES[id] assert entity.signal_repetitions == 3 if entity.name == 'Living': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Living: off>' == entity.__str__() elif entity.name == 'Bath': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Bath: off>' == entity.__str__() elif entity.name == 'Test': device_num = device_num + 1 assert 'off' == entity.state assert '<Entity Test: off>' == entity.__str__() assert 3 == device_num def test_discover_light(self): """Test with discovery of lights.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b11009e00e6116202020070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x02\x02\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['0e611622'] assert 1 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b11009e00e6116202020070: on>' == \ entity.__str__() event = rfxtrx_core.get_rfx_object('0b11009e00e6116201010070') event.data = bytearray(b'\x0b\x11\x00\x9e\x00\xe6\x11b\x01\x01\x00p') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 1 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) entity = rfxtrx_core.RFX_DEVICES['118cdea2'] assert 2 == len(rfxtrx_core.RFX_DEVICES) assert '<Entity 0b1100120118cdea02020070: on>' == \ entity.__str__() # trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 
0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 2 == len(rfxtrx_core.RFX_DEVICES) def test_discover_light_noautoadd(self): """Test with discover of light when auto add is False.""" assert setup_component(self.hass, 'light', { 'light': {'platform': 'rfxtrx', 'automatic_add': False, 'devices': {}}}) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02010070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x01, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) event = rfxtrx_core.get_rfx_object('0b1100120118cdea02020070') event.data = bytearray([0x0b, 0x11, 0x00, 0x12, 0x01, 0x18, 0xcd, 0xea, 0x02, 0x02, 0x00, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a sensor event = rfxtrx_core.get_rfx_object('0a52085e070100b31b0279') event.data = bytearray(b'\nR\x08^\x07\x01\x00\xb3\x1b\x02y') rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a switch event = rfxtrx_core.get_rfx_object('0b1100100118cdea02010f70') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES) # Trying to add a rollershutter event = rfxtrx_core.get_rfx_object('0a1400adf394ab020e0060') event.data = bytearray([0x0A, 0x14, 0x00, 0xAD, 0xF3, 0x94, 0xAB, 0x02, 0x0E, 0x00, 0x60]) rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS[0](event) assert 0 == len(rfxtrx_core.RFX_DEVICES)
molobrakos/home-assistant
tests/components/light/test_rfxtrx.py
homeassistant/components/lovelace/__init__.py
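handle_yaml_errors above is a decorator that converts exceptions raised by a websocket handler into a single error reply, so every command sends exactly one message. A stripped-down sketch of the same pattern; FakeConnection and the message dicts are illustrative stand-ins, not the real websocket_api types:

# Minimal sketch of the error-translating decorator pattern used by
# handle_yaml_errors. Names below are illustrative, not the real API.
from functools import wraps
import asyncio


class ConfigNotFound(Exception):
    """Raised when no config is available."""


def handle_errors(func):
    """Run the handler and always send exactly one reply."""
    @wraps(func)
    async def wrapper(connection, msg):
        try:
            message = {"id": msg["id"], "success": True,
                       "result": await func(connection, msg)}
        except ConfigNotFound:
            message = {"id": msg["id"], "success": False,
                       "error": "config_not_found"}
        connection.send(message)
    return wrapper


class FakeConnection:
    def send(self, message):
        print(message)


@handle_errors
async def get_config(connection, msg):
    raise ConfigNotFound


asyncio.run(get_config(FakeConnection(), {"id": 5}))
# prints: {'id': 5, 'success': False, 'error': 'config_not_found'}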
from __future__ import with_statement

from datetime import datetime
import os

from django.conf import settings

from kuma.core.cache import memcache
from kuma.core.tests import ok_
from kuma.users.models import User
from kuma.users.tests import UserTestCase, user

from . import document, revision
from ..models import Document, DocumentSpamAttempt
from ..tasks import (build_sitemaps,
                     delete_old_documentspamattempt_data,
                     update_community_stats)


class UpdateCommunityStatsTests(UserTestCase):
    contributors = 10

    def setUp(self):
        super(UpdateCommunityStatsTests, self).setUp()
        self.cache = memcache

    def test_empty_community_stats(self):
        update_community_stats()
        stats = self.cache.get('community_stats')
        self.assertIsNone(stats)

    def test_populated_community_stats(self):
        for i in range(self.contributors):
            if i % 2 == 0:
                locale = 'en-US'
            else:
                locale = 'pt-BR'
            test_user = user(save=True)
            doc = document(save=True, locale=locale)
            revision(save=True, creator=test_user, document=doc)

        update_community_stats()
        stats = self.cache.get('community_stats')
        self.assertIsNotNone(stats)
        self.assertIn('contributors', stats)
        self.assertIn('locales', stats)
        self.assertIsInstance(stats['contributors'], long)
        self.assertIsInstance(stats['locales'], long)
        self.assertEqual(stats['contributors'], self.contributors)
        self.assertEqual(stats['locales'], 2)


class SitemapsTestCase(UserTestCase):
    fixtures = UserTestCase.fixtures + ['wiki/documents.json']

    def test_sitemaps_files(self):
        build_sitemaps()
        locales = (Document.objects.filter_for_list()
                           .values_list('locale', flat=True))
        expected_sitemap_locs = []
        for locale in set(locales):
            # we'll expect to see this locale in the sitemap index file
            expected_sitemap_locs.append(
                "<loc>https://example.com/sitemaps/%s/sitemap.xml</loc>" %
                locale
            )
            sitemap_path = os.path.join(settings.MEDIA_ROOT, 'sitemaps',
                                        locale, 'sitemap.xml')
            with open(sitemap_path, 'r') as sitemap_file:
                sitemap_xml = sitemap_file.read()

            docs = Document.objects.filter_for_list(locale=locale)
            for doc in docs:
                ok_(doc.modified.strftime('%Y-%m-%d') in sitemap_xml)
                ok_(doc.slug in sitemap_xml)

        sitemap_path = os.path.join(settings.MEDIA_ROOT, 'sitemap.xml')
        with open(sitemap_path, 'r') as sitemap_file:
            index_xml = sitemap_file.read()
        for loc in expected_sitemap_locs:
            ok_(loc in index_xml)


class DeleteOldDocumentSpamAttemptData(UserTestCase):
    fixtures = UserTestCase.fixtures

    def test_delete_old_data(self):
        user = User.objects.get(username='testuser01')
        admin = User.objects.get(username='admin')
        new_dsa = DocumentSpamAttempt.objects.create(
            user=user, title='new record', slug='users:me',
            data='{"PII": "IP, email, etc."}')
        old_reviewed_dsa = DocumentSpamAttempt.objects.create(
            user=user, title='old ham',
            data='{"PII": "plenty"}',
            review=DocumentSpamAttempt.HAM, reviewer=admin)
        old_unreviewed_dsa = DocumentSpamAttempt.objects.create(
            user=user, title='old unknown',
            data='{"PII": "yep"}')

        # created is auto-set to current time, update bypasses model logic
        old_date = datetime(2015, 1, 1)
        ids = [old_reviewed_dsa.id, old_unreviewed_dsa.id]
        DocumentSpamAttempt.objects.filter(id__in=ids).update(
            created=old_date)

        delete_old_documentspamattempt_data()

        new_dsa.refresh_from_db()
        assert new_dsa.data is not None

        old_reviewed_dsa.refresh_from_db()
        assert old_reviewed_dsa.data is None
        assert old_reviewed_dsa.review == DocumentSpamAttempt.HAM

        old_unreviewed_dsa.refresh_from_db()
        assert old_unreviewed_dsa.data is None
        assert old_unreviewed_dsa.review == (
            DocumentSpamAttempt.REVIEW_UNAVAILABLE)
# -*- coding: utf-8 -*-
import json

from django.core import mail
from django.test import RequestFactory

import pytest
import requests_mock
from constance.test import override_config
from waffle.models import Flag

from kuma.spam.constants import (CHECK_URL, SPAM_ADMIN_FLAG,
                                 SPAM_SPAMMER_FLAG, SPAM_TESTING_FLAG,
                                 SPAM_CHECKS_FLAG, VERIFY_URL)
from kuma.users.tests import UserTestCase, UserTransactionTestCase

from ..constants import SPAM_EXEMPTED_FLAG, SPAM_TRAINING_FLAG
from ..forms import AkismetHistoricalData, RevisionForm, TreeMoveForm
from ..models import DocumentSpamAttempt, Revision, RevisionIP
from ..tests import document, normalize_html, revision


class AkismetHistoricalDataTests(UserTestCase):
    """Tests for AkismetHistoricalData."""
    rf = RequestFactory()
    base_akismet_payload = {
        'blog_charset': 'UTF-8',
        'blog_lang': u'en_us',
        'comment_author': u'Test User',
        'comment_author_email': u'testuser@test.com',
        'comment_content': (
            'Sample\n'
            'SampleSlug\n'
            'content\n'
            'Comment'
        ),
        'comment_type': 'wiki-revision',
        'referrer': '',
        'user_agent': '',
        'user_ip': '0.0.0.0'
    }

    def setUp(self):
        super(AkismetHistoricalDataTests, self).setUp()
        self.user = self.user_model.objects.get(username='testuser')
        self.revision = revision(save=True, content='content',
                                 title='Sample', slug='SampleSlug',
                                 comment='Comment', summary='', tags='')

    def test_no_revision_ip_no_request(self):
        """
        Test Akismet payload with no RevisionIP or request.

        This is a possible payload from ./manage.py submit_deleted_documents.
        """
        params = AkismetHistoricalData(self.revision).parameters
        assert params == self.base_akismet_payload

    def test_revision_ip_no_data(self):
        """
        Test Akismet payload with a RevisionIP without data.

        This is a possible payload from an April 2016 revision.
        """
        RevisionIP.objects.create(revision=self.revision, ip='127.0.0.1',
                                  user_agent='Agent', referrer='Referrer')
        request = self.rf.get('/en-US/dashboard/revisions')
        params = AkismetHistoricalData(self.revision, request).parameters
        expected = self.base_akismet_payload.copy()
        expected.update({
            'blog': 'http://testserver/',
            'permalink': 'http://testserver/en-US/docs/SampleSlug',
            'referrer': 'Referrer',
            'user_agent': 'Agent',
            'user_ip': '127.0.0.1',
        })
        assert params == expected

    def test_revision_ip_with_data(self):
        """
        Test Akismet payload is the data from the RevisionIP.

        This payload is from a revision after April 2016.
        """
        RevisionIP.objects.create(revision=self.revision, ip='127.0.0.1',
                                  user_agent='Agent', referrer='Referrer',
                                  data='{"content": "spammy"}')
        request = self.rf.get('/en-US/dashboard/revisions')
        params = AkismetHistoricalData(self.revision, request).parameters
        assert params == {'content': 'spammy'}


class RevisionFormTests(UserTransactionTestCase):
    """
    Generic tests for RevisionForm.

    See RevisionFormEditTests, etc. for tests that simulate using the form
    in a view.
    """
    rf = RequestFactory()

    def test_form_onload_attr_filter(self):
        """
        RevisionForm should strip out any harmful onload attributes from
        input markup.

        bug 821986
        """
        rev = revision(save=True, is_approved=True, content="""
            <svg><circle onload=confirm(3)>
        """)
        request = self.rf.get('/')
        rev_form = RevisionForm(instance=rev, request=request)
        self.assertNotIn('onload', rev_form.initial['content'])

    def test_form_loaded_with_section(self):
        """
        RevisionForm given section_id should load initial content for only
        one section.
        """
        rev = revision(save=True, is_approved=True, content="""
            <h1 id="s1">s1</h1>
            <p>test</p>
            <p>test</p>
            <h1 id="s2">s2</h1>
            <p>test</p>
            <p>test</p>
            <h1 id="s3">s3</h1>
            <p>test</p>
            <p>test</p>
        """)
        expected = """
            <h1 id="s2">s2</h1>
            <p>test</p>
            <p>test</p>
        """
        request = self.rf.get('/')
        rev_form = RevisionForm(instance=rev, section_id='s2',
                                request=request)
        self.assertEqual(normalize_html(expected),
                         normalize_html(rev_form.initial['content']))

    def test_form_save_section(self):
        rev = revision(save=True, is_approved=True, content="""
            <h1 id="s1">s1</h1>
            <p>test</p>
            <p>test</p>
            <h1 id="s2">s2</h1>
            <p>test</p>
            <p>test</p>
            <h1 id="s3">s3</h1>
            <p>test</p>
            <p>test</p>
        """)
        replace_content = """
            <h1 id="s2">New stuff</h1>
            <p>new stuff</p>
        """
        expected = """
            <h1 id="s1">s1</h1>
            <p>test</p>
            <p>test</p>
            <h1 id="s2">New stuff</h1>
            <p>new stuff</p>
            <h1 id="s3">s3</h1>
            <p>test</p>
            <p>test</p>
        """
        request = self.rf.get('/')
        request.user = rev.creator
        rev_form = RevisionForm(data={'content': replace_content},
                                instance=rev, section_id='s2',
                                request=request)
        new_rev = rev_form.save(rev.document)
        self.assertEqual(normalize_html(expected),
                         normalize_html(new_rev.content))

    def test_form_rejects_empty_slugs_with_parent(self):
        """
        RevisionForm should reject empty slugs, even if there is a parent
        slug portion.
        """
        data = {
            'slug': '',
            'title': 'Title',
            'content': 'Content',
        }
        request = self.rf.get('/')
        request.user = self.user_model.objects.get(username='testuser')
        rev_form = RevisionForm(data=data,
                                request=request,
                                parent_slug='User:groovecoder')
        self.assertFalse(rev_form.is_valid())

    def test_multiword_tags(self):
        rev = revision(save=True)
        request = self.rf.get('/')
        request.user = rev.creator
        data = {
            'content': 'Content',
            'toc_depth': 1,
            'tags': '"MDN Meta"',
        }
        rev_form = RevisionForm(data=data, instance=rev, request=request)
        self.assertTrue(rev_form.is_valid())
        self.assertEqual(rev_form.cleaned_data['tags'], '"MDN Meta"')

    def test_case_sensitive_tags(self):
        """
        RevisionForm should reject new tags that are the same as existing
        tags that only differ by case.
        """
        rev = revision(save=True, tags='"JavaScript"')
        request = self.rf.get('/')
        request.user = rev.creator
        data = {
            'content': 'Content',
            'toc_depth': 1,
            'tags': 'Javascript',  # Note the lower-case "S".
        }
        rev_form = RevisionForm(data=data, instance=rev, request=request)
        self.assertTrue(rev_form.is_valid())
        self.assertEqual(rev_form.cleaned_data['tags'], '"JavaScript"')


@override_config(AKISMET_KEY='forms')
class RevisionFormViewTests(UserTransactionTestCase):
    """Setup tests for RevisionForm as used in views."""
    rf = RequestFactory()
    akismet_keys = [
        # Keys for a new English page or new translation
        'REMOTE_ADDR', 'blog', 'blog_charset', 'blog_lang', 'comment_author',
        'comment_author_email', 'comment_content', 'comment_type', 'referrer',
        'user_agent', 'user_ip',
    ]
    # Keys for a page edit (English or translation)
    akismet_keys_edit = sorted(akismet_keys + ['permalink'])

    def setUp(self):
        super(RevisionFormViewTests, self).setUp()
        self.testuser = self.user_model.objects.get(username='testuser')
        Flag.objects.update_or_create(
            name=SPAM_CHECKS_FLAG,
            defaults={'everyone': True},
        )

    def tearDown(self):
        super(RevisionFormViewTests, self).tearDown()
        Flag.objects.filter(name=SPAM_EXEMPTED_FLAG).delete()
        Flag.objects.update_or_create(
            name=SPAM_CHECKS_FLAG,
            defaults={'everyone': None},
        )


class RevisionFormEditTests(RevisionFormViewTests):
    """Test RevisionForm as used in edit view.

    Includes Akismet enabled, spam/ham, and training tests. These require a
    RevisionForm setup for POST validation, but are not unique to editing.
    """
    original = {  # Default attributes of original revision
        'content': (
            '<h2 id="Summary">Summary</h2>\r\n'
            '<p>The <strong><code>display</code></strong> CSS property'
            ' specifies the type of rendering box used for an'
            ' element.</p>\r\n'
            '<p>{{cssinfo}}</p>\r\n'
            '<h2 id="Syntax">Syntax</h2>\r\n'
            '<pre class="brush:css">\r\n'
            'display: none;\r\n'
            '</pre>\r\n'
        ),
        'slug': 'Web/CSS/display',
        'tags': '"CSS" "CSS Property" "Reference"',
        'title': 'display',
        'toc_depth': Revision.TOC_DEPTH_ALL,
    }
    view_data_extra = {  # Extra data from view, derived from POST
        'form': 'rev',
        'content': (
            '<h2 id="Summary">Summary</h2>\r\n'
            '<p>The <strong><code>display</code></strong> CSS property'
            ' specifies the type of rendering box used for an'
            ' element.</p>\r\n'
            '<p>{{cssinfo}} and my changes.</p>\r\n'
            '<h2 id="Syntax">Syntax</h2>\r\n'
            '<p><a href="http://spam.example.com">Buy my product!</a></p>\r\n'
            '<pre class="brush:css">display: none;</pre>\r\n'
        ),
        'comment': 'Comment',
        'days': '0',
        'hours': '0',
        'minutes': '0',
        'render_max_age': '0',
        'parent_id': '',
        'review_tags': [],
    }

    def setup_form(self, mock_requests, override_original=None,
                   override_data=None, is_spam='false'):
        """
        Setup a RevisionForm for a POST to edit a page.

        Parameters:
        * mock_requests - Mockable requests for Akismet checks
        * override_original - Add or modify original revision
        * override_data - Add or modify the view data
        * is_spam - Response from the Akismet check-comment URL
        """
        revision(save=True, slug='Web')
        revision(save=True, slug='Web/CSS')
        original_params = self.original.copy()
        original_params.update(override_original or {})
        previous_revision = revision(save=True, **original_params)

        data = self.original.copy()
        data['current_rev'] = str(previous_revision.id)
        del data['slug']  # Not included in edit POST
        data.update(self.view_data_extra)
        data.update(override_data or {})

        request = self.rf.post('/en-US/docs/Web/CSS/display$edit')
        request.user = self.testuser

        mock_requests.post(VERIFY_URL, content='valid')
        mock_requests.post(CHECK_URL, content=is_spam)

        section_id = None
        is_iframe_target = False
        rev_form = RevisionForm(request=request,
                                data=data,
                                is_iframe_target=is_iframe_target,
                                section_id=section_id)
        rev_form.instance.document = previous_revision.document
        return rev_form

    @pytest.mark.spam
    @requests_mock.mock()
    def test_standard_edit(self, mock_requests):
        """Test Akismet parameters for edited English pages."""
        rev_form = self.setup_form(mock_requests)
        assert rev_form.is_valid(), rev_form.errors
        parameters = rev_form.akismet_parameters()
        assert sorted(parameters.keys()) == self.akismet_keys_edit
        expected_content = (
            '<p>{{cssinfo}} and my changes.</p>\n'
            '<p><a href="http://spam.example.com">Buy my product!</a></p>\n'
            '<pre class="brush:css">display: none;</pre>\n'
            'Comment'
        )
        assert parameters['comment_content'] == expected_content
        assert parameters['comment_type'] == 'wiki-revision'
        assert parameters['blog'] == 'http://testserver/'
        assert parameters['blog_lang'] == 'en_us'
        assert parameters['blog_charset'] == 'UTF-8'
        assert parameters['REMOTE_ADDR'] == '127.0.0.1'
        assert parameters['permalink'] == ('http://testserver/en-US/docs/'
                                           'Web/CSS/display')

    @pytest.mark.spam
    @requests_mock.mock()
    def test_change_tags_edit(self, mock_requests):
        """Test that new tags are included in the Akismet content."""
        new_tags = '"CSS" "CSS Property" "Reference" "CSS Positioning"'
        rev_form = self.setup_form(mock_requests,
                                   override_data={'tags': new_tags})
        assert rev_form.is_valid()
        parameters = rev_form.akismet_parameters()
        assert sorted(parameters.keys()) == self.akismet_keys_edit
        expected_content = (
            '<p>{{cssinfo}} and my changes.</p>\n'
            '<p><a href="http://spam.example.com">Buy my product!</a></p>\n'
            '<pre class="brush:css">display: none;</pre>\n'
            'Comment\n'
            'CSS Positioning'
        )
        assert parameters['comment_content'] == expected_content

    @pytest.mark.spam
    @requests_mock.mock()
    def test_legacy_edit(self, mock_requests):
        """
        Test Akismet parameters for edited legacy pages.

        Keywords and summary are included in the form if the legacy page
        includes them.
        """
        legacy_fields = {'keywords': 'CSS, display',
                         'summary': 'CSS property display'}
        extra_post_data = {'keywords': 'CSS display, hidden',
                           'summary': 'The CSS property display',
                           'comment': 'Updated'}
        rev_form = self.setup_form(mock_requests,
                                   override_original=legacy_fields,
                                   override_data=extra_post_data)
        assert rev_form.is_valid()
        parameters = rev_form.akismet_parameters()
        assert sorted(parameters.keys()) == self.akismet_keys_edit
        expected_content = (
            'The CSS property display\n'
            '<p>{{cssinfo}} and my changes.</p>\n'
            '<p><a href="http://spam.example.com">Buy my product!</a></p>\n'
            '<pre class="brush:css">display: none;</pre>\n'
            'Updated\n'
            'CSS display, hidden'
        )
        assert parameters['comment_content'] == expected_content

    @pytest.mark.spam
    @requests_mock.mock()
    def test_quoteless_tags(self, mock_requests):
        """
        Test Akismet parameters when the tags are saved without quotes.

        Tracked in bug 1268511.
        """
        tags = {'tags': 'CodingScripting, Glossary'}
        rev_form = self.setup_form(mock_requests, override_original=tags)
        assert rev_form.is_valid()
        parameters = rev_form.akismet_parameters()
        assert sorted(parameters.keys()) == self.akismet_keys_edit
        expected_content = (
            '<p>{{cssinfo}} and my changes.</p>\n'
            '<p><a href="http://spam.example.com">Buy my product!</a></p>\n'
            '<pre class="brush:css">display: none;</pre>\n'
            'Comment\n'
            'CSS\n'
            'CSS Property\n'
            'Reference'
        )
        assert parameters['comment_content'] == expected_content

    @pytest.mark.spam
    @requests_mock.mock()
    def test_akismet_enabled(self, mock_requests):
        rev_form = self.setup_form(mock_requests)
        assert rev_form.akismet_enabled()

        # create the waffle flag and add the test user to it
        flag, created = Flag.objects.get_or_create(name=SPAM_EXEMPTED_FLAG)
        flag.users.add(self.testuser)

        # now disabled because the test user is exempted from the spam check
        assert not rev_form.akismet_enabled()

    @requests_mock.mock()
    @pytest.mark.spam
    def test_akismet_ham(self, mock_requests):
        assert DocumentSpamAttempt.objects.count() == 0
        assert len(mail.outbox) == 0
        rev_form = self.setup_form(mock_requests)
        assert rev_form.is_valid()
        assert DocumentSpamAttempt.objects.count() == 0

    @requests_mock.mock()
    @pytest.mark.spam
    def test_akismet_spam(self, mock_requests):
        assert DocumentSpamAttempt.objects.count() == 0
        assert len(mail.outbox) == 0
        rev_form = self.setup_form(mock_requests, is_spam='true')
        assert not rev_form.is_valid()
        assert rev_form.errors == {
            '__all__': [rev_form.akismet_error_message]}
        assert DocumentSpamAttempt.objects.count() > 0
        attempt = DocumentSpamAttempt.objects.latest()
        assert attempt.title == 'display'
        assert attempt.slug == 'Web/CSS/display'
        assert attempt.user == self.testuser
        assert attempt.review == DocumentSpamAttempt.NEEDS_REVIEW
        assert attempt.data
        data = json.loads(attempt.data)
        assert 'akismet_status_code' not in data

        # Test that one message has been sent.
        assert len(mail.outbox) == 1
        body = mail.outbox[0].body
        assert attempt.title in body
        assert attempt.slug in body
        assert attempt.user.username in body

    @requests_mock.mock()
    @pytest.mark.spam
    def test_akismet_error(self, mock_requests):
        assert DocumentSpamAttempt.objects.count() == 0
        assert len(mail.outbox) == 0
        rev_form = self.setup_form(mock_requests, is_spam='terrible')
        assert not rev_form.is_valid()
        assert rev_form.errors == {
            '__all__': [rev_form.akismet_error_message]}
        assert DocumentSpamAttempt.objects.count() > 0
        attempt = DocumentSpamAttempt.objects.latest()
        assert attempt.review == DocumentSpamAttempt.AKISMET_ERROR
        assert attempt.data
        data = json.loads(attempt.data)
        assert data['akismet_status_code'] == 200
        assert data['akismet_debug_help'] == 'Not provided'
        assert data['akismet_response'] == 'terrible'
        assert len(mail.outbox) == 1

    @pytest.mark.spam
    @requests_mock.mock()
    def test_akismet_spam_training(self, mock_requests):
        flag, created = Flag.objects.get_or_create(name=SPAM_TRAINING_FLAG)
        flag.users.add(self.testuser)
        assert not DocumentSpamAttempt.objects.exists()
        rev_form = self.setup_form(mock_requests, is_spam='true')
        assert rev_form.is_valid()
        assert DocumentSpamAttempt.objects.count() == 1
        attempt = DocumentSpamAttempt.objects.get()
        assert attempt.user == self.testuser
        assert attempt.review == DocumentSpamAttempt.NEEDS_REVIEW

    @pytest.mark.spam
    @requests_mock.mock()
    def test_akismet_error_training(self, mock_requests):
        flag, created = Flag.objects.get_or_create(name=SPAM_TRAINING_FLAG)
        flag.users.add(self.testuser)
        assert not DocumentSpamAttempt.objects.exists()
        rev_form = self.setup_form(mock_requests, is_spam='error')
        assert rev_form.is_valid()
        assert DocumentSpamAttempt.objects.count() == 1
        attempt = DocumentSpamAttempt.objects.get()
        assert attempt.user == self.testuser
        assert attempt.review == DocumentSpamAttempt.AKISMET_ERROR

    @pytest.mark.spam
    @requests_mock.mock()
    def test_akismet_parameters_admin_flag(self, mock_requests):
        flag, created = Flag.objects.get_or_create(name=SPAM_ADMIN_FLAG)
        flag.users.add(self.testuser)
        rev_form = self.setup_form(mock_requests)
        assert rev_form.is_valid()
        parameters = rev_form.akismet_parameters()
        assert parameters['user_role'] == 'administrator'

    @pytest.mark.spam
    @requests_mock.mock()
    def test_akismet_parameters_spammer_flag(self, mock_requests):
        flag, created = Flag.objects.get_or_create(name=SPAM_SPAMMER_FLAG)
        flag.users.add(self.testuser)
        rev_form = self.setup_form(mock_requests, is_spam='true')
        assert not rev_form.is_valid()
        parameters = rev_form.akismet_parameters()
        assert parameters['comment_author'] == 'viagra-test-123'

    @pytest.mark.spam
    @requests_mock.mock()
    def test_akismet_parameters_testing_flag(self, mock_requests):
        flag, created = Flag.objects.get_or_create(name=SPAM_TESTING_FLAG)
        flag.users.add(self.testuser)
        rev_form = self.setup_form(mock_requests)
        assert rev_form.is_valid()
        parameters = rev_form.akismet_parameters()
        assert parameters['is_test']

    @pytest.mark.spam
    @requests_mock.mock()
    def test_akismet_disabled_template(self, mock_requests):
        template = {
            'content': (
                '<% /* This is a template */ %>\n'
                '<p>Hello, World!</p>\n'
            ),
            'slug': 'Template:HelloWorld',
            'tags': '',
            'title': 'Template:HelloWorld'
        }
        template_edit = template.copy()
        template_edit['content'] = (
            '<% /* This is a template */ %>\n'
            '<p><strong>Hello, World!</strong></p>\n'
        )
        rev_form = self.setup_form(mock_requests,
                                   override_original=template,
                                   override_data=template_edit,
                                   is_spam='true')
        assert rev_form.is_valid()
        assert not rev_form.akismet_enabled()


class RevisionFormCreateTests(RevisionFormViewTests):
    """Test RevisionForm as used in create view."""
    view_data = {  # Data passed by view, derived from POST
        'comment': 'Initial version',
        'content': (
            '<h2 id="Summary">Summary</h2>\r\n'
            '<p>Web accessibility is removing barriers that prevent'
            ' interaction with or access to website.</p>\r\n'
        ),
        'locale': 'en-US',  # Added in view from request.LANGUAGE_CODE
        'review_tags': ['technical', 'editorial'],
        'slug': 'Accessibility',
        'tags': '"Accessibility" "Web Development"',
        'title': 'Accessibility',
        'toc_depth': Revision.TOC_DEPTH_ALL,
    }

    def setup_form(self, mock_requests, is_spam='false'):
        """
        Setup a RevisionForm for a POST to create a new page.

        Parameters:
        * mock_requests - Mockable requests for Akismet checks
        """
        revision(save=True, slug='Web')
        parent = revision(save=True, slug='Web/Guide')
        data = self.view_data.copy()
        data['parent_topic'] = str(parent.id)

        request = self.rf.post('/en-US/docs/new')
        request.user = self.testuser
        # In the view, the form data's locale is set from the request
        request.LANGUAGE_CODE = data['locale']

        mock_requests.post(VERIFY_URL, content='valid')
        mock_requests.post(CHECK_URL, content=is_spam)

        parent_slug = 'Web/Guide'
        rev_form = RevisionForm(request=request, data=data,
                                parent_slug=parent_slug)
        return rev_form

    @pytest.mark.spam
    @requests_mock.mock()
    def test_standard_new(self, mock_requests):
        """Test that new English pages get the standard Akismet
        parameters."""
        rev_form = self.setup_form(mock_requests)
        assert rev_form.is_valid(), rev_form.errors
        parameters = rev_form.akismet_parameters()
        assert sorted(parameters.keys()) == self.akismet_keys
        assert parameters['blog'] == 'http://testserver/'
        assert parameters['blog_charset'] == 'UTF-8'
        assert parameters['blog_lang'] == 'en_us'
        assert parameters['comment_author'] == 'Test User'
        assert parameters['comment_author_email'] == self.testuser.email
        expected_content = (
            'Accessibility\n'
            'Web/Guide/Accessibility\n'
            '<h2 id="Summary">Summary</h2>\n'
            '<p>Web accessibility is removing barriers that prevent'
            ' interaction with or access to website.</p>\n'
            'Initial version\n'
            'Accessibility\n'
            'Web Development'
        )
        assert parameters['comment_content'] == expected_content
        assert parameters['comment_type'] == 'wiki-revision'
        assert parameters['referrer'] == ''
        assert parameters['user_agent'] == ''
        assert parameters['user_ip'] == '127.0.0.1'

    @requests_mock.mock()
    @pytest.mark.spam
    def test_akismet_spam(self, mock_requests):
        assert DocumentSpamAttempt.objects.count() == 0
        assert len(mail.outbox) == 0
        rev_form = self.setup_form(mock_requests, is_spam='true')
        assert not rev_form.is_valid()
        assert rev_form.errors == {
            '__all__': [rev_form.akismet_error_message]}
        assert DocumentSpamAttempt.objects.count() > 0
        attempt = DocumentSpamAttempt.objects.latest()
        assert attempt.title == 'Accessibility'
        assert attempt.slug == 'Web/Guide/Accessibility'
        assert attempt.user == self.testuser
        assert attempt.review == DocumentSpamAttempt.NEEDS_REVIEW
        assert attempt.data
        data = json.loads(attempt.data)
        assert 'akismet_status_code' not in data

        # Test that one message has been sent.
        assert len(mail.outbox) == 1
        body = mail.outbox[0].body
        assert attempt.title in body
        assert attempt.slug in body
        assert attempt.user.username in body


class RevisionFormNewTranslationTests(RevisionFormViewTests):
    """Test RevisionForm as used to create a page in translate view."""
    original = {  # Default attributes of original English page
        'content': (
            '<h2 id="Summary">Summary</h2>\r\n'
            '<p><strong>HyperText Markup Language (HTML)</strong> is the'
            ' core language of nearly all Web content.</p>\r\n'
        ),
        'slug': 'Web/Guide/HTML',
        'tags': '"HTML" "Landing" "Web"',
        'title': 'HTML developer guide',
        'toc_depth': Revision.TOC_DEPTH_ALL,
    }
    view_data = {  # Data passed by view, derived from POST
        'comment': u'Traduction initiale',
        'content': (
            u'<h2 id="Summary">Summary</h2>\r\n'
            u'<p><strong>HyperText Markup Language (HTML)</strong>, ou'
            u' <em>langage de balisage hypertexte</em>, est le langage au'
            u' cœur de presque tout contenu Web.</p>\r\n'
        ),
        'current_rev': '',
        'form': 'both',
        'locale': 'fr',  # Added in view from request.GET to_locale
        'localization_tags': ['inprogress'],
        'slug': 'HTML',
        'tags': '"HTML" "Landing" "Web"',
        'title': u'Guide de développement HTML',
        'toc_depth': Revision.TOC_DEPTH_ALL,
    }

    def setup_form(self, mock_requests):
        """
        Setup a RevisionForm for a POST to create a new translation.

        Parameters:
        * mock_requests - Mockable requests for Akismet checks
        """
        revision(save=True, slug='Web')
        revision(save=True, slug='Web/Guide')
        original_data = self.original.copy()
        english_rev = revision(save=True, **original_data)

        fr_web_doc = document(save=True, slug='Web', locale='fr')
        revision(save=True, slug='Web', document=fr_web_doc)
        fr_guide_doc = document(save=True, slug='Web/Guide', locale='fr')
        revision(save=True, slug='Web/Guide', document=fr_guide_doc)
        fr_html_doc = document(save=True, slug='Web/Guide/HTML', locale='fr',
                               parent=english_rev.document)
        initial = {
            'based_on': english_rev.id,
            'comment': '',
            'toc_depth': english_rev.toc_depth,
            'localization_tags': ['inprogress'],
            'content': english_rev.content,  # In view, includes cleaning
        }
        request = self.rf.post('/en-US/docs/Web/Guide/HTML$translate')
        request.user = self.testuser

        is_spam = 'false'
        mock_requests.post(VERIFY_URL, content='valid')
        mock_requests.post(CHECK_URL, content=is_spam)

        parent_slug = 'Web/Guide'
        rev_form1 = RevisionForm(request=request, instance=None,
                                 initial=initial, parent_slug=parent_slug)
        assert rev_form1

        data = self.view_data.copy()
        data['based_on'] = str(english_rev.id)
        rev_form = RevisionForm(request=request, data=data,
                                parent_slug=parent_slug)
        rev_form.instance.document = fr_html_doc
        return rev_form

    @pytest.mark.spam
    @requests_mock.mock()
    def test_new_translation(self, mock_requests):
        """Test Akismet dual locale setting for new translations."""
        rev_form = self.setup_form(mock_requests)
        assert rev_form.is_valid()
        parameters = rev_form.akismet_parameters()
        assert sorted(parameters.keys()) == self.akismet_keys
        assert parameters['blog_lang'] == 'fr, en_us'
        expected_content = (
            u'Guide de développement HTML\n'
            u'<p><strong>HyperText Markup Language (HTML)</strong>, ou'
            u' <em>langage de balisage hypertexte</em>, est le langage au'
            u' cœur de presque tout contenu Web.</p>\n'
            u'Traduction initiale'
        )
        assert parameters['comment_content'] == expected_content


class RevisionFormEditTranslationTests(RevisionFormViewTests):
    """Test RevisionForm as used to edit an existing translation."""
    en_original = {  # Default attributes of original English page
        'content': (
            '<h2 id="Summary">Summary</h2>\r\n'
            '<p><strong>HyperText Markup Language (HTML)</strong> is the'
            ' core language of nearly all Web content.</p>\r\n'
        ),
        'slug': 'Web/Guide/HTML',
        'tags': '"HTML" "Landing" "Web"',
        'title': 'HTML developer guide',
        'toc_depth': Revision.TOC_DEPTH_ALL,
    }
    fr_original = {  # Default attributes of original French page
        'content': (
            u'<h2 id="Summary">Summary</h2>\r\n'
            u'<p><strong>HyperText Markup Language (HTML)</strong>, ou'
            u' <em>langage de balisage hypertexte</em>, est le langage au'
            u' cœur de presque tout contenu Web.</p>\r\n'
        ),
        'slug': 'Web/Guide/HTML',
        'tags': '"HTML" "Landing"',
        'title': u'Guide de développement HTML',
        'toc_depth': Revision.TOC_DEPTH_ALL,
    }
    view_data = {  # Data passed by view, derived from POST
        'comment': u'Traduction initiale terminée',
        'content': (
            u'<h2 id="Summary">Summary</h2>\r\n'
            u'<p><strong>HyperText Markup Language (HTML)</strong>, ou'
            u' <em>langage de balisage hypertexte</em>, est le langage au'
            u' cœur de presque tout contenu Web.</p>\r\n'
            u'<p>La majorité de ce que vous voyez dans votre navigateur est'
            u' décrit en utilisant HTML.<p>'
        ),
        'current_rev': '',
        'form': 'both',
        'locale': 'fr',  # Added in view from request.GET to_locale
        'localization_tags': ['inprogress'],
        'slug': 'HTML',
        'tags': '"HTML" "Landing" "Web"',
        'title': u'Guide de développement HTML',
        'toc_depth': Revision.TOC_DEPTH_ALL,
    }

    def setup_forms(self, mock_requests):
        """
        Setup two RevisionForms for a POST to edit an existing translation.

        RevisionForm is validated twice on POST (during Document validation,
        and during Revision validation and save), so this returns two forms.

        Parameters:
        * mock_requests - Mockable requests for Akismet checks
        """
        revision(save=True, slug='Web')
        revision(save=True, slug='Web/Guide')
        en_rev = revision(save=True, **self.en_original)

        fr_web_doc = document(save=True, slug='Web', locale='fr')
        revision(save=True, slug='Web', document=fr_web_doc)
        fr_guide_doc = document(save=True, slug='Web/Guide', locale='fr')
        revision(save=True, slug='Web/Guide', document=fr_guide_doc)
        fr_html_doc = document(save=True, slug='Web/Guide/HTML', locale='fr',
                               parent=en_rev.document)
        revision(save=True, document=fr_html_doc, **self.fr_original)

        request = self.rf.post('/fr/docs/Web/Guide/HTML')
        request.user = self.testuser

        is_spam = 'false'
        mock_requests.post(VERIFY_URL, content='valid')
        mock_requests.post(CHECK_URL, content=is_spam)

        # Form #1 - Document validation
        data = self.view_data.copy()
        data['based_on'] = str(en_rev.id)
        data['parent_id'] = str(en_rev.document.id)
        parent_slug = 'Web/Guide'
        rev_form1 = RevisionForm(request=request, data=data,
                                 parent_slug=parent_slug)

        # Form #2 - Revision validation and saving
        data = self.view_data.copy()
        data['based_on'] = str(en_rev.id)
        data['parent_id'] = str(en_rev.document.id)
        rev_form2 = RevisionForm(request=request, data=data,
                                 parent_slug=parent_slug)
        rev_form2.instance.document = fr_html_doc

        return rev_form1, rev_form2

    @pytest.mark.spam
    @requests_mock.mock()
    def test_edit_translation(self, mock_requests):
        rev_form1, rev_form2 = self.setup_forms(mock_requests)
        assert rev_form1.is_valid(), rev_form1.errors
        assert rev_form2.is_valid(), rev_form2.errors
        parameters = rev_form2.akismet_parameters()
        assert sorted(parameters.keys()) == self.akismet_keys_edit
        assert parameters['blog_lang'] == 'fr, en_us'
        expected_content = (
            u'<p>La majorité de ce que vous voyez dans votre navigateur est'
            u' décrit en utilisant HTML.<p>\n'
            u'Traduction initiale terminée\n'
            u'Web'
        )
        assert parameters['comment_content'] == expected_content
        assert parameters['permalink'] == ('http://testserver/fr/docs/'
                                           'Web/Guide/HTML')


class TreeMoveFormTests(UserTestCase):
    fixtures = UserTestCase.fixtures + ['wiki/documents.json']

    def test_form_properly_strips_leading_cruft(self):
        """
        Tests that leading slash, trailing slash, and {locale}/docs/
        are removed if included.
        """
        comparisons = [
            ['/somedoc', 'somedoc'],  # leading slash
            ['/en-US/docs/mynewplace', 'mynewplace'],  # locale and docs
            ['/docs/one', 'one'],  # leading docs
            ['docs/one', 'one'],  # leading docs without slash
            ['fr/docs/one', 'one'],  # foreign locale with docs
            ['docs/article-title/docs',
             'article-title/docs'],  # docs with later docs
            ['/en-US/docs/something/', 'something']  # trailing slash
        ]

        for comparison in comparisons:
            form = TreeMoveForm({'locale': 'en-US',
                                 'title': 'Article',
                                 'slug': comparison[0]})
            form.is_valid()
            self.assertEqual(comparison[1], form.cleaned_data['slug'])

    def test_form_enforces_parent_doc_to_exist(self):
        form = TreeMoveForm({'locale': 'en-US',
                             'title': 'Article',
                             'slug': 'nothing/article'})
        form.is_valid()
        self.assertTrue(form.errors)
        self.assertIn(u'Parent', form.errors.as_text())
        self.assertIn(u'does not exist', form.errors.as_text())
jgmize/kuma
kuma/wiki/tests/test_forms.py
kuma/wiki/tests/test_tasks.py
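The Akismet tests above never hit the network: requests_mock intercepts the outbound POSTs and returns a canned verdict, so is_spam='true' passed to setup_form becomes the response body Akismet would have sent. A minimal standalone example of the same interception pattern (the endpoint URL is only illustrative):

# Standalone sketch of the requests_mock pattern used in the tests above.
import requests
import requests_mock

with requests_mock.Mocker() as m:
    # Register a canned response for the spam-check endpoint.
    m.post('https://rest.akismet.com/1.1/comment-check', text='true')
    response = requests.post(
        'https://rest.akismet.com/1.1/comment-check',
        data={'comment_content': 'Buy my product!'})

assert response.text == 'true'  # the mocked verdict, not a real check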
"""Support for fans through the SmartThings cloud API.""" from typing import Optional, Sequence from pysmartthings import Capability from homeassistant.components.fan import ( SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, SPEED_OFF, SUPPORT_SET_SPEED, FanEntity, ) from . import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN VALUE_TO_SPEED = {0: SPEED_OFF, 1: SPEED_LOW, 2: SPEED_MEDIUM, 3: SPEED_HIGH} SPEED_TO_VALUE = {v: k for k, v in VALUE_TO_SPEED.items()} async def async_setup_entry(hass, config_entry, async_add_entities): """Add fans for a config entry.""" broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id] async_add_entities( [ SmartThingsFan(device) for device in broker.devices.values() if broker.any_assigned(device.device_id, "fan") ] ) def get_capabilities(capabilities: Sequence[str]) -> Optional[Sequence[str]]: """Return all capabilities supported if minimum required are present.""" supported = [Capability.switch, Capability.fan_speed] # Must have switch and fan_speed if all(capability in capabilities for capability in supported): return supported class SmartThingsFan(SmartThingsEntity, FanEntity): """Define a SmartThings Fan.""" async def async_set_speed(self, speed: str): """Set the speed of the fan.""" value = SPEED_TO_VALUE[speed] await self._device.set_fan_speed(value, set_status=True) # State is set optimistically in the command above, therefore update # the entity state ahead of receiving the confirming push updates self.async_write_ha_state() async def async_turn_on(self, speed: str = None, **kwargs) -> None: """Turn the fan on.""" if speed is not None: value = SPEED_TO_VALUE[speed] await self._device.set_fan_speed(value, set_status=True) else: await self._device.switch_on(set_status=True) # State is set optimistically in the commands above, therefore update # the entity state ahead of receiving the confirming push updates self.async_write_ha_state() async def async_turn_off(self, **kwargs) -> None: """Turn the fan off.""" await self._device.switch_off(set_status=True) # State is set optimistically in the command above, therefore update # the entity state ahead of receiving the confirming push updates self.async_write_ha_state() @property def is_on(self) -> bool: """Return true if fan is on.""" return self._device.status.switch @property def speed(self) -> str: """Return the current speed.""" return VALUE_TO_SPEED[self._device.status.fan_speed] @property def speed_list(self) -> list: """Get the list of available speeds.""" return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH] @property def supported_features(self) -> int: """Flag supported features.""" return SUPPORT_SET_SPEED
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/smartthings/fan.py
"""Sorting helpers for ISY994 device classifications.""" from typing import Any, List, Optional, Union from pyisy.constants import ( ISY_VALUE_UNKNOWN, PROTO_GROUP, PROTO_INSTEON, PROTO_PROGRAM, PROTO_ZWAVE, TAG_FOLDER, ) from pyisy.nodes import Group, Node, Nodes from pyisy.programs import Programs from pyisy.variables import Variables from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR from homeassistant.components.climate.const import DOMAIN as CLIMATE from homeassistant.components.fan import DOMAIN as FAN from homeassistant.components.light import DOMAIN as LIGHT from homeassistant.components.sensor import DOMAIN as SENSOR from homeassistant.components.switch import DOMAIN as SWITCH from homeassistant.helpers.entity_registry import async_get_registry from homeassistant.helpers.typing import HomeAssistantType from .const import ( _LOGGER, DEFAULT_PROGRAM_STRING, DOMAIN, FILTER_INSTEON_TYPE, FILTER_NODE_DEF_ID, FILTER_STATES, FILTER_UOM, FILTER_ZWAVE_CAT, ISY994_NODES, ISY994_PROGRAMS, ISY994_VARIABLES, ISY_GROUP_PLATFORM, KEY_ACTIONS, KEY_STATUS, NODE_FILTERS, SUBNODE_CLIMATE_COOL, SUBNODE_CLIMATE_HEAT, SUBNODE_EZIO2X4_SENSORS, SUBNODE_FANLINC_LIGHT, SUBNODE_IOLINC_RELAY, SUPPORTED_PLATFORMS, SUPPORTED_PROGRAM_PLATFORMS, TYPE_CATEGORY_SENSOR_ACTUATORS, TYPE_EZIO2X4, UOM_DOUBLE_TEMP, UOM_ISYV4_DEGREES, ) BINARY_SENSOR_UOMS = ["2", "78"] BINARY_SENSOR_ISY_STATES = ["on", "off"] def _check_for_node_def( hass_isy_data: dict, node: Union[Group, Node], single_platform: str = None ) -> bool: """Check if the node matches the node_def_id for any platforms. This is only present on the 5.0 ISY firmware, and is the most reliable way to determine a device's type. """ if not hasattr(node, "node_def_id") or node.node_def_id is None: # Node doesn't have a node_def (pre 5.0 firmware most likely) return False node_def_id = node.node_def_id platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform] for platform in platforms: if node_def_id in NODE_FILTERS[platform][FILTER_NODE_DEF_ID]: hass_isy_data[ISY994_NODES][platform].append(node) return True return False def _check_for_insteon_type( hass_isy_data: dict, node: Union[Group, Node], single_platform: str = None ) -> bool: """Check if the node matches the Insteon type for any platforms. This is for (presumably) every version of the ISY firmware, but only works for Insteon device. "Node Server" (v5+) and Z-Wave and others will not have a type. """ if not hasattr(node, "protocol") or node.protocol != PROTO_INSTEON: return False if not hasattr(node, "type") or node.type is None: # Node doesn't have a type (non-Insteon device most likely) return False device_type = node.type platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform] for platform in platforms: if any( [ device_type.startswith(t) for t in set(NODE_FILTERS[platform][FILTER_INSTEON_TYPE]) ] ): # Hacky special-cases for certain devices with different platforms # included as subnodes. Note that special-cases are not necessary # on ISY 5.x firmware as it uses the superior NodeDefs method subnode_id = int(node.address.split(" ")[-1], 16) # FanLinc, which has a light module as one of its nodes. 
if platform == FAN and subnode_id == SUBNODE_FANLINC_LIGHT: hass_isy_data[ISY994_NODES][LIGHT].append(node) return True # Thermostats, which has a "Heat" and "Cool" sub-node on address 2 and 3 if platform == CLIMATE and subnode_id in [ SUBNODE_CLIMATE_COOL, SUBNODE_CLIMATE_HEAT, ]: hass_isy_data[ISY994_NODES][BINARY_SENSOR].append(node) return True # IOLincs which have a sensor and relay on 2 different nodes if ( platform == BINARY_SENSOR and device_type.startswith(TYPE_CATEGORY_SENSOR_ACTUATORS) and subnode_id == SUBNODE_IOLINC_RELAY ): hass_isy_data[ISY994_NODES][SWITCH].append(node) return True # Smartenit EZIO2X4 if ( platform == SWITCH and device_type.startswith(TYPE_EZIO2X4) and subnode_id in SUBNODE_EZIO2X4_SENSORS ): hass_isy_data[ISY994_NODES][BINARY_SENSOR].append(node) return True hass_isy_data[ISY994_NODES][platform].append(node) return True return False def _check_for_zwave_cat( hass_isy_data: dict, node: Union[Group, Node], single_platform: str = None ) -> bool: """Check if the node matches the ISY Z-Wave Category for any platforms. This is for (presumably) every version of the ISY firmware, but only works for Z-Wave Devices with the devtype.cat property. """ if not hasattr(node, "protocol") or node.protocol != PROTO_ZWAVE: return False if not hasattr(node, "zwave_props") or node.zwave_props is None: # Node doesn't have a device type category (non-Z-Wave device) return False device_type = node.zwave_props.category platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform] for platform in platforms: if any( [ device_type.startswith(t) for t in set(NODE_FILTERS[platform][FILTER_ZWAVE_CAT]) ] ): hass_isy_data[ISY994_NODES][platform].append(node) return True return False def _check_for_uom_id( hass_isy_data: dict, node: Union[Group, Node], single_platform: str = None, uom_list: list = None, ) -> bool: """Check if a node's uom matches any of the platforms uom filter. This is used for versions of the ISY firmware that report uoms as a single ID. We can often infer what type of device it is by that ID. """ if not hasattr(node, "uom") or node.uom in [None, ""]: # Node doesn't have a uom (Scenes for example) return False # Backwards compatibility for ISYv4 Firmware: node_uom = node.uom if isinstance(node.uom, list): node_uom = node.uom[0] if uom_list: if node_uom in uom_list: hass_isy_data[ISY994_NODES][single_platform].append(node) return True return False platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform] for platform in platforms: if node_uom in NODE_FILTERS[platform][FILTER_UOM]: hass_isy_data[ISY994_NODES][platform].append(node) return True return False def _check_for_states_in_uom( hass_isy_data: dict, node: Union[Group, Node], single_platform: str = None, states_list: list = None, ) -> bool: """Check if a list of uoms matches two possible filters. This is for versions of the ISY firmware that report uoms as a list of all possible "human readable" states. This filter passes if all of the possible states fit inside the given filter. 
""" if not hasattr(node, "uom") or node.uom in [None, ""]: # Node doesn't have a uom (Scenes for example) return False # This only works for ISYv4 Firmware where uom is a list of states: if not isinstance(node.uom, list): return False node_uom = set(map(str.lower, node.uom)) if states_list: if node_uom == set(states_list): hass_isy_data[ISY994_NODES][single_platform].append(node) return True return False platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform] for platform in platforms: if node_uom == set(NODE_FILTERS[platform][FILTER_STATES]): hass_isy_data[ISY994_NODES][platform].append(node) return True return False def _is_sensor_a_binary_sensor(hass_isy_data: dict, node: Union[Group, Node]) -> bool: """Determine if the given sensor node should be a binary_sensor.""" if _check_for_node_def(hass_isy_data, node, single_platform=BINARY_SENSOR): return True if _check_for_insteon_type(hass_isy_data, node, single_platform=BINARY_SENSOR): return True # For the next two checks, we're providing our own set of uoms that # represent on/off devices. This is because we can only depend on these # checks in the context of already knowing that this is definitely a # sensor device. if _check_for_uom_id( hass_isy_data, node, single_platform=BINARY_SENSOR, uom_list=BINARY_SENSOR_UOMS ): return True if _check_for_states_in_uom( hass_isy_data, node, single_platform=BINARY_SENSOR, states_list=BINARY_SENSOR_ISY_STATES, ): return True return False def _categorize_nodes( hass_isy_data: dict, nodes: Nodes, ignore_identifier: str, sensor_identifier: str ) -> None: """Sort the nodes to their proper platforms.""" for (path, node) in nodes: ignored = ignore_identifier in path or ignore_identifier in node.name if ignored: # Don't import this node as a device at all continue if hasattr(node, "protocol") and node.protocol == PROTO_GROUP: hass_isy_data[ISY994_NODES][ISY_GROUP_PLATFORM].append(node) continue if sensor_identifier in path or sensor_identifier in node.name: # User has specified to treat this as a sensor. First we need to # determine if it should be a binary_sensor. if _is_sensor_a_binary_sensor(hass_isy_data, node): continue hass_isy_data[ISY994_NODES][SENSOR].append(node) continue # We have a bunch of different methods for determining the device type, # each of which works with different ISY firmware versions or device # family. The order here is important, from most reliable to least. if _check_for_node_def(hass_isy_data, node): continue if _check_for_insteon_type(hass_isy_data, node): continue if _check_for_zwave_cat(hass_isy_data, node): continue if _check_for_uom_id(hass_isy_data, node): continue if _check_for_states_in_uom(hass_isy_data, node): continue # Fallback as as sensor, e.g. for un-sortable items like NodeServer nodes. 
hass_isy_data[ISY994_NODES][SENSOR].append(node) def _categorize_programs(hass_isy_data: dict, programs: Programs) -> None: """Categorize the ISY994 programs.""" for platform in SUPPORTED_PROGRAM_PLATFORMS: folder = programs.get_by_name(f"{DEFAULT_PROGRAM_STRING}{platform}") if not folder: continue for dtype, _, node_id in folder.children: if dtype != TAG_FOLDER: continue entity_folder = folder[node_id] actions = None status = entity_folder.get_by_name(KEY_STATUS) if not status or not status.protocol == PROTO_PROGRAM: _LOGGER.warning( "Program %s entity '%s' not loaded, invalid/missing status program", platform, entity_folder.name, ) continue if platform != BINARY_SENSOR: actions = entity_folder.get_by_name(KEY_ACTIONS) if not actions or not actions.protocol == PROTO_PROGRAM: _LOGGER.warning( "Program %s entity '%s' not loaded, invalid/missing actions program", platform, entity_folder.name, ) continue entity = (entity_folder.name, status, actions) hass_isy_data[ISY994_PROGRAMS][platform].append(entity) def _categorize_variables( hass_isy_data: dict, variables: Variables, identifier: str ) -> None: """Gather the ISY994 Variables to be added as sensors.""" try: var_to_add = [ (vtype, vname, vid) for (vtype, vname, vid) in variables.children if identifier in vname ] except KeyError as err: _LOGGER.error("Error adding ISY Variables: %s", err) return for vtype, vname, vid in var_to_add: hass_isy_data[ISY994_VARIABLES].append((vname, variables[vtype][vid])) async def migrate_old_unique_ids( hass: HomeAssistantType, platform: str, devices: Optional[List[Any]] ) -> None: """Migrate to new controller-specific unique ids.""" registry = await async_get_registry(hass) for device in devices: old_entity_id = registry.async_get_entity_id( platform, DOMAIN, device.old_unique_id ) if old_entity_id is not None: _LOGGER.debug( "Migrating unique_id from [%s] to [%s]", device.old_unique_id, device.unique_id, ) registry.async_update_entity(old_entity_id, new_unique_id=device.unique_id) old_entity_id_2 = registry.async_get_entity_id( platform, DOMAIN, device.unique_id.replace(":", "") ) if old_entity_id_2 is not None: _LOGGER.debug( "Migrating unique_id from [%s] to [%s]", device.unique_id.replace(":", ""), device.unique_id, ) registry.async_update_entity( old_entity_id_2, new_unique_id=device.unique_id ) def convert_isy_value_to_hass( value: Union[int, float, None], uom: str, precision: str, fallback_precision: Optional[int] = None, ) -> Union[float, int]: """Fix ISY Reported Values. ISY provides float values as an integer and precision component. Correct by shifting the decimal place left by the value of precision. (e.g. value=2345, prec="2" == 23.45) Insteon Thermostats report temperature in 0.5-deg precision as an int by sending a value of 2 times the Temp. Correct by dividing by 2 here. """ if value is None or value == ISY_VALUE_UNKNOWN: return None if uom in [UOM_DOUBLE_TEMP, UOM_ISYV4_DEGREES]: return round(float(value) / 2.0, 1) if precision != "0": return round(float(value) / 10 ** int(precision), int(precision)) if fallback_precision: return round(float(value), fallback_precision) return value
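To make the precision shifting in `convert_isy_value_to_hass` concrete, a few illustrative calls and their expected results (the `"17"` uom string and the sample values are made up for the example; only the UOM constants imported above are real):

# Illustrative calls to convert_isy_value_to_hass as defined above.
# "17" stands in for any uom that is not a double-temperature uom.
assert convert_isy_value_to_hass(2345, "17", "2") == 23.45            # 2345 / 10**2
assert convert_isy_value_to_hass(425, UOM_DOUBLE_TEMP, "0") == 212.5  # 425 / 2
assert convert_isy_value_to_hass(72, "17", "0") == 72                 # precision "0": unchanged
assert convert_isy_value_to_hass(None, "17", "0") is None             # unknown values become None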
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/isy994/helpers.py
"""Counter for the days until an HTTPS (TLS) certificate will expire.""" from datetime import timedelta import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_HOST, CONF_PORT, DEVICE_CLASS_TIMESTAMP, EVENT_HOMEASSISTANT_START, TIME_DAYS, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_call_later from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt from .const import DEFAULT_PORT, DOMAIN SCAN_INTERVAL = timedelta(hours=12) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up certificate expiry sensor.""" @callback def schedule_import(_): """Schedule delayed import after HA is fully started.""" async_call_later(hass, 10, do_import) @callback def do_import(_): """Process YAML import.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=dict(config) ) ) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, schedule_import) async def async_setup_entry(hass, entry, async_add_entities): """Add cert-expiry entry.""" coordinator = hass.data[DOMAIN][entry.entry_id] sensors = [ SSLCertificateDays(coordinator), SSLCertificateTimestamp(coordinator), ] async_add_entities(sensors, True) class CertExpiryEntity(CoordinatorEntity): """Defines a base Cert Expiry entity.""" @property def icon(self): """Icon to use in the frontend, if any.""" return "mdi:certificate" @property def device_state_attributes(self): """Return additional sensor state attributes.""" return { "is_valid": self.coordinator.is_cert_valid, "error": str(self.coordinator.cert_error), } class SSLCertificateDays(CertExpiryEntity): """Implementation of the Cert Expiry days sensor.""" @property def name(self): """Return the name of the sensor.""" return f"Cert Expiry ({self.coordinator.name})" @property def state(self): """Return the state of the sensor.""" if not self.coordinator.is_cert_valid: return 0 expiry = self.coordinator.data - dt.utcnow() return expiry.days @property def unique_id(self): """Return a unique id for the sensor.""" return f"{self.coordinator.host}:{self.coordinator.port}" @property def unit_of_measurement(self): """Return the unit this state is expressed in.""" return TIME_DAYS class SSLCertificateTimestamp(CertExpiryEntity): """Implementation of the Cert Expiry timestamp sensor.""" @property def device_class(self): """Return the device class of the sensor.""" return DEVICE_CLASS_TIMESTAMP @property def name(self): """Return the name of the sensor.""" return f"Cert Expiry Timestamp ({self.coordinator.name})" @property def state(self): """Return the state of the sensor.""" if self.coordinator.data: return self.coordinator.data.isoformat() return None @property def unique_id(self): """Return a unique id for the sensor.""" return f"{self.coordinator.host}:{self.coordinator.port}-timestamp"
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/cert_expiry/sensor.py
"""Platform for Time of Flight sensor VL53L1X from STMicroelectronics.""" import asyncio from functools import partial from VL53L1X2 import VL53L1X # pylint: disable=import-error import voluptuous as vol from homeassistant.components import rpi_gpio from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME, LENGTH_MILLIMETERS import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity CONF_I2C_ADDRESS = "i2c_address" CONF_I2C_BUS = "i2c_bus" CONF_XSHUT = "xshut" DEFAULT_NAME = "VL53L1X" DEFAULT_I2C_ADDRESS = 0x29 DEFAULT_I2C_BUS = 1 DEFAULT_XSHUT = 16 DEFAULT_RANGE = 2 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.Coerce(int), vol.Optional(CONF_I2C_BUS, default=DEFAULT_I2C_BUS): vol.Coerce(int), vol.Optional(CONF_XSHUT, default=DEFAULT_XSHUT): cv.positive_int, } ) def init_tof_0(xshut, sensor): """XSHUT port LOW resets the device.""" sensor.open() rpi_gpio.setup_output(xshut) rpi_gpio.write_output(xshut, 0) def init_tof_1(xshut): """XSHUT port HIGH enables the device.""" rpi_gpio.setup_output(xshut) rpi_gpio.write_output(xshut, 1) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Reset and initialize the VL53L1X ToF Sensor from STMicroelectronics.""" name = config.get(CONF_NAME) bus_number = config.get(CONF_I2C_BUS) i2c_address = config.get(CONF_I2C_ADDRESS) unit = LENGTH_MILLIMETERS xshut = config.get(CONF_XSHUT) sensor = await hass.async_add_executor_job(partial(VL53L1X, bus_number)) await hass.async_add_executor_job(init_tof_0, xshut, sensor) await asyncio.sleep(0.01) await hass.async_add_executor_job(init_tof_1, xshut) await asyncio.sleep(0.01) dev = [VL53L1XSensor(sensor, name, unit, i2c_address)] async_add_entities(dev, True) class VL53L1XSensor(Entity): """Implementation of VL53L1X sensor.""" def __init__(self, vl53l1x_sensor, name, unit, i2c_address): """Initialize the sensor.""" self._name = name self._unit_of_measurement = unit self.vl53l1x_sensor = vl53l1x_sensor self.i2c_address = i2c_address self._state = None self.init = True @property def name(self) -> str: """Return the name of the sensor.""" return self._name @property def state(self) -> int: """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self) -> str: """Return the unit of measurement.""" return self._unit_of_measurement def update(self): """Get the latest measurement and update state.""" if self.init: self.vl53l1x_sensor.add_sensor(self.i2c_address, self.i2c_address) self.init = False self.vl53l1x_sensor.start_ranging(self.i2c_address, DEFAULT_RANGE) self.vl53l1x_sensor.update(self.i2c_address) self.vl53l1x_sensor.stop_ranging(self.i2c_address) self._state = self.vl53l1x_sensor.distance
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/tof/sensor.py
"""Support for MySensors sensors.""" from homeassistant.components import mysensors from homeassistant.components.sensor import DOMAIN from homeassistant.const import ( CONDUCTIVITY, DEGREE, ELECTRICAL_CURRENT_AMPERE, ELECTRICAL_VOLT_AMPERE, ENERGY_KILO_WATT_HOUR, FREQUENCY_HERTZ, LENGTH_METERS, LIGHT_LUX, MASS_KILOGRAMS, PERCENTAGE, POWER_WATT, TEMP_CELSIUS, TEMP_FAHRENHEIT, VOLT, VOLUME_CUBIC_METERS, ) SENSORS = { "V_TEMP": [None, "mdi:thermometer"], "V_HUM": [PERCENTAGE, "mdi:water-percent"], "V_DIMMER": [PERCENTAGE, "mdi:percent"], "V_PERCENTAGE": [PERCENTAGE, "mdi:percent"], "V_PRESSURE": [None, "mdi:gauge"], "V_FORECAST": [None, "mdi:weather-partly-cloudy"], "V_RAIN": [None, "mdi:weather-rainy"], "V_RAINRATE": [None, "mdi:weather-rainy"], "V_WIND": [None, "mdi:weather-windy"], "V_GUST": [None, "mdi:weather-windy"], "V_DIRECTION": [DEGREE, "mdi:compass"], "V_WEIGHT": [MASS_KILOGRAMS, "mdi:weight-kilogram"], "V_DISTANCE": [LENGTH_METERS, "mdi:ruler"], "V_IMPEDANCE": ["ohm", None], "V_WATT": [POWER_WATT, None], "V_KWH": [ENERGY_KILO_WATT_HOUR, None], "V_LIGHT_LEVEL": [PERCENTAGE, "mdi:white-balance-sunny"], "V_FLOW": [LENGTH_METERS, "mdi:gauge"], "V_VOLUME": [f"{VOLUME_CUBIC_METERS}", None], "V_LEVEL": { "S_SOUND": ["dB", "mdi:volume-high"], "S_VIBRATION": [FREQUENCY_HERTZ, None], "S_LIGHT_LEVEL": [LIGHT_LUX, "mdi:white-balance-sunny"], }, "V_VOLTAGE": [VOLT, "mdi:flash"], "V_CURRENT": [ELECTRICAL_CURRENT_AMPERE, "mdi:flash-auto"], "V_PH": ["pH", None], "V_ORP": ["mV", None], "V_EC": [CONDUCTIVITY, None], "V_VAR": ["var", None], "V_VA": [ELECTRICAL_VOLT_AMPERE, None], } async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the MySensors platform for sensors.""" mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, MySensorsSensor, async_add_entities=async_add_entities, ) class MySensorsSensor(mysensors.device.MySensorsEntity): """Representation of a MySensors Sensor child node.""" @property def force_update(self): """Return True if state updates should be forced. If True, a state change will be triggered anytime the state property is updated, not just when the value changes. """ return True @property def state(self): """Return the state of the device.""" return self._values.get(self.value_type) @property def icon(self): """Return the icon to use in the frontend, if any.""" _, icon = self._get_sensor_type() return icon @property def unit_of_measurement(self): """Return the unit of measurement of this entity.""" set_req = self.gateway.const.SetReq if ( float(self.gateway.protocol_version) >= 1.5 and set_req.V_UNIT_PREFIX in self._values ): return self._values[set_req.V_UNIT_PREFIX] unit, _ = self._get_sensor_type() return unit def _get_sensor_type(self): """Return list with unit and icon of sensor type.""" pres = self.gateway.const.Presentation set_req = self.gateway.const.SetReq SENSORS[set_req.V_TEMP.name][0] = ( TEMP_CELSIUS if self.gateway.metric else TEMP_FAHRENHEIT ) sensor_type = SENSORS.get(set_req(self.value_type).name, [None, None]) if isinstance(sensor_type, dict): sensor_type = sensor_type.get(pres(self.child_type).name, [None, None]) return sensor_type
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/mysensors/sensor.py
"""Sensor for Suez Water Consumption data.""" from datetime import timedelta import logging from pysuez import SuezClient from pysuez.client import PySuezError import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, VOLUME_LITERS import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) CONF_COUNTER_ID = "counter_id" SCAN_INTERVAL = timedelta(hours=12) COMPONENT_ICON = "mdi:water-pump" COMPONENT_NAME = "Suez Water Client" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_COUNTER_ID): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the sensor platform.""" username = config[CONF_USERNAME] password = config[CONF_PASSWORD] counter_id = config[CONF_COUNTER_ID] try: client = SuezClient(username, password, counter_id) if not client.check_credentials(): _LOGGER.warning("Wrong username and/or password") return except PySuezError: _LOGGER.warning("Unable to create Suez Client") return add_entities([SuezSensor(client)], True) class SuezSensor(Entity): """Representation of a Sensor.""" def __init__(self, client): """Initialize the data object.""" self._attributes = {} self._state = None self._available = None self.client = client @property def name(self): """Return the name of the sensor.""" return COMPONENT_NAME @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement.""" return VOLUME_LITERS @property def device_state_attributes(self): """Return the state attributes.""" return self._attributes @property def icon(self): """Return the icon of the sensor.""" return COMPONENT_ICON def _fetch_data(self): """Fetch latest data from Suez.""" try: self.client.update() # _state holds the volume of consumed water during previous day self._state = self.client.state self._available = True self._attributes["attribution"] = self.client.attributes["attribution"] self._attributes["this_month_consumption"] = {} for item in self.client.attributes["thisMonthConsumption"]: self._attributes["this_month_consumption"][ item ] = self.client.attributes["thisMonthConsumption"][item] self._attributes["previous_month_consumption"] = {} for item in self.client.attributes["previousMonthConsumption"]: self._attributes["previous_month_consumption"][ item ] = self.client.attributes["previousMonthConsumption"][item] self._attributes["highest_monthly_consumption"] = self.client.attributes[ "highestMonthlyConsumption" ] self._attributes["last_year_overall"] = self.client.attributes[ "lastYearOverAll" ] self._attributes["this_year_overall"] = self.client.attributes[ "thisYearOverAll" ] self._attributes["history"] = {} for item in self.client.attributes["history"]: self._attributes["history"][item] = self.client.attributes["history"][ item ] except PySuezError: self._available = False _LOGGER.warning("Unable to fetch data") def update(self): """Return the latest collected data from Linky.""" self._fetch_data() _LOGGER.debug("Suez data state is: %s", self._state)
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/suez_water/sensor.py
"""Helpers for LCN component.""" import re import voluptuous as vol from homeassistant.const import CONF_NAME from .const import DEFAULT_NAME # Regex for address validation PATTERN_ADDRESS = re.compile( "^((?P<conn_id>\\w+)\\.)?s?(?P<seg_id>\\d+)\\.(?P<type>m|g)?(?P<id>\\d+)$" ) def get_connection(connections, connection_id=None): """Return the connection object from list.""" if connection_id is None: connection = connections[0] else: for connection in connections: if connection.connection_id == connection_id: break else: raise ValueError("Unknown connection_id.") return connection def has_unique_connection_names(connections): """Validate that all connection names are unique. Use 'pchk' as default connection_name (or add a numeric suffix if pchk' is already in use. """ for suffix, connection in enumerate(connections): connection_name = connection.get(CONF_NAME) if connection_name is None: if suffix == 0: connection[CONF_NAME] = DEFAULT_NAME else: connection[CONF_NAME] = f"{DEFAULT_NAME}{suffix:d}" schema = vol.Schema(vol.Unique()) schema([connection.get(CONF_NAME) for connection in connections]) return connections def is_address(value): """Validate the given address string. Examples for S000M005 at myhome: myhome.s000.m005 myhome.s0.m5 myhome.0.5 ("m" is implicit if missing) Examples for s000g011 myhome.0.g11 myhome.s0.g11 """ matcher = PATTERN_ADDRESS.match(value) if matcher: is_group = matcher.group("type") == "g" addr = (int(matcher.group("seg_id")), int(matcher.group("id")), is_group) conn_id = matcher.group("conn_id") return addr, conn_id raise vol.error.Invalid("Not a valid address string.") def is_relays_states_string(states_string): """Validate the given states string and return states list.""" if len(states_string) == 8: states = [] for state_string in states_string: if state_string == "1": state = "ON" elif state_string == "0": state = "OFF" elif state_string == "T": state = "TOGGLE" elif state_string == "-": state = "NOCHANGE" else: raise vol.error.Invalid("Not a valid relay state string.") states.append(state) return states raise vol.error.Invalid("Wrong length of relay state string.") def is_key_lock_states_string(states_string): """Validate the given states string and returns states list.""" if len(states_string) == 8: states = [] for state_string in states_string: if state_string == "1": state = "ON" elif state_string == "0": state = "OFF" elif state_string == "T": state = "TOGGLE" elif state_string == "-": state = "NOCHANGE" else: raise vol.error.Invalid("Not a valid key lock state string.") states.append(state) return states raise vol.error.Invalid("Wrong length of key lock state string.")
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/lcn/helpers.py
"""Constants for the Coolmaster integration.""" from homeassistant.components.climate.const import ( HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_HEAT, HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF, ) DATA_INFO = "info" DATA_COORDINATOR = "coordinator" DOMAIN = "coolmaster" DEFAULT_PORT = 10102 CONF_SUPPORTED_MODES = "supported_modes" AVAILABLE_MODES = [ HVAC_MODE_OFF, HVAC_MODE_HEAT, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_HEAT_COOL, HVAC_MODE_FAN_ONLY, ]
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/coolmaster/const.py
"""Adds config flow for AccuWeather.""" import asyncio from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError from aiohttp import ClientError from aiohttp.client_exceptions import ClientConnectorError from async_timeout import timeout import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from .const import CONF_FORECAST, DOMAIN # pylint:disable=unused-import class AccuWeatherFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Config flow for AccuWeather.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" # Under the terms of use of the API, one user can use one free API key. Due to # the small number of requests allowed, we only allow one integration instance. if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") errors = {} if user_input is not None: websession = async_get_clientsession(self.hass) try: async with timeout(10): accuweather = AccuWeather( user_input[CONF_API_KEY], websession, latitude=user_input[CONF_LATITUDE], longitude=user_input[CONF_LONGITUDE], ) await accuweather.async_get_location() except (ApiError, ClientConnectorError, asyncio.TimeoutError, ClientError): errors["base"] = "cannot_connect" except InvalidApiKeyError: errors[CONF_API_KEY] = "invalid_api_key" except RequestsExceededError: errors[CONF_API_KEY] = "requests_exceeded" else: await self.async_set_unique_id( accuweather.location_key, raise_on_progress=False ) return self.async_create_entry( title=user_input[CONF_NAME], data=user_input ) return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_API_KEY): str, vol.Optional( CONF_LATITUDE, default=self.hass.config.latitude ): cv.latitude, vol.Optional( CONF_LONGITUDE, default=self.hass.config.longitude ): cv.longitude, vol.Optional( CONF_NAME, default=self.hass.config.location_name ): str, } ), errors=errors, ) @staticmethod @callback def async_get_options_flow(config_entry): """Options callback for AccuWeather.""" return AccuWeatherOptionsFlowHandler(config_entry) class AccuWeatherOptionsFlowHandler(config_entries.OptionsFlow): """Config flow options for AccuWeather.""" def __init__(self, config_entry): """Initialize AccuWeather options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Manage the options.""" return await self.async_step_user() async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Optional( CONF_FORECAST, default=self.config_entry.options.get(CONF_FORECAST, False), ): bool } ), )
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/accuweather/config_flow.py
"""Sensor for the CityBikes data.""" import asyncio from datetime import timedelta import logging import aiohttp import async_timeout import voluptuous as vol from homeassistant.components.sensor import ENTITY_ID_FORMAT, PLATFORM_SCHEMA from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_ID, ATTR_LATITUDE, ATTR_LOCATION, ATTR_LONGITUDE, ATTR_NAME, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, CONF_RADIUS, LENGTH_FEET, LENGTH_METERS, ) from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity, async_generate_entity_id from homeassistant.helpers.event import async_track_time_interval from homeassistant.util import distance, location _LOGGER = logging.getLogger(__name__) ATTR_EMPTY_SLOTS = "empty_slots" ATTR_EXTRA = "extra" ATTR_FREE_BIKES = "free_bikes" ATTR_NETWORK = "network" ATTR_NETWORKS_LIST = "networks" ATTR_STATIONS_LIST = "stations" ATTR_TIMESTAMP = "timestamp" ATTR_UID = "uid" CONF_NETWORK = "network" CONF_STATIONS_LIST = "stations" DEFAULT_ENDPOINT = "https://api.citybik.es/{uri}" PLATFORM = "citybikes" MONITORED_NETWORKS = "monitored-networks" NETWORKS_URI = "v2/networks" REQUEST_TIMEOUT = 5 # In seconds; argument to asyncio.timeout SCAN_INTERVAL = timedelta(minutes=5) # Timely, and doesn't suffocate the API STATIONS_URI = "v2/networks/{uid}?fields=network.stations" CITYBIKES_ATTRIBUTION = ( "Information provided by the CityBikes Project (https://citybik.es/#about)" ) CITYBIKES_NETWORKS = "citybikes_networks" PLATFORM_SCHEMA = vol.All( cv.has_at_least_one_key(CONF_RADIUS, CONF_STATIONS_LIST), PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=""): cv.string, vol.Optional(CONF_NETWORK): cv.string, vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude, vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude, vol.Optional(CONF_RADIUS, "station_filter"): cv.positive_int, vol.Optional(CONF_STATIONS_LIST, "station_filter"): vol.All( cv.ensure_list, vol.Length(min=1), [cv.string] ), } ), ) NETWORK_SCHEMA = vol.Schema( { vol.Required(ATTR_ID): cv.string, vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_LOCATION): vol.Schema( { vol.Required(ATTR_LATITUDE): cv.latitude, vol.Required(ATTR_LONGITUDE): cv.longitude, }, extra=vol.REMOVE_EXTRA, ), }, extra=vol.REMOVE_EXTRA, ) NETWORKS_RESPONSE_SCHEMA = vol.Schema( {vol.Required(ATTR_NETWORKS_LIST): [NETWORK_SCHEMA]} ) STATION_SCHEMA = vol.Schema( { vol.Required(ATTR_FREE_BIKES): cv.positive_int, vol.Required(ATTR_EMPTY_SLOTS): vol.Any(cv.positive_int, None), vol.Required(ATTR_LATITUDE): cv.latitude, vol.Required(ATTR_LONGITUDE): cv.longitude, vol.Required(ATTR_ID): cv.string, vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_TIMESTAMP): cv.string, vol.Optional(ATTR_EXTRA): vol.Schema( {vol.Optional(ATTR_UID): cv.string}, extra=vol.REMOVE_EXTRA ), }, extra=vol.REMOVE_EXTRA, ) STATIONS_RESPONSE_SCHEMA = vol.Schema( { vol.Required(ATTR_NETWORK): vol.Schema( {vol.Required(ATTR_STATIONS_LIST): [STATION_SCHEMA]}, extra=vol.REMOVE_EXTRA ) } ) class CityBikesRequestError(Exception): """Error to indicate a CityBikes API request has failed.""" async def async_citybikes_request(hass, uri, schema): """Perform a request to CityBikes API endpoint, and parse the response.""" try: session = async_get_clientsession(hass) with async_timeout.timeout(REQUEST_TIMEOUT): req = await session.get(DEFAULT_ENDPOINT.format(uri=uri)) json_response = await req.json() return 
schema(json_response) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.error("Could not connect to CityBikes API endpoint") except ValueError: _LOGGER.error("Received non-JSON data from CityBikes API endpoint") except vol.Invalid as err: _LOGGER.error("Received unexpected JSON from CityBikes API endpoint: %s", err) raise CityBikesRequestError async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the CityBikes platform.""" if PLATFORM not in hass.data: hass.data[PLATFORM] = {MONITORED_NETWORKS: {}} latitude = config.get(CONF_LATITUDE, hass.config.latitude) longitude = config.get(CONF_LONGITUDE, hass.config.longitude) network_id = config.get(CONF_NETWORK) stations_list = set(config.get(CONF_STATIONS_LIST, [])) radius = config.get(CONF_RADIUS, 0) name = config[CONF_NAME] if not hass.config.units.is_metric: radius = distance.convert(radius, LENGTH_FEET, LENGTH_METERS) # Create a single instance of CityBikesNetworks. networks = hass.data.setdefault(CITYBIKES_NETWORKS, CityBikesNetworks(hass)) if not network_id: network_id = await networks.get_closest_network_id(latitude, longitude) if network_id not in hass.data[PLATFORM][MONITORED_NETWORKS]: network = CityBikesNetwork(hass, network_id) hass.data[PLATFORM][MONITORED_NETWORKS][network_id] = network hass.async_create_task(network.async_refresh()) async_track_time_interval(hass, network.async_refresh, SCAN_INTERVAL) else: network = hass.data[PLATFORM][MONITORED_NETWORKS][network_id] await network.ready.wait() devices = [] for station in network.stations: dist = location.distance( latitude, longitude, station[ATTR_LATITUDE], station[ATTR_LONGITUDE] ) station_id = station[ATTR_ID] station_uid = str(station.get(ATTR_EXTRA, {}).get(ATTR_UID, "")) if radius > dist or stations_list.intersection((station_id, station_uid)): if name: uid = "_".join([network.network_id, name, station_id]) else: uid = "_".join([network.network_id, station_id]) entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, uid, hass=hass) devices.append(CityBikesStation(network, station_id, entity_id)) async_add_entities(devices, True) class CityBikesNetworks: """Represent all CityBikes networks.""" def __init__(self, hass): """Initialize the networks instance.""" self.hass = hass self.networks = None self.networks_loading = asyncio.Condition() async def get_closest_network_id(self, latitude, longitude): """Return the id of the network closest to provided location.""" try: await self.networks_loading.acquire() if self.networks is None: networks = await async_citybikes_request( self.hass, NETWORKS_URI, NETWORKS_RESPONSE_SCHEMA ) self.networks = networks[ATTR_NETWORKS_LIST] result = None minimum_dist = None for network in self.networks: network_latitude = network[ATTR_LOCATION][ATTR_LATITUDE] network_longitude = network[ATTR_LOCATION][ATTR_LONGITUDE] dist = location.distance( latitude, longitude, network_latitude, network_longitude ) if minimum_dist is None or dist < minimum_dist: minimum_dist = dist result = network[ATTR_ID] return result except CityBikesRequestError as err: raise PlatformNotReady from err finally: self.networks_loading.release() class CityBikesNetwork: """Thin wrapper around a CityBikes network object.""" def __init__(self, hass, network_id): """Initialize the network object.""" self.hass = hass self.network_id = network_id self.stations = [] self.ready = asyncio.Event() async def async_refresh(self, now=None): """Refresh the state of the network.""" try: network = await async_citybikes_request( self.hass, 
STATIONS_URI.format(uid=self.network_id), STATIONS_RESPONSE_SCHEMA, ) self.stations = network[ATTR_NETWORK][ATTR_STATIONS_LIST] self.ready.set() except CityBikesRequestError as err: if now is not None: self.ready.clear() else: raise PlatformNotReady from err class CityBikesStation(Entity): """CityBikes API Sensor.""" def __init__(self, network, station_id, entity_id): """Initialize the sensor.""" self._network = network self._station_id = station_id self._station_data = {} self.entity_id = entity_id @property def state(self): """Return the state of the sensor.""" return self._station_data.get(ATTR_FREE_BIKES) @property def name(self): """Return the name of the sensor.""" return self._station_data.get(ATTR_NAME) async def async_update(self): """Update station state.""" for station in self._network.stations: if station[ATTR_ID] == self._station_id: self._station_data = station break @property def device_state_attributes(self): """Return the state attributes.""" if self._station_data: return { ATTR_ATTRIBUTION: CITYBIKES_ATTRIBUTION, ATTR_UID: self._station_data.get(ATTR_EXTRA, {}).get(ATTR_UID), ATTR_LATITUDE: self._station_data[ATTR_LATITUDE], ATTR_LONGITUDE: self._station_data[ATTR_LONGITUDE], ATTR_EMPTY_SLOTS: self._station_data[ATTR_EMPTY_SLOTS], ATTR_TIMESTAMP: self._station_data[ATTR_TIMESTAMP], } return {ATTR_ATTRIBUTION: CITYBIKES_ATTRIBUTION} @property def unit_of_measurement(self): """Return the unit of measurement.""" return "bikes" @property def icon(self): """Return the icon.""" return "mdi:bike"
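# --- Hedged example (not part of the original file) ---
# A standalone sketch of the selection logic in get_closest_network_id,
# using a plain haversine function in place of
# homeassistant.util.location.distance. The `networks` entries are made up.
import math


def haversine_km(lat1, lon1, lat2, lon2):
    """Great-circle distance in kilometres."""
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    dphi = math.radians(lat2 - lat1)
    dlam = math.radians(lon2 - lon1)
    a = math.sin(dphi / 2) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(dlam / 2) ** 2
    return 2 * 6371.0 * math.asin(math.sqrt(a))


networks = [
    {"id": "velib", "location": {"latitude": 48.8566, "longitude": 2.3522}},
    {"id": "bicing", "location": {"latitude": 41.3874, "longitude": 2.1686}},
]


def closest_network_id(latitude, longitude):
    """Pick the network whose coordinates minimise the distance."""
    return min(
        networks,
        key=lambda net: haversine_km(
            latitude,
            longitude,
            net["location"]["latitude"],
            net["location"]["longitude"],
        ),
    )["id"]


print(closest_network_id(48.85, 2.35))  # velib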
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/citybikes/sensor.py
"""Config flow for NZBGet.""" import logging from typing import Any, Dict, Optional import voluptuous as vol from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow, OptionsFlow from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_SCAN_INTERVAL, CONF_SSL, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import callback from homeassistant.helpers.typing import ConfigType, HomeAssistantType from .const import ( DEFAULT_NAME, DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DEFAULT_SSL, DEFAULT_VERIFY_SSL, ) from .const import DOMAIN # pylint: disable=unused-import from .coordinator import NZBGetAPI, NZBGetAPIException _LOGGER = logging.getLogger(__name__) def validate_input(hass: HomeAssistantType, data: dict) -> Dict[str, Any]: """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ nzbget_api = NZBGetAPI( data[CONF_HOST], data.get(CONF_USERNAME), data.get(CONF_PASSWORD), data[CONF_SSL], data[CONF_VERIFY_SSL], data[CONF_PORT], ) nzbget_api.version() return True class NZBGetConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for NZBGet.""" VERSION = 1 CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return NZBGetOptionsFlowHandler(config_entry) async def async_step_import( self, user_input: Optional[ConfigType] = None ) -> Dict[str, Any]: """Handle a flow initiated by configuration file.""" if CONF_SCAN_INTERVAL in user_input: user_input[CONF_SCAN_INTERVAL] = user_input[CONF_SCAN_INTERVAL].seconds return await self.async_step_user(user_input) async def async_step_user( self, user_input: Optional[ConfigType] = None ) -> Dict[str, Any]: """Handle a flow initiated by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") errors = {} if user_input is not None: if CONF_VERIFY_SSL not in user_input: user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL try: await self.hass.async_add_executor_job( validate_input, self.hass, user_input ) except NZBGetAPIException: errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") else: return self.async_create_entry( title=user_input[CONF_HOST], data=user_input, ) data_schema = { vol.Required(CONF_HOST): str, vol.Optional(CONF_NAME, default=DEFAULT_NAME): str, vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool, } if self.show_advanced_options: data_schema[ vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL) ] = bool return self.async_show_form( step_id="user", data_schema=vol.Schema(data_schema), errors=errors or {}, ) class NZBGetOptionsFlowHandler(OptionsFlow): """Handle NZBGet client options.""" def __init__(self, config_entry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input: Optional[ConfigType] = None): """Manage NZBGet options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) options = { vol.Optional( CONF_SCAN_INTERVAL, default=self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ), ): int, } return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/nzbget/config_flow.py
"""Support for LiteJet lights.""" import logging from homeassistant.components import litejet from homeassistant.components.light import ( ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, LightEntity, ) _LOGGER = logging.getLogger(__name__) ATTR_NUMBER = "number" def setup_platform(hass, config, add_entities, discovery_info=None): """Set up lights for the LiteJet platform.""" litejet_ = hass.data["litejet_system"] devices = [] for i in litejet_.loads(): name = litejet_.get_load_name(i) if not litejet.is_ignored(hass, name): devices.append(LiteJetLight(hass, litejet_, i, name)) add_entities(devices, True) class LiteJetLight(LightEntity): """Representation of a single LiteJet light.""" def __init__(self, hass, lj, i, name): """Initialize a LiteJet light.""" self._hass = hass self._lj = lj self._index = i self._brightness = 0 self._name = name lj.on_load_activated(i, self._on_load_changed) lj.on_load_deactivated(i, self._on_load_changed) def _on_load_changed(self): """Handle state changes.""" _LOGGER.debug("Updating due to notification for %s", self._name) self.schedule_update_ha_state(True) @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS @property def name(self): """Return the light's name.""" return self._name @property def brightness(self): """Return the light's brightness.""" return self._brightness @property def is_on(self): """Return if the light is on.""" return self._brightness != 0 @property def should_poll(self): """Return that lights do not require polling.""" return False @property def device_state_attributes(self): """Return the device state attributes.""" return {ATTR_NUMBER: self._index} def turn_on(self, **kwargs): """Turn on the light.""" if ATTR_BRIGHTNESS in kwargs: brightness = int(kwargs[ATTR_BRIGHTNESS] / 255 * 99) self._lj.activate_load_at(self._index, brightness, 0) else: self._lj.activate_load(self._index) def turn_off(self, **kwargs): """Turn off the light.""" self._lj.deactivate_load(self._index) def update(self): """Retrieve the light's brightness from the LiteJet system.""" self._brightness = self._lj.get_load_level(self._index) / 99 * 255
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/litejet/light.py
"""Support for Tado sensors for each zone.""" import logging from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, TEMP_CELSIUS from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from .const import ( DATA, DEFAULT_NAME, DOMAIN, SIGNAL_TADO_UPDATE_RECEIVED, TADO_BRIDGE, TYPE_AIR_CONDITIONING, TYPE_HEATING, TYPE_HOT_WATER, ) from .entity import TadoZoneEntity _LOGGER = logging.getLogger(__name__) ZONE_SENSORS = { TYPE_HEATING: [ "temperature", "humidity", "power", "link", "heating", "tado mode", "overlay", "early start", "open window", ], TYPE_AIR_CONDITIONING: [ "temperature", "humidity", "power", "link", "ac", "tado mode", "overlay", "open window", ], TYPE_HOT_WATER: ["power", "link", "tado mode", "overlay"], } DEVICE_SENSORS = ["tado bridge status"] async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities ): """Set up the Tado sensor platform.""" tado = hass.data[DOMAIN][entry.entry_id][DATA] # Create zone sensors zones = tado.zones devices = tado.devices entities = [] for zone in zones: zone_type = zone["type"] if zone_type not in ZONE_SENSORS: _LOGGER.warning("Unknown zone type skipped: %s", zone_type) continue entities.extend( [ TadoZoneSensor( tado, zone["name"], zone["id"], variable, zone["devices"][0] ) for variable in ZONE_SENSORS[zone_type] ] ) # Create device sensors for device in devices: entities.extend( [ TadoDeviceSensor(tado, device["name"], device["id"], variable, device) for variable in DEVICE_SENSORS ] ) if entities: async_add_entities(entities, True) class TadoZoneSensor(TadoZoneEntity, Entity): """Representation of a tado Sensor.""" def __init__(self, tado, zone_name, zone_id, zone_variable, device_info): """Initialize of the Tado Sensor.""" self._tado = tado super().__init__(zone_name, device_info, tado.device_id, zone_id) self.zone_id = zone_id self.zone_variable = zone_variable self._unique_id = f"{zone_variable} {zone_id} {tado.device_id}" self._state = None self._state_attributes = None self._tado_zone_data = None async def async_added_to_hass(self): """Register for sensor updates.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_TADO_UPDATE_RECEIVED.format( self._tado.device_id, "zone", self.zone_id ), self._async_update_callback, ) ) self._async_update_zone_data() @property def unique_id(self): """Return the unique id.""" return self._unique_id @property def name(self): """Return the name of the sensor.""" return f"{self.zone_name} {self.zone_variable}" @property def state(self): """Return the state of the sensor.""" return self._state @property def device_state_attributes(self): """Return the state attributes.""" return self._state_attributes @property def unit_of_measurement(self): """Return the unit of measurement.""" if self.zone_variable == "temperature": return self.hass.config.units.temperature_unit if self.zone_variable == "humidity": return PERCENTAGE if self.zone_variable == "heating": return PERCENTAGE if self.zone_variable == "ac": return None @property def icon(self): """Icon for the sensor.""" if self.zone_variable == "temperature": return "mdi:thermometer" if self.zone_variable == "humidity": return "mdi:water-percent" @callback def _async_update_callback(self): """Update and write state.""" self._async_update_zone_data() self.async_write_ha_state() @callback def _async_update_zone_data(self): """Handle update callbacks.""" try: 
self._tado_zone_data = self._tado.data["zone"][self.zone_id] except KeyError: return if self.zone_variable == "temperature": self._state = self.hass.config.units.temperature( self._tado_zone_data.current_temp, TEMP_CELSIUS ) self._state_attributes = { "time": self._tado_zone_data.current_temp_timestamp, "setting": 0, # setting is used in climate device } elif self.zone_variable == "humidity": self._state = self._tado_zone_data.current_humidity self._state_attributes = { "time": self._tado_zone_data.current_humidity_timestamp } elif self.zone_variable == "power": self._state = self._tado_zone_data.power elif self.zone_variable == "link": self._state = self._tado_zone_data.link elif self.zone_variable == "heating": self._state = self._tado_zone_data.heating_power_percentage self._state_attributes = { "time": self._tado_zone_data.heating_power_timestamp } elif self.zone_variable == "ac": self._state = self._tado_zone_data.ac_power self._state_attributes = {"time": self._tado_zone_data.ac_power_timestamp} elif self.zone_variable == "tado bridge status": self._state = self._tado_zone_data.connection elif self.zone_variable == "tado mode": self._state = self._tado_zone_data.tado_mode elif self.zone_variable == "overlay": self._state = self._tado_zone_data.overlay_active self._state_attributes = ( {"termination": self._tado_zone_data.overlay_termination_type} if self._tado_zone_data.overlay_active else {} ) elif self.zone_variable == "early start": self._state = self._tado_zone_data.preparation elif self.zone_variable == "open window": self._state = bool( self._tado_zone_data.open_window or self._tado_zone_data.open_window_detected ) self._state_attributes = self._tado_zone_data.open_window_attr class TadoDeviceSensor(Entity): """Representation of a tado Sensor.""" def __init__(self, tado, device_name, device_id, device_variable, device_info): """Initialize of the Tado Sensor.""" self._tado = tado self._device_info = device_info self.device_name = device_name self.device_id = device_id self.device_variable = device_variable self._unique_id = f"{device_variable} {device_id} {tado.device_id}" self._state = None self._state_attributes = None self._tado_device_data = None async def async_added_to_hass(self): """Register for sensor updates.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_TADO_UPDATE_RECEIVED.format( self._tado.device_id, "device", self.device_id ), self._async_update_callback, ) ) self._async_update_device_data() @property def unique_id(self): """Return the unique id.""" return self._unique_id @property def name(self): """Return the name of the sensor.""" return f"{self.device_name} {self.device_variable}" @property def state(self): """Return the state of the sensor.""" return self._state @property def should_poll(self): """Do not poll.""" return False @callback def _async_update_callback(self): """Update and write state.""" self._async_update_device_data() self.async_write_ha_state() @callback def _async_update_device_data(self): """Handle update callbacks.""" try: data = self._tado.data["device"][self.device_id] except KeyError: return if self.device_variable == "tado bridge status": self._state = data.get("connectionState", {}).get("value", False) @property def device_info(self): """Return the device_info of the device.""" return { "identifiers": {(DOMAIN, self.device_id)}, "name": self.device_name, "manufacturer": DEFAULT_NAME, "model": TADO_BRIDGE, }
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/tado/sensor.py
"""Support for HomeMatic devices.""" from datetime import datetime from functools import partial import logging from pyhomematic import HMConnection import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_MODE, ATTR_NAME, CONF_HOST, CONF_HOSTS, CONF_PASSWORD, CONF_PLATFORM, CONF_SSL, CONF_USERNAME, CONF_VERIFY_SSL, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from .const import ( ATTR_ADDRESS, ATTR_CHANNEL, ATTR_DEVICE_TYPE, ATTR_DISCOVER_DEVICES, ATTR_DISCOVERY_TYPE, ATTR_ERRORCODE, ATTR_INTERFACE, ATTR_LOW_BAT, ATTR_LOWBAT, ATTR_MESSAGE, ATTR_PARAM, ATTR_PARAMSET, ATTR_PARAMSET_KEY, ATTR_RX_MODE, ATTR_TIME, ATTR_UNIQUE_ID, ATTR_VALUE, ATTR_VALUE_TYPE, CONF_CALLBACK_IP, CONF_CALLBACK_PORT, CONF_INTERFACES, CONF_JSONPORT, CONF_LOCAL_IP, CONF_LOCAL_PORT, CONF_PATH, CONF_PORT, CONF_RESOLVENAMES, CONF_RESOLVENAMES_OPTIONS, DATA_CONF, DATA_HOMEMATIC, DATA_STORE, DISCOVER_BATTERY, DISCOVER_BINARY_SENSORS, DISCOVER_CLIMATE, DISCOVER_COVER, DISCOVER_LIGHTS, DISCOVER_LOCKS, DISCOVER_SENSORS, DISCOVER_SWITCHES, DOMAIN, EVENT_ERROR, EVENT_IMPULSE, EVENT_KEYPRESS, HM_DEVICE_TYPES, HM_IGNORE_DISCOVERY_NODE, HM_IGNORE_DISCOVERY_NODE_EXCEPTIONS, HM_IMPULSE_EVENTS, HM_PRESS_EVENTS, SERVICE_PUT_PARAMSET, SERVICE_RECONNECT, SERVICE_SET_DEVICE_VALUE, SERVICE_SET_INSTALL_MODE, SERVICE_SET_VARIABLE_VALUE, SERVICE_VIRTUALKEY, ) from .entity import HMHub _LOGGER = logging.getLogger(__name__) DEFAULT_LOCAL_IP = "0.0.0.0" DEFAULT_LOCAL_PORT = 0 DEFAULT_RESOLVENAMES = False DEFAULT_JSONPORT = 80 DEFAULT_PORT = 2001 DEFAULT_PATH = "" DEFAULT_USERNAME = "Admin" DEFAULT_PASSWORD = "" DEFAULT_SSL = False DEFAULT_VERIFY_SSL = False DEFAULT_CHANNEL = 1 DEVICE_SCHEMA = vol.Schema( { vol.Required(CONF_PLATFORM): "homematic", vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_ADDRESS): cv.string, vol.Required(ATTR_INTERFACE): cv.string, vol.Optional(ATTR_DEVICE_TYPE): cv.string, vol.Optional(ATTR_CHANNEL, default=DEFAULT_CHANNEL): vol.Coerce(int), vol.Optional(ATTR_PARAM): cv.string, vol.Optional(ATTR_UNIQUE_ID): cv.string, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_INTERFACES, default={}): { cv.match_all: { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string, vol.Optional( CONF_RESOLVENAMES, default=DEFAULT_RESOLVENAMES ): vol.In(CONF_RESOLVENAMES_OPTIONS), vol.Optional(CONF_JSONPORT, default=DEFAULT_JSONPORT): cv.port, vol.Optional( CONF_USERNAME, default=DEFAULT_USERNAME ): cv.string, vol.Optional( CONF_PASSWORD, default=DEFAULT_PASSWORD ): cv.string, vol.Optional(CONF_CALLBACK_IP): cv.string, vol.Optional(CONF_CALLBACK_PORT): cv.port, vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, vol.Optional( CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL ): cv.boolean, } }, vol.Optional(CONF_HOSTS, default={}): { cv.match_all: { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional( CONF_USERNAME, default=DEFAULT_USERNAME ): cv.string, vol.Optional( CONF_PASSWORD, default=DEFAULT_PASSWORD ): cv.string, } }, vol.Optional(CONF_LOCAL_IP, default=DEFAULT_LOCAL_IP): cv.string, vol.Optional(CONF_LOCAL_PORT): cv.port, } ) }, extra=vol.ALLOW_EXTRA, ) SCHEMA_SERVICE_VIRTUALKEY = vol.Schema( { vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper), vol.Required(ATTR_CHANNEL): vol.Coerce(int), vol.Required(ATTR_PARAM): cv.string, 
vol.Optional(ATTR_INTERFACE): cv.string, } ) SCHEMA_SERVICE_SET_VARIABLE_VALUE = vol.Schema( { vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_VALUE): cv.match_all, vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, } ) SCHEMA_SERVICE_SET_DEVICE_VALUE = vol.Schema( { vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper), vol.Required(ATTR_CHANNEL): vol.Coerce(int), vol.Required(ATTR_PARAM): vol.All(cv.string, vol.Upper), vol.Required(ATTR_VALUE): cv.match_all, vol.Optional(ATTR_VALUE_TYPE): vol.In( ["boolean", "dateTime.iso8601", "double", "int", "string"] ), vol.Optional(ATTR_INTERFACE): cv.string, } ) SCHEMA_SERVICE_RECONNECT = vol.Schema({}) SCHEMA_SERVICE_SET_INSTALL_MODE = vol.Schema( { vol.Required(ATTR_INTERFACE): cv.string, vol.Optional(ATTR_TIME, default=60): cv.positive_int, vol.Optional(ATTR_MODE, default=1): vol.All(vol.Coerce(int), vol.In([1, 2])), vol.Optional(ATTR_ADDRESS): vol.All(cv.string, vol.Upper), } ) SCHEMA_SERVICE_PUT_PARAMSET = vol.Schema( { vol.Required(ATTR_INTERFACE): cv.string, vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper), vol.Required(ATTR_PARAMSET_KEY): vol.All(cv.string, vol.Upper), vol.Required(ATTR_PARAMSET): dict, vol.Optional(ATTR_RX_MODE): vol.All(cv.string, vol.Upper), } ) def setup(hass, config): """Set up the Homematic component.""" conf = config[DOMAIN] hass.data[DATA_CONF] = remotes = {} hass.data[DATA_STORE] = set() # Create hosts-dictionary for pyhomematic for rname, rconfig in conf[CONF_INTERFACES].items(): remotes[rname] = { "ip": rconfig.get(CONF_HOST), "port": rconfig.get(CONF_PORT), "path": rconfig.get(CONF_PATH), "resolvenames": rconfig.get(CONF_RESOLVENAMES), "jsonport": rconfig.get(CONF_JSONPORT), "username": rconfig.get(CONF_USERNAME), "password": rconfig.get(CONF_PASSWORD), "callbackip": rconfig.get(CONF_CALLBACK_IP), "callbackport": rconfig.get(CONF_CALLBACK_PORT), "ssl": rconfig[CONF_SSL], "verify_ssl": rconfig.get(CONF_VERIFY_SSL), "connect": True, } for sname, sconfig in conf[CONF_HOSTS].items(): remotes[sname] = { "ip": sconfig.get(CONF_HOST), "port": sconfig[CONF_PORT], "username": sconfig.get(CONF_USERNAME), "password": sconfig.get(CONF_PASSWORD), "connect": False, } # Create server thread bound_system_callback = partial(_system_callback_handler, hass, config) hass.data[DATA_HOMEMATIC] = homematic = HMConnection( local=config[DOMAIN].get(CONF_LOCAL_IP), localport=config[DOMAIN].get(CONF_LOCAL_PORT, DEFAULT_LOCAL_PORT), remotes=remotes, systemcallback=bound_system_callback, interface_id="homeassistant", ) # Start server thread, connect to hosts, initialize to receive events homematic.start() # Stops server when Home Assistant is shutting down hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop) # Init homematic hubs entity_hubs = [] for hub_name in conf[CONF_HOSTS].keys(): entity_hubs.append(HMHub(hass, homematic, hub_name)) def _hm_service_virtualkey(service): """Service to handle virtualkey servicecalls.""" address = service.data.get(ATTR_ADDRESS) channel = service.data.get(ATTR_CHANNEL) param = service.data.get(ATTR_PARAM) # Device not found hmdevice = _device_from_servicecall(hass, service) if hmdevice is None: _LOGGER.error("%s not found for service virtualkey!", address) return # Parameter doesn't exist for device if param not in hmdevice.ACTIONNODE: _LOGGER.error("%s not datapoint in hm device %s", param, address) return # Channel doesn't exist for device if channel not in hmdevice.ACTIONNODE[param]: _LOGGER.error("%i is not a channel in hm device %s", channel, address) return # 
Call parameter hmdevice.actionNodeData(param, True, channel) hass.services.register( DOMAIN, SERVICE_VIRTUALKEY, _hm_service_virtualkey, schema=SCHEMA_SERVICE_VIRTUALKEY, ) def _service_handle_value(service): """Service to call setValue method for HomeMatic system variable.""" entity_ids = service.data.get(ATTR_ENTITY_ID) name = service.data[ATTR_NAME] value = service.data[ATTR_VALUE] if entity_ids: entities = [ entity for entity in entity_hubs if entity.entity_id in entity_ids ] else: entities = entity_hubs if not entities: _LOGGER.error("No HomeMatic hubs available") return for hub in entities: hub.hm_set_variable(name, value) hass.services.register( DOMAIN, SERVICE_SET_VARIABLE_VALUE, _service_handle_value, schema=SCHEMA_SERVICE_SET_VARIABLE_VALUE, ) def _service_handle_reconnect(service): """Service to reconnect all HomeMatic hubs.""" homematic.reconnect() hass.services.register( DOMAIN, SERVICE_RECONNECT, _service_handle_reconnect, schema=SCHEMA_SERVICE_RECONNECT, ) def _service_handle_device(service): """Service to call setValue method for HomeMatic devices.""" address = service.data.get(ATTR_ADDRESS) channel = service.data.get(ATTR_CHANNEL) param = service.data.get(ATTR_PARAM) value = service.data.get(ATTR_VALUE) value_type = service.data.get(ATTR_VALUE_TYPE) # Convert value into correct XML-RPC Type. # https://docs.python.org/3/library/xmlrpc.client.html#xmlrpc.client.ServerProxy if value_type: if value_type == "int": value = int(value) elif value_type == "double": value = float(value) elif value_type == "boolean": value = bool(value) elif value_type == "dateTime.iso8601": value = datetime.strptime(value, "%Y%m%dT%H:%M:%S") else: # Default is 'string' value = str(value) # Device not found hmdevice = _device_from_servicecall(hass, service) if hmdevice is None: _LOGGER.error("%s not found!", address) return hmdevice.setValue(param, value, channel) hass.services.register( DOMAIN, SERVICE_SET_DEVICE_VALUE, _service_handle_device, schema=SCHEMA_SERVICE_SET_DEVICE_VALUE, ) def _service_handle_install_mode(service): """Service to set interface into install mode.""" interface = service.data.get(ATTR_INTERFACE) mode = service.data.get(ATTR_MODE) time = service.data.get(ATTR_TIME) address = service.data.get(ATTR_ADDRESS) homematic.setInstallMode(interface, t=time, mode=mode, address=address) hass.services.register( DOMAIN, SERVICE_SET_INSTALL_MODE, _service_handle_install_mode, schema=SCHEMA_SERVICE_SET_INSTALL_MODE, ) def _service_put_paramset(service): """Service to call the putParamset method on a HomeMatic connection.""" interface = service.data.get(ATTR_INTERFACE) address = service.data.get(ATTR_ADDRESS) paramset_key = service.data.get(ATTR_PARAMSET_KEY) # When passing in the paramset from a YAML file we get an OrderedDict # here instead of a dict, so add this explicit cast. # The service schema makes sure that this cast works. paramset = dict(service.data.get(ATTR_PARAMSET)) rx_mode = service.data.get(ATTR_RX_MODE) _LOGGER.debug( "Calling putParamset: %s, %s, %s, %s, %s", interface, address, paramset_key, paramset, rx_mode, ) homematic.putParamset(interface, address, paramset_key, paramset, rx_mode) hass.services.register( DOMAIN, SERVICE_PUT_PARAMSET, _service_put_paramset, schema=SCHEMA_SERVICE_PUT_PARAMSET, ) return True def _system_callback_handler(hass, config, src, *args): """System callback handler.""" # New devices available at hub if src == "newDevices": (interface_id, dev_descriptions) = args interface = interface_id.split("-")[-1] # Device support active? 
if not hass.data[DATA_CONF][interface]["connect"]: return addresses = [] for dev in dev_descriptions: address = dev["ADDRESS"].split(":")[0] if address not in hass.data[DATA_STORE]: hass.data[DATA_STORE].add(address) addresses.append(address) # Register EVENTS # Search all devices with an EVENTNODE that includes data bound_event_callback = partial(_hm_event_handler, hass, interface) for dev in addresses: hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(dev) if hmdevice.EVENTNODE: hmdevice.setEventCallback(callback=bound_event_callback, bequeath=True) # Create Home Assistant entities if addresses: for component_name, discovery_type in ( ("switch", DISCOVER_SWITCHES), ("light", DISCOVER_LIGHTS), ("cover", DISCOVER_COVER), ("binary_sensor", DISCOVER_BINARY_SENSORS), ("sensor", DISCOVER_SENSORS), ("climate", DISCOVER_CLIMATE), ("lock", DISCOVER_LOCKS), ("binary_sensor", DISCOVER_BATTERY), ): # Get all devices of a specific type found_devices = _get_devices(hass, discovery_type, addresses, interface) # When devices of this type are found # they are setup in Home Assistant and a discovery event is fired if found_devices: discovery.load_platform( hass, component_name, DOMAIN, { ATTR_DISCOVER_DEVICES: found_devices, ATTR_DISCOVERY_TYPE: discovery_type, }, config, ) # Homegear error message elif src == "error": _LOGGER.error("Error: %s", args) (interface_id, errorcode, message) = args hass.bus.fire(EVENT_ERROR, {ATTR_ERRORCODE: errorcode, ATTR_MESSAGE: message}) def _get_devices(hass, discovery_type, keys, interface): """Get the HomeMatic devices for given discovery_type.""" device_arr = [] for key in keys: device = hass.data[DATA_HOMEMATIC].devices[interface][key] class_name = device.__class__.__name__ metadata = {} # Class not supported by discovery type if ( discovery_type != DISCOVER_BATTERY and class_name not in HM_DEVICE_TYPES[discovery_type] ): continue # Load metadata needed to generate a parameter list if discovery_type == DISCOVER_SENSORS: metadata.update(device.SENSORNODE) elif discovery_type == DISCOVER_BINARY_SENSORS: metadata.update(device.BINARYNODE) elif discovery_type == DISCOVER_BATTERY: if ATTR_LOWBAT in device.ATTRIBUTENODE: metadata.update({ATTR_LOWBAT: device.ATTRIBUTENODE[ATTR_LOWBAT]}) elif ATTR_LOW_BAT in device.ATTRIBUTENODE: metadata.update({ATTR_LOW_BAT: device.ATTRIBUTENODE[ATTR_LOW_BAT]}) else: continue else: metadata.update({None: device.ELEMENT}) # Generate options for 1...n elements with 1...n parameters for param, channels in metadata.items(): if ( param in HM_IGNORE_DISCOVERY_NODE and class_name not in HM_IGNORE_DISCOVERY_NODE_EXCEPTIONS.get(param, []) ): continue if discovery_type == DISCOVER_SWITCHES and class_name == "IPKeySwitchLevel": channels.remove(8) channels.remove(12) if discovery_type == DISCOVER_LIGHTS and class_name == "IPKeySwitchLevel": channels.remove(4) # Add devices _LOGGER.debug( "%s: Handling %s: %s: %s", discovery_type, key, param, channels ) for channel in channels: name = _create_ha_id( name=device.NAME, channel=channel, param=param, count=len(channels) ) unique_id = _create_ha_id( name=key, channel=channel, param=param, count=len(channels) ) device_dict = { CONF_PLATFORM: "homematic", ATTR_ADDRESS: key, ATTR_INTERFACE: interface, ATTR_NAME: name, ATTR_DEVICE_TYPE: class_name, ATTR_CHANNEL: channel, ATTR_UNIQUE_ID: unique_id, } if param is not None: device_dict[ATTR_PARAM] = param # Add new device try: DEVICE_SCHEMA(device_dict) device_arr.append(device_dict) except vol.MultipleInvalid as err: _LOGGER.error("Invalid device config: 
%s", str(err)) return device_arr def _create_ha_id(name, channel, param, count): """Generate a unique entity id.""" # HMDevice is a simple device if count == 1 and param is None: return name # Has multiple elements/channels if count > 1 and param is None: return f"{name} {channel}" # With multiple parameters on first channel if count == 1 and param is not None: return f"{name} {param}" # Multiple parameters with multiple channels if count > 1 and param is not None: return f"{name} {channel} {param}" def _hm_event_handler(hass, interface, device, caller, attribute, value): """Handle all pyhomematic device events.""" try: channel = int(device.split(":")[1]) address = device.split(":")[0] hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(address) except (TypeError, ValueError): _LOGGER.error("Event handling channel convert error!") return # Return if not an event supported by device if attribute not in hmdevice.EVENTNODE: return _LOGGER.debug("Event %s for %s channel %i", attribute, hmdevice.NAME, channel) # Keypress event if attribute in HM_PRESS_EVENTS: hass.bus.fire( EVENT_KEYPRESS, {ATTR_NAME: hmdevice.NAME, ATTR_PARAM: attribute, ATTR_CHANNEL: channel}, ) return # Impulse event if attribute in HM_IMPULSE_EVENTS: hass.bus.fire(EVENT_IMPULSE, {ATTR_NAME: hmdevice.NAME, ATTR_CHANNEL: channel}) return _LOGGER.warning("Event is unknown and not forwarded") def _device_from_servicecall(hass, service): """Extract HomeMatic device from service call.""" address = service.data.get(ATTR_ADDRESS) interface = service.data.get(ATTR_INTERFACE) if address == "BIDCOS-RF": address = "BidCoS-RF" if interface: return hass.data[DATA_HOMEMATIC].devices[interface].get(address) for devices in hass.data[DATA_HOMEMATIC].devices.values(): if address in devices: return devices[address]
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/homematic/__init__.py
"""Locks on Zigbee Home Automation networks.""" import functools from zigpy.zcl.foundation import Status from homeassistant.components.lock import ( DOMAIN, STATE_LOCKED, STATE_UNLOCKED, LockEntity, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .core import discovery from .core.const import ( CHANNEL_DOORLOCK, DATA_ZHA, DATA_ZHA_DISPATCHERS, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED, ) from .core.registries import ZHA_ENTITIES from .entity import ZhaEntity # The first state is Zigbee 'Not fully locked' STATE_LIST = [STATE_UNLOCKED, STATE_LOCKED, STATE_UNLOCKED] STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN) VALUE_TO_STATE = dict(enumerate(STATE_LIST)) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Zigbee Home Automation Door Lock from config entry.""" entities_to_create = hass.data[DATA_ZHA][DOMAIN] unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( discovery.async_add_entities, async_add_entities, entities_to_create ), ) hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub) @STRICT_MATCH(channel_names=CHANNEL_DOORLOCK) class ZhaDoorLock(ZhaEntity, LockEntity): """Representation of a ZHA lock.""" def __init__(self, unique_id, zha_device, channels, **kwargs): """Init this sensor.""" super().__init__(unique_id, zha_device, channels, **kwargs) self._doorlock_channel = self.cluster_channels.get(CHANNEL_DOORLOCK) async def async_added_to_hass(self): """Run when about to be added to hass.""" await super().async_added_to_hass() self.async_accept_signal( self._doorlock_channel, SIGNAL_ATTR_UPDATED, self.async_set_state ) @callback def async_restore_last_state(self, last_state): """Restore previous state.""" self._state = VALUE_TO_STATE.get(last_state.state, last_state.state) @property def is_locked(self) -> bool: """Return true if entity is locked.""" if self._state is None: return False return self._state == STATE_LOCKED @property def device_state_attributes(self): """Return state attributes.""" return self.state_attributes async def async_lock(self, **kwargs): """Lock the lock.""" result = await self._doorlock_channel.lock_door() if not isinstance(result, list) or result[0] is not Status.SUCCESS: self.error("Error with lock_door: %s", result) return self.async_write_ha_state() async def async_unlock(self, **kwargs): """Unlock the lock.""" result = await self._doorlock_channel.unlock_door() if not isinstance(result, list) or result[0] is not Status.SUCCESS: self.error("Error with unlock_door: %s", result) return self.async_write_ha_state() async def async_update(self): """Attempt to retrieve state from the lock.""" await super().async_update() await self.async_get_state() @callback def async_set_state(self, attr_id, attr_name, value): """Handle state update from channel.""" self._state = VALUE_TO_STATE.get(value, self._state) self.async_write_ha_state() async def async_get_state(self, from_cache=True): """Attempt to retrieve state from the lock.""" if self._doorlock_channel: state = await self._doorlock_channel.get_attribute_value( "lock_state", from_cache=from_cache ) if state is not None: self._state = VALUE_TO_STATE.get(state, self._state) async def refresh(self, time): """Call async_get_state at an interval.""" await self.async_get_state(from_cache=False)
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/core/info", json={"result": "ok", "data": {"version_latest": "1.0.0"}}, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 7 assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not 
aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
GenericStudent/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/zha/lock.py