Dataset columns (one row per source/test pair):

  input      string  (lengths 53 to 297k)   - source module under test
  output     string  (604 distinct values)  - matching test module
  repo_name  string  (376 distinct values)
  test_path  string  (583 distinct values)
  code_path  string  (lengths 7 to 116)
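A minimal sketch of loading and inspecting a dataset with this schema via the Hugging Face `datasets` library; the identifier "org/code-to-test-pairs" below is a placeholder, not the real Hub path:

from datasets import load_dataset

ds = load_dataset("org/code-to-test-pairs", split="train")  # placeholder id
row = ds[0]
# Each row pairs a source module ("input") with its test module ("output"),
# plus provenance columns for the originating repository and file paths.
print(row["repo_name"], row["code_path"], "->", row["test_path"])
print(row["input"][:200])  # first 200 characters of the source file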
# input (code_path: cfme/automate/dialogs/dialog_box.py)
import attr
from cached_property import cached_property
from navmazing import NavigateToAttribute

from cfme.automate.dialogs import AddBoxView
from cfme.automate.dialogs import BoxForm
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.modeling.base import parent_of_type
from cfme.utils.appliance.implementations.ui import CFMENavigateStep
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.appliance.implementations.ui import navigator


class EditBoxView(BoxForm):
    """EditBox View."""

    @property
    def is_displayed(self):
        return (
            self.in_customization and
            self.title.text == f"Editing Dialog {self.box_label} [Box Information]"
        )


@attr.s
class Box(BaseEntity):
    """A class representing one Box of dialog."""
    box_label = attr.ib()
    box_desc = attr.ib(default=None)

    from cfme.automate.dialogs.dialog_element import ElementCollection
    _collections = {'elements': ElementCollection}

    @cached_property
    def elements(self):
        return self.collections.elements

    @property
    def tree_path(self):
        return self.parent.tree_path + [self.box_label]

    @property
    def tab(self):
        from cfme.automate.dialogs.dialog_tab import Tab
        return parent_of_type(self, Tab)


@attr.s
class BoxCollection(BaseCollection):

    ENTITY = Box

    @property
    def tree_path(self):
        return self.parent.tree_path

    def create(self, box_label=None, box_desc=None):
        """Create box method.

        Args:
            box_label: Label of the new box.
            box_desc: Description of the new box.
        """
        view = navigate_to(self, "Add")
        view.new_box.click()
        view.edit_box.click()
        view.fill({'box_label': box_label, 'box_desc': box_desc})
        view.save_button.click()
        return self.instantiate(box_label=box_label, box_desc=box_desc)


@navigator.register(BoxCollection)
class Add(CFMENavigateStep):
    VIEW = AddBoxView
    prerequisite = NavigateToAttribute('parent.parent', 'Add')

    def step(self, *args, **kwargs):
        self.prerequisite_view.add_section.click()
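A hypothetical usage sketch of the collection above; the `tab` object and the `boxes` collection name are illustrative assumptions, not confirmed cfme API:

# `tab` stands in for a Tab entity obtained elsewhere in the cfme model.
box = tab.collections.boxes.create(
    box_label="Box 1",       # label shown in the dialog tree
    box_desc="example box",  # optional description
)
assert box.tree_path[-1] == "Box 1"  # tree_path appends box_label to the parent path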
""" This test generate one default report for each category under reports accordion """ import pytest from cfme import test_requirements from cfme.infrastructure.provider import InfraProvider from cfme.markers.env_markers.provider import ONE # from selenium.common.exceptions import NoSuchElementException # from utils.log import logger pytestmark = [ pytest.mark.tier(3), test_requirements.report, pytest.mark.usefixtures('setup_provider_modscope'), pytest.mark.provider([InfraProvider], scope='module', selector=ONE), ] report_path = [ ["Configuration Management", "Virtual Machines", "Guest OS Information - any OS"], ["Migration Readiness", "Virtual Machines", "Summary - VMs migration ready"], ["Operations", "Virtual Machines", "VMs not Powered On"], ["VM Sprawl", "Candidates", "Summary of VM Create and Deletes"], ["Relationships", "Virtual Machines, Folders, Clusters", "VM Relationships"], ["Events", "Operations", "Events for VM prod_webserver"], ["Performance by Asset Type", "Virtual Machines", "Top CPU Consumers (weekly)"], ["Running Processes", "Virtual Machines", "Processes for prod VMs sort by CPU Time"], ["Trending", "Clusters", "Cluster CPU Trends (last week)"], ["Tenants", "Tenant Quotas", "Tenant Quotas"], ["Provisioning", "Activity Reports", "Provisioning Activity - by VM"], ] @pytest.mark.rhel_testing @pytest.mark.parametrize('path', report_path, scope="module", ids=lambda param: '/'.join(param[:2])) def test_reports_generate_report(request, path, appliance): """ This Tests run one default report for each category Steps: *Run one default report *Delete this Saved Report from the Database Polarion: assignee: pvala casecomponent: Reporting caseimportance: high initialEstimate: 1/16h """ report = appliance.collections.reports.instantiate( type=path[0], subtype=path[1], menu_name=path[2] ).queue(wait_for_finish=True) request.addfinalizer(report.delete_if_exists) assert report.exists
repo_name: ManageIQ/integration_tests
test_path: cfme/tests/intelligence/reports/test_generate_report.py
code_path: cfme/automate/dialogs/dialog_box.py
# input (code_path: cfme/common/physical_server_views.py)
from lxml.html import document_fromstring
from widgetastic.utils import Parameter
from widgetastic.widget import ParametrizedView
from widgetastic.widget import Text
from widgetastic.widget import View
from widgetastic_patternfly import Accordion
from widgetastic_patternfly import BootstrapNav
from widgetastic_patternfly import BreadCrumb
from widgetastic_patternfly import Dropdown

from cfme.common import BaseLoggedInPage
from cfme.common import TimelinesView
from widgetastic_manageiq import BaseEntitiesView
from widgetastic_manageiq import BaseNonInteractiveEntitiesView
from widgetastic_manageiq import BootstrapTreeview
from widgetastic_manageiq import Button
from widgetastic_manageiq import ItemsToolBarViewSelector
from widgetastic_manageiq import JSBaseEntity
from widgetastic_manageiq import ManageIQTree
from widgetastic_manageiq import Search
from widgetastic_manageiq import SummaryTable


class ComputePhysicalInfrastructureServersView(BaseLoggedInPage):
    """Common parts for server views."""
    title = Text('.//div[@id="center_div" or @id="main-content"]//h1')

    @property
    def in_compute_physical_infrastructure_servers(self):
        return (self.logged_in_as_current_user and
                self.navigation.currently_selected == ["Compute", "Physical Infrastructure",
                                                       "Servers"])


class PhysicalServerEntity(JSBaseEntity):
    @property
    def data(self):
        data_dict = super().data
        if 'quadicon' in data_dict and data_dict['quadicon']:
            quad_data = document_fromstring(data_dict['quadicon'])
            data_dict['no_host'] = int(quad_data.xpath(self.QUADRANT.format(pos="a"))[0].text)
            data_dict['state'] = quad_data.xpath(self.QUADRANT.format(pos="b"))[0].get('style')
            data_dict['vendor'] = quad_data.xpath(self.QUADRANT.format(pos="c"))[0].get('alt')
            data_dict['creds'] = quad_data.xpath(self.QUADRANT.format(pos="d"))[0].get('alt')
        return data_dict


class PhysicalServerDetailsToolbar(View):
    """Represents physical toolbar and its controls."""
    monitoring = Dropdown(text="Monitoring")
    configuration = Dropdown(text="Configuration")
    policy = Dropdown(text="Policy")
    power = Dropdown(text="Power")
    identify = Dropdown(text="Identify")
    lifecycle = Dropdown(text="Lifecycle")

    @ParametrizedView.nested
    class custom_button(ParametrizedView):  # noqa
        PARAMETERS = ("button_group", )
        _dropdown = Dropdown(text=Parameter("button_group"))

        def item_select(self, button, handle_alert=False):
            self._dropdown.item_select(button, handle_alert=handle_alert)


class PhysicalServerDetailsEntities(View):
    """Represents Details page."""
    properties = SummaryTable(title="Properties")
    networks = SummaryTable(title="Networks")
    relationships = SummaryTable(title="Relationships")
    power_management = SummaryTable(title="Power Management")
    assets = SummaryTable(title="Assets")
    firmware = SummaryTable(title="Firmware")
    network_devices = SummaryTable(title="Network Devices")
    smart = SummaryTable(title="Smart Management")


class PhysicalServerDetailsView(ComputePhysicalInfrastructureServersView):
    """Main PhysicalServer details page."""
    breadcrumb = BreadCrumb(locator='.//ol[@class="breadcrumb"]')
    toolbar = View.nested(PhysicalServerDetailsToolbar)
    entities = View.nested(PhysicalServerDetailsEntities)

    @property
    def is_displayed(self):
        title = "{name} (Summary)".format(name=self.context["object"].name)
        return (self.in_compute_physical_infrastructure_servers and
                self.breadcrumb.active_location == title)


class PhysicalServerTimelinesView(TimelinesView, ComputePhysicalInfrastructureServersView):
    """Represents a PhysicalServer Timelines page."""
    pass


class PhysicalServerProvisionView(BaseLoggedInPage):
    """Represents the Provision Physical Server page."""
    breadcrumb = BreadCrumb(locator='.//ol[@class="breadcrumb"]')

    @property
    def is_displayed(self):
        title = "Add PhysicalServer"
        return self.breadcrumb.active_location == title


class PhysicalServerManagePoliciesView(BaseLoggedInPage):
    """PhysicalServer's Manage Policies view."""
    policies = BootstrapTreeview("protectbox")
    entities = View.nested(BaseNonInteractiveEntitiesView)
    save = Button("Save")
    reset = Button("Reset")
    cancel = Button("Cancel")
    breadcrumb = BreadCrumb(locator='.//ol[@class="breadcrumb"]')

    @property
    def is_displayed(self):
        title = "'Physical Server' Policy Assignment"
        return self.breadcrumb.active_location == title


class PhysicalServerEditTagsView(BaseLoggedInPage):
    """PhysicalServer's EditTags view."""
    policies = BootstrapTreeview("protectbox")
    entities = View.nested(BaseNonInteractiveEntitiesView)
    breadcrumb = BreadCrumb(locator='.//ol[@class="breadcrumb"]')

    @property
    def is_displayed(self):
        title = "Tag Assignment"
        return self.breadcrumb.active_location == title


class PhysicalServersToolbar(View):
    """Represents hosts toolbar and its controls."""
    configuration = Dropdown(text="Configuration")
    policy = Dropdown(text="Policy")
    lifecycle = Dropdown(text="Lifecycle")
    monitoring = Dropdown(text="Monitoring")
    power = Dropdown(text="Power")
    identify = Dropdown(text="Identify")
    view_selector = View.nested(ItemsToolBarViewSelector)

    @ParametrizedView.nested
    class custom_button(ParametrizedView):  # noqa
        PARAMETERS = ("button_group",)
        _dropdown = Dropdown(text=Parameter("button_group"))

        def item_select(self, button, handle_alert=False):
            self._dropdown.item_select(button, handle_alert=handle_alert)


class PhysicalServerSideBar(View):
    """Represents left side bar. It usually contains navigation, filters, etc."""
    @View.nested
    class filters(Accordion):  # noqa
        tree = ManageIQTree()


class PhysicalServerEntitiesView(BaseEntitiesView):
    """Represents the view with different items like hosts."""
    @property
    def entity_class(self):
        return PhysicalServerEntity


class PhysicalServersView(ComputePhysicalInfrastructureServersView):
    toolbar = View.nested(PhysicalServersToolbar)
    sidebar = View.nested(PhysicalServerSideBar)
    search = View.nested(Search)
    including_entities = View.include(PhysicalServerEntitiesView, use_parent=True)

    @property
    def is_displayed(self):
        return (self.in_compute_physical_infrastructure_servers and
                self.title.text == "Physical Servers")

    @View.nested
    class my_filters(Accordion):  # noqa
        ACCORDION_NAME = "My Filters"
        navigation = BootstrapNav('.//div/ul')
        tree = ManageIQTree()


class PhysicalServerNetworkDevicesView(ComputePhysicalInfrastructureServersView):
    """Represents the Network Devices page"""

    @property
    def is_displayed(self):
        return ("Network Devices" in self.title.text and
                self.in_compute_physical_infrastructure_servers)


class PhysicalServerStorageDevicesView(ComputePhysicalInfrastructureServersView):
    """Represents the Storage Devices page"""

    @property
    def is_displayed(self):
        return ("Storage Devices" in self.title.text and
                self.in_compute_physical_infrastructure_servers)
""" This test generate one default report for each category under reports accordion """ import pytest from cfme import test_requirements from cfme.infrastructure.provider import InfraProvider from cfme.markers.env_markers.provider import ONE # from selenium.common.exceptions import NoSuchElementException # from utils.log import logger pytestmark = [ pytest.mark.tier(3), test_requirements.report, pytest.mark.usefixtures('setup_provider_modscope'), pytest.mark.provider([InfraProvider], scope='module', selector=ONE), ] report_path = [ ["Configuration Management", "Virtual Machines", "Guest OS Information - any OS"], ["Migration Readiness", "Virtual Machines", "Summary - VMs migration ready"], ["Operations", "Virtual Machines", "VMs not Powered On"], ["VM Sprawl", "Candidates", "Summary of VM Create and Deletes"], ["Relationships", "Virtual Machines, Folders, Clusters", "VM Relationships"], ["Events", "Operations", "Events for VM prod_webserver"], ["Performance by Asset Type", "Virtual Machines", "Top CPU Consumers (weekly)"], ["Running Processes", "Virtual Machines", "Processes for prod VMs sort by CPU Time"], ["Trending", "Clusters", "Cluster CPU Trends (last week)"], ["Tenants", "Tenant Quotas", "Tenant Quotas"], ["Provisioning", "Activity Reports", "Provisioning Activity - by VM"], ] @pytest.mark.rhel_testing @pytest.mark.parametrize('path', report_path, scope="module", ids=lambda param: '/'.join(param[:2])) def test_reports_generate_report(request, path, appliance): """ This Tests run one default report for each category Steps: *Run one default report *Delete this Saved Report from the Database Polarion: assignee: pvala casecomponent: Reporting caseimportance: high initialEstimate: 1/16h """ report = appliance.collections.reports.instantiate( type=path[0], subtype=path[1], menu_name=path[2] ).queue(wait_for_finish=True) request.addfinalizer(report.delete_if_exists) assert report.exists
repo_name: ManageIQ/integration_tests
test_path: cfme/tests/intelligence/reports/test_generate_report.py
code_path: cfme/common/physical_server_views.py
""" A model of a PXE Server in CFME """ import attr from navmazing import NavigateToAttribute from navmazing import NavigateToSibling from selenium.common.exceptions import NoSuchElementException from varmeth import variable from widgetastic.widget import Checkbox from widgetastic.widget import Text from widgetastic.widget import View from widgetastic_patternfly import Accordion from widgetastic_patternfly import BootstrapSelect from widgetastic_patternfly import Button from widgetastic_patternfly import Dropdown from cfme.base import BaseCollection from cfme.base import BaseEntity from cfme.common import BaseLoggedInPage from cfme.exceptions import displayed_not_implemented from cfme.utils import conf from cfme.utils import ParamClassName from cfme.utils.appliance import Navigatable from cfme.utils.appliance.implementations.ui import CFMENavigateStep from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.appliance.implementations.ui import navigator from cfme.utils.datafile import load_data_file from cfme.utils.path import project_path from cfme.utils.pretty import Pretty from cfme.utils.update import Updateable from cfme.utils.wait import wait_for from widgetastic_manageiq import Input from widgetastic_manageiq import ManageIQTree from widgetastic_manageiq import ScriptBox from widgetastic_manageiq import SummaryTable from widgetastic_manageiq import Table class PXEToolBar(View): """ represents PXE toolbar and its controls """ # todo: add back button later configuration = Dropdown(text='Configuration') class PXESideBar(View): """ represents left side bar. it usually contains navigation, filters, etc """ @View.nested class servers(Accordion): # noqa ACCORDION_NAME = "PXE Servers" tree = ManageIQTree() @View.nested class templates(Accordion): # noqa ACCORDION_NAME = "Customization Templates" tree = ManageIQTree() @View.nested class image_types(Accordion): # noqa ACCORDION_NAME = "System Image Types" tree = ManageIQTree() @View.nested class datastores(Accordion): # noqa ACCORDION_NAME = "ISO Datastores" tree = ManageIQTree() class PXEMainView(BaseLoggedInPage): """ represents whole All PXE Servers page """ toolbar = View.nested(PXEToolBar) sidebar = View.nested(PXESideBar) title = Text('//div[@id="main-content"]//h1') entities = Table(locator='.//div[@id="records_div" or @id="main_div"]//table') @property def is_displayed(self): return self.navigation.currently_selected == ['Compute', 'Infrastructure', 'PXE'] class PXEServersView(PXEMainView): """ represents whole All PXE Servers page """ @property def is_displayed(self): return (super().is_displayed and self.title.text == 'All PXE Servers') class PXEDetailsToolBar(PXEToolBar): """ represents the toolbar which appears when any pxe entity is clicked """ reload = Button(title='Refresh this page') class PXEServerDetailsView(PXEMainView): """ represents Server Details view """ toolbar = View.nested(PXEDetailsToolBar) @View.nested class entities(View): # noqa basic_information = SummaryTable(title="Basic Information") pxe_image_menus = SummaryTable(title='PXE Image Menus') is_displayed = displayed_not_implemented class PXEServerForm(View): title = Text('//div[@id="main-content"]//h1') # common fields name = Input(id='name') depot_type = BootstrapSelect(id='log_protocol') access_url = Input(id='access_url') pxe_dir = Input(id='pxe_directory') windows_images_dir = Input(id='windows_images_directory') customization_dir = Input(id='customization_directory') filename = Input(id='pxemenu_0') uri = Input(id='uri') # both NFS 
and Samba # Samba only username = Input(id='log_userid') password = Input(id='log_password') confirm_password = Input(id='log_verify') validate = Button('Validate the credentials by logging into the Server') is_displayed = displayed_not_implemented class PXEServerAddView(PXEServerForm): """ represents Add New PXE Server view """ add = Button('Add') cancel = Button('Cancel') class PXEServerEditView(PXEServerForm): """ represents PXE Server Edit view """ save = Button('Save') reset = Button('Reset') cancel = Button('Cancel') class PXEImageEditView(View): """ it can be found when some image is clicked in PXE Server Tree """ title = Text('//div[@id="main-content"]//h1') default_for_windows = Checkbox(id='default_for_windows') type = BootstrapSelect(id='image_typ') save = Button('Save') reset = Button('Reset') cancel = Button('Cancel') is_displayed = displayed_not_implemented class PXEServer(Updateable, Pretty, Navigatable): """Model of a PXE Server object in CFME Args: name: Name of PXE server. depot_type: Depot type, either Samba or Network File System. uri: The Depot URI. userid: The Samba username. password: The Samba password. access_url: HTTP access path for PXE server. pxe_dir: The PXE dir for accessing configuration. windows_dir: Windows source directory. customize_dir: Customization directory for templates. menu_filename: Menu filename for iPXE/syslinux menu. """ pretty_attrs = ['name', 'uri', 'access_url'] _param_name = ParamClassName('name') def __init__(self, name=None, depot_type=None, uri=None, userid=None, password=None, access_url=None, pxe_dir=None, windows_dir=None, customize_dir=None, menu_filename=None, appliance=None): Navigatable.__init__(self, appliance=appliance) self.name = name self.depot_type = depot_type self.uri = uri self.userid = userid # todo: turn into Credentials class self.password = password self.access_url = access_url self.pxe_dir = pxe_dir self.windows_dir = windows_dir self.customize_dir = customize_dir self.menu_filename = menu_filename def create(self, cancel=False, refresh=True, refresh_timeout=120): """ Creates a PXE server object Args: cancel (boolean): Whether to cancel out of the creation. The cancel is done after all the information present in the PXE Server has been filled in the UI. refresh (boolean): Whether to run the refresh operation on the PXE server after the add has been completed. 
""" view = navigate_to(self, 'Add') view.fill({'name': self.name, 'depot_type': self.depot_type, 'access_url': self.access_url, 'pxe_dir': self.pxe_dir, 'windows_images_dir': self.windows_dir, 'customization_dir': self.customize_dir, 'filename': self.menu_filename, 'uri': self.uri, # Samba only 'username': self.userid, 'password': self.password, 'confirm_password': self.password}) if self.depot_type == 'Samba' and self.userid and self.password: view.validate.click() main_view = self.create_view(PXEServersView) if cancel: view.cancel.click() main_view.flash.assert_success_message('Add of new PXE Server ' 'was cancelled by the user') else: view.add.click() main_view.flash.assert_no_error() if refresh: self.refresh(timeout=refresh_timeout) @variable(alias="db") def exists(self): """ Checks if the PXE server already exists """ dbs = self.appliance.db.client candidates = list(dbs.session.query(dbs["pxe_servers"])) return self.name in [s.name for s in candidates] @exists.variant('ui') def exists_ui(self): """ Checks if the PXE server already exists """ try: navigate_to(self, 'Details') return True except NoSuchElementException: return False def update(self, updates, cancel=False): """ Updates a PXE server in the UI. Better to use utils.update.update context manager than call this directly. Args: updates (dict): fields that are changing. cancel (boolean): whether to cancel out of the update. """ view = navigate_to(self, 'Edit') view.fill(updates) if updates.get('userid') or updates.get('password'): view.validate.click() name = updates.get('name') or self.name main_view = self.create_view(PXEServersView, override=updates) if cancel: view.cancel.click() main_view.flash.assert_success_message('Edit of PXE Server "{}" was ' 'cancelled by the user'.format(name)) else: view.save.click() main_view.flash.assert_no_error() def delete(self, cancel=True): """ Deletes a PXE server from CFME Args: cancel: Whether to cancel the deletion, defaults to True """ view = navigate_to(self, 'Details') view.toolbar.configuration.item_select('Remove this PXE Server from Inventory', handle_alert=not cancel) if not cancel: main_view = self.create_view(PXEServersView) main_view.flash.assert_no_error() else: navigate_to(self, 'Details') def refresh(self, wait=True, timeout=120): """ Refreshes the PXE relationships and waits for it to be updated """ view = navigate_to(self, 'Details') last_time = view.entities.basic_information.get_text_of('Last Refreshed On') view.toolbar.configuration.item_select('Refresh Relationships', handle_alert=True) view.flash.assert_success_message('PXE Server "{}": Refresh Relationships ' 'successfully initiated'.format(self.name)) if wait: basic_info = view.entities.basic_information wait_for(lambda lt: lt != basic_info.get_text_of('Last Refreshed On'), func_args=[last_time], fail_func=view.toolbar.reload.click, num_sec=timeout, message="pxe refresh") @variable(alias='db') def get_pxe_image_type(self, image_name): pxe_i = self.appliance.db.client["pxe_images"] pxe_s = self.appliance.db.client["pxe_servers"] pxe_t = self.appliance.db.client["pxe_image_types"] hosts = list(self.appliance.db.client.session.query(pxe_t.name) .join(pxe_i, pxe_i.pxe_image_type_id == pxe_t.id) .join(pxe_s, pxe_i.pxe_server_id == pxe_s.id) .filter(pxe_s.name == self.name) .filter(pxe_i.name == image_name)) if hosts: return hosts[0][0] else: return None @get_pxe_image_type.variant('ui') def get_pxe_image_type_ui(self, image_name): view = navigate_to(self, 'Details') view.sidebar.servers.tree.click_path('All PXE Servers', 
self.name, 'PXE Images', image_name) details_view = self.create_view(PXESystemImageTypeDetailsView) return details_view.entities.basic_information.get_text_of('Type') def set_pxe_image_type(self, image_name, image_type): """ Function to set the image type of a PXE image """ # todo: maybe create appropriate navmazing destinations instead ? if self.get_pxe_image_type(image_name) != image_type: view = navigate_to(self, 'Details') view.sidebar.servers.tree.click_path('All PXE Servers', self.name, 'PXE Images', image_name) details_view = self.create_view(PXESystemImageTypeDetailsView) details_view.toolbar.configuration.item_select('Edit this PXE Image') edit_view = self.create_view(PXEImageEditView) edit_view.fill({'type': image_type}) edit_view.save.click() @navigator.register(PXEServer, 'All') class PXEServerAll(CFMENavigateStep): VIEW = PXEServersView prerequisite = NavigateToSibling('PXEMainPage') def step(self, *args, **kwargs): self.view.sidebar.servers.tree.click_path('All PXE Servers') @navigator.register(PXEServer, 'Add') class PXEServerAdd(CFMENavigateStep): VIEW = PXEServerAddView prerequisite = NavigateToSibling('All') def step(self, *args, **kwargs): self.prerequisite_view.toolbar.configuration.item_select('Add a New PXE Server') @navigator.register(PXEServer, 'Details') class PXEServerDetails(CFMENavigateStep): VIEW = PXEServerDetailsView prerequisite = NavigateToSibling('All') def step(self, *args, **kwargs): self.prerequisite_view.sidebar.servers.tree.click_path('All PXE Servers', self.obj.name) @navigator.register(PXEServer, 'Edit') class PXEServerEdit(CFMENavigateStep): VIEW = PXEServerEditView prerequisite = NavigateToSibling('Details') def step(self, *args, **kwargs): self.prerequisite_view.toolbar.configuration.item_select('Edit this PXE Server') class PXECustomizationTemplatesView(PXEMainView): """ represents Customization Template Groups page """ entities = Table(locator='.//div[@id="template_folders_div"]/table') table = Table("//div[@id='main_div']//table") @property def is_displayed(self): return (super().is_displayed and self.title.text == 'All Customization Templates - System Image Types') class PXECustomizationTemplateDetailsView(PXEMainView): """ represents some certain Customization Template Details page """ toolbar = View.nested(PXEDetailsToolBar) @View.nested class entities(View): # noqa basic_information = SummaryTable(title="Basic Information") script = ScriptBox(locator='//textarea[contains(@id, "script_data")]') @property def is_displayed(self): if getattr(self.context['object'], 'name'): title = 'Customization Template "{name}"'.format(name=self.context['object'].name) return (super().is_displayed and self.title.text == title) else: return False class PXECustomizationTemplateForm(View): title = Text('//div[@id="main-content"]//h1') name = Input(id='name') description = Input(id='description') image_type = BootstrapSelect(id='img_typ') type = BootstrapSelect(id='typ') script = ScriptBox(locator='//textarea[contains(@id, "script_data")]') is_displayed = displayed_not_implemented class PXECustomizationTemplateAddView(PXECustomizationTemplateForm): add = Button('Add') cancel = Button('Cancel') class PXECustomizationTemplateEditView(PXECustomizationTemplateForm): save = Button('Save') reset = Button('Reset') cancel = Button('Cancel') class PXECustomizationTemplateCopyView(PXECustomizationTemplateForm): toolbar = View.nested(PXEDetailsToolBar) add = Button('Add') cancel = Button('Cancel') @attr.s class CustomizationTemplate(Updateable, Pretty, BaseEntity): """ 
Model of a Customization Template in CFME """ pretty_attrs = ['name', 'image_type'] name = attr.ib(default=None) description = attr.ib(default=None) script_data = attr.ib(default=None) image_type = attr.ib(default=None) script_type = attr.ib(default=None) @variable(alias='db') def exists(self): """ Checks if the Customization template already exists """ dbs = self.appliance.db.client candidates = list(dbs.session.query(dbs["customization_templates"])) return self.name in [s.name for s in candidates] @exists.variant('ui') def exists_ui(self): """ Checks if the Customization template already exists """ try: navigate_to(self, 'Details') return True except NoSuchElementException: return False def update(self, updates, cancel=False): """ Updates a Customization Template server in the UI. Better to use utils.update.update context manager than call this directly. Args: updates (dict): fields that are changing. cancel (boolean): whether to cancel out of the update. """ if 'image_type' in updates and updates['image_type'] is None: updates['image_type'] = '<Choose>' elif 'script_type' in updates and updates['script_type'] is None: updates['script_type'] = '<Choose>' view = navigate_to(self, 'Edit') view.fill(updates) main_view = self.create_view(PXECustomizationTemplatesView, override=updates) if cancel: view.cancel.click() else: view.save.click() main_view.flash.assert_no_error() def copy(self, name=None, description=None, cancel=False): """ This method is used to copy a Customization Template server via UI. Args: name (str): This field contains the name of the newly copied Customization Template. description (str) : This field contains the description of the newly copied Customization Template. cancel (bool): It's used for flag to cancel or not the copy operation. """ view = navigate_to(self, 'Copy') name = name or f'Copy of {self.name}' description = description or f'Copy of {self.description}' view.fill({'name': name, 'description': description}) customization_template = self.parent.instantiate(name, description, self.script_data, self.image_type, self.script_type) if cancel: view.cancel.click() else: view.add.click() main_view = self.create_view(PXECustomizationTemplatesView) main_view.flash.assert_no_error() return customization_template @attr.s class CustomizationTemplateCollection(BaseCollection): """Collection class for CustomizationTemplate""" ENTITY = CustomizationTemplate def create(self, name, description, image_type, script_type, script_data, cancel=False): """ Creates a Customization Template object Args: cancel (boolean): Whether to cancel out of the creation. The cancel is done after all the information present in the CT has been filled in the UI. name: Name of CT description:description: The description field of CT. image_type: Image type of the CT. script_data: Contains the script data. script_type: It specifies the script_type of the script. 
""" customization_templates = self.instantiate(name, description, script_data, image_type, script_type) view = navigate_to(self, 'Add') view.fill({'name': name, 'description': description, 'image_type': image_type, 'type': script_type, 'script': script_data}) main_view = self.create_view(PXECustomizationTemplatesView) if cancel: view.cancel.click() else: view.add.click() main_view.flash.assert_no_error() return customization_templates def delete(self, cancel=False, *ct_objs): """ Deletes a Customization Template server from CFME Args: ct_objs: It's a Customization Template object cancel: Whether to cancel the deletion, defaults to True """ for ct_obj in ct_objs: view = navigate_to(ct_obj, 'Details') view.toolbar.configuration.item_select('Remove this Customization Template', handle_alert=not cancel) view = ct_obj.create_view(PXECustomizationTemplatesView) view.flash.assert_no_error() @navigator.register(CustomizationTemplateCollection, 'All') class CustomizationTemplateAll(CFMENavigateStep): VIEW = PXECustomizationTemplatesView prerequisite = NavigateToSibling('PXEMainPage') def step(self, *args, **kwargs): self.view.sidebar.templates.tree.click_path( 'All Customization Templates - System Image Types' ) @navigator.register(CustomizationTemplateCollection, 'Add') class CustomizationTemplateAdd(CFMENavigateStep): VIEW = PXECustomizationTemplateAddView prerequisite = NavigateToSibling('All') def step(self, *args, **kwargs): self.prerequisite_view.toolbar.configuration.item_select('Add a New Customization Template') @navigator.register(CustomizationTemplate, 'Details') class CustomizationTemplateDetails(CFMENavigateStep): VIEW = PXECustomizationTemplateDetailsView prerequisite = NavigateToAttribute('parent', 'All') def step(self, *args, **kwargs): tree = self.view.sidebar.templates.tree tree.click_path('All Customization Templates - System Image Types', self.obj.image_type, self.obj.name) @navigator.register(CustomizationTemplate, 'Copy') class CustomizationTemplateCopy(CFMENavigateStep): VIEW = PXECustomizationTemplateCopyView prerequisite = NavigateToSibling('Details') def step(self, *args, **kwargs): self.view.toolbar.configuration.item_select("Copy this Customization Template") @navigator.register(CustomizationTemplate, 'Edit') class CustomizationTemplateEdit(CFMENavigateStep): VIEW = PXECustomizationTemplateEditView prerequisite = NavigateToSibling('Details') def step(self, *args, **kwargs): self.prerequisite_view.toolbar.configuration.item_select('Edit this Customization Template') class PXESystemImageTypesView(PXEMainView): """ represents whole All System Image Types page """ @property def is_displayed(self): return (super().is_displayed and self.title.text == 'All System Image Types') class PXESystemImageTypeDetailsView(PXEMainView): toolbar = View.nested(PXEDetailsToolBar) @View.nested class entities(View): # noqa basic_information = SummaryTable(title="Basic Information") is_displayed = displayed_not_implemented class PXESystemImageTypeForm(View): title = Text('//div[@id="main-content"]//h1') name = Input(id='name') type = BootstrapSelect(id='provision_type') is_displayed = displayed_not_implemented class PXESystemImageTypeAddView(PXESystemImageTypeForm): add = Button('Add') cancel = Button('Cancel') class PXESystemImageTypeEditView(PXESystemImageTypeForm): save = Button('Save') reset = Button('Reset') cancel = Button('Cancel') @attr.s class SystemImage(Updateable, BaseEntity): """Model of an ISO System Image in CFME. Args: name: The name of the System Image. 
It's the same as ISO filename in ISO domain image_type: SystemImageType object datastore: ISODatastore object """ name = attr.ib(default=None) image_type = attr.ib(default=None) datastore = attr.ib(default=None) def set_image_type(self): """Changes the Type field in Basic Information table for System Image in UI.""" view = navigate_to(self, 'Edit') changed = view.image_type.fill_with(self.image_type.name) if changed: view.save.click() else: view.cancel.click() @attr.s class SystemImageCollection(BaseCollection): ENTITY = SystemImage class PXESystemImageDeatilsView(PXEMainView): @property def is_displayed(self): return self.sidebar.datastores.tree.read()[-1] == self.context['object'].name @View.nested class entities(View): # noqa basic_information = SummaryTable(title="Basic Information") class PXESystemImageEditView(PXEMainView): is_displayed = displayed_not_implemented image_type = BootstrapSelect(id='image_typ') save = Button('Save') reset = Button('Reset') cancel = Button('Cancel') @navigator.register(SystemImage, 'Details') class SystemImageDetails(CFMENavigateStep): VIEW = PXESystemImageDeatilsView prerequisite = NavigateToSibling('PXEMainPage') def step(self, *args, **kwargs): self.view.sidebar.datastores.tree.click_path( 'All ISO Datastores', self.view.context['object'].datastore.provider, 'ISO Images', self.view.context['object'].name) @navigator.register(SystemImage, 'Edit') class SystemImageEdit(CFMENavigateStep): VIEW = PXESystemImageEditView prerequisite = NavigateToSibling('Details') def step(self, *args, **kwargs): self.view.toolbar.configuration.item_select('Edit this ISO Image') @attr.s class SystemImageType(Updateable, Pretty, BaseEntity): """Model of a System Image Type in CFME. Args: name: The name of the System Image Type. provision_type: The provision type, either Vm or Host. """ pretty_attrs = ['name', 'provision_type'] VM_OR_INSTANCE = "VM and Instance" HOST_OR_NODE = "Host / Node" name = attr.ib(default=None) provision_type = attr.ib(default=None) def update(self, updates, cancel=False): """ Updates a System Image Type in the UI. Better to use utils.update.update context manager than call this directly. Args: updates (dict): fields that are changing. cancel (boolean): whether to cancel out of the update. """ view = navigate_to(self, 'Edit') view.fill({'name': updates.get('name'), 'type': updates.get('provision_type')}) if cancel: view.cancel.click() else: view.save.click() # No flash message def delete(self, cancel=True): """ Deletes a System Image Type from CFME Args: cancel: Whether to cancel the deletion, defaults to True """ view = navigate_to(self, 'Details') view.toolbar.configuration.item_select('Remove this System Image Type', handle_alert=not cancel) if not cancel: main_view = self.create_view(PXESystemImageTypesView) msg = f'System Image Type "{self.name}": Delete successful' main_view.flash.assert_success_message(msg) else: navigate_to(self, 'Details') @attr.s class SystemImageTypeCollection(BaseCollection): """ Collection class for SystemImageType. """ ENTITY = SystemImageType def create(self, name, provision_type, cancel=False): """ Creates a System Image Type object Args: name: It contains name of the System Image Type provision_type: Type on Image. i.e Vm and Instance or Host cancel (boolean): Whether to cancel out of the creation. The cancel is done after all the information present in the SIT has been filled in the UI. 
""" system_image_type = self.instantiate(name, provision_type) view = navigate_to(self, 'Add') view.fill({'name': name, 'type': provision_type}) if cancel: view.cancel.click() msg = 'Add of new System Image Type was cancelled by the user' else: view.add.click() msg = f'System Image Type "{name}" was added' main_view = self.create_view(PXESystemImageTypesView) main_view.flash.assert_success_message(msg) return system_image_type def delete(self, cancel=False, *sys_objs): """ This methods deletes the System Image Type using select option, hence can be used for multiple delete. Args: cancel: This is the boolean argument required for handle_alert sys_objs: It's System Image Types object """ view = navigate_to(self, 'All') for sys_obj in sys_objs: view.entities.row(Name=sys_obj.name)[0].click() view.toolbar.configuration.item_select("Remove System Image Types", handle_alert=not cancel) main_view = self.create_view(PXESystemImageTypesView) main_view.flash.assert_no_error() @navigator.register(SystemImageTypeCollection, 'All') class SystemImageTypeAll(CFMENavigateStep): VIEW = PXESystemImageTypesView prerequisite = NavigateToSibling('PXEMainPage') def step(self, *args, **kwargs): self.view.sidebar.image_types.tree.click_path('All System Image Types') @navigator.register(SystemImageTypeCollection, 'Add') class SystemImageTypeAdd(CFMENavigateStep): VIEW = PXESystemImageTypeAddView prerequisite = NavigateToSibling('All') def step(self, *args, **kwargs): self.prerequisite_view.toolbar.configuration.item_select('Add a new System Image Type') @navigator.register(SystemImageType, 'Details') class SystemImageTypeDetails(CFMENavigateStep): VIEW = PXESystemImageTypeDetailsView prerequisite = NavigateToAttribute('parent', 'All') def step(self, *args, **kwargs): self.prerequisite_view.sidebar.image_types.tree.click_path('All System Image Types', self.obj.name) @navigator.register(SystemImageType, 'Edit') class SystemImageTypeEdit(CFMENavigateStep): VIEW = PXESystemImageTypeEditView prerequisite = NavigateToSibling('Details') def step(self, *args, **kwargs): self.prerequisite_view.toolbar.configuration.item_select('Edit this System Image Type') class PXEDatastoresView(PXEMainView): """ represents whole All ISO Datastores page """ @property def is_displayed(self): return (super().is_displayed and self.title.text == 'All ISO Datastores') class PXEDatastoreDetailsView(PXEMainView): toolbar = View.nested(PXEDetailsToolBar) @View.nested class entities(View): # noqa basic_information = SummaryTable(title="Basic Information") is_displayed = displayed_not_implemented class PXEDatastoreForm(View): title = Text('//div[@id="main-content"]//h1') provider = BootstrapSelect(id='ems_id') is_displayed = displayed_not_implemented class PXEDatastoreAddView(PXEDatastoreForm): add = Button('Add') cancel = Button('Cancel') class PXEDatastoreEditView(PXEDatastoreForm): save = Button('Save') reset = Button('Reset') cancel = Button('Cancel') class ISODatastore(Updateable, Pretty, Navigatable): """Model of a PXE Server object in CFME Args: provider: Provider name. """ _param_name = ParamClassName('ds_name') pretty_attrs = ['provider'] def __init__(self, provider=None, appliance=None): Navigatable.__init__(self, appliance=appliance) self.provider = provider def create(self, cancel=False, refresh=True, refresh_timeout=120): """ Creates an ISO datastore object Args: cancel (boolean): Whether to cancel out of the creation. The cancel is done after all the information present in the ISO datastore has been filled in the UI. 
refresh (boolean): Whether to run the refresh operation on the ISO datastore after the add has been completed. """ view = navigate_to(self, 'Add') view.fill({'provider': self.provider}) main_view = self.create_view(PXEDatastoresView) if cancel: view.cancel.click() msg = 'Add of new ISO Datastore was cancelled by the user' else: view.add.click() msg = f'ISO Datastore "{self.provider}" was added' main_view.flash.assert_success_message(msg) if refresh: self.refresh(timeout=refresh_timeout) @variable(alias='db') def exists(self): """ Checks if the ISO Datastore already exists via db """ iso = self.appliance.db.client['iso_datastores'] ems = self.appliance.db.client['ext_management_systems'] name = self.provider iso_ds = list(self.appliance.db.client.session.query(iso.id) .join(ems, iso.ems_id == ems.id) .filter(ems.name == name)) if iso_ds: return True else: return False @exists.variant('ui') def exists_ui(self): """ Checks if the ISO Datastore already exists via UI """ try: navigate_to(self, 'Details') return True except NoSuchElementException: return False def delete(self, cancel=True): """ Deletes an ISO Datastore from CFME Args: cancel: Whether to cancel the deletion, defaults to True """ view = navigate_to(self, 'Details') view.toolbar.configuration.item_select('Remove this ISO Datastore from Inventory', handle_alert=not cancel) if not cancel: main_view = self.create_view(PXEDatastoresView) main_view.flash.assert_success_message('ISO Datastore "{}": Delete successful' .format(self.provider)) else: navigate_to(self, 'Details') def refresh(self, wait=True, timeout=120): """ Refreshes the PXE relationships and waits for it to be updated """ view = navigate_to(self, 'Details') basic_info = view.entities.basic_information last_time = basic_info.get_text_of('Last Refreshed On') view.toolbar.configuration.item_select('Refresh Relationships', handle_alert=True) view.flash.assert_success_message( f'ISO Datastore "{self.provider}": Refresh Relationships successfully initiated' ) if wait: wait_for(lambda lt: lt != basic_info.get_text_of('Last Refreshed On'), func_args=[last_time], fail_func=view.toolbar.reload.click, num_sec=timeout, message="iso refresh") def set_iso_image_type(self, image_name, image_type): """ Function to set the image type of a PXE image """ view = navigate_to(self, 'All') view.sidebar.datastores.tree.click_path('All ISO Datastores', self.provider, 'ISO Images', image_name) view.toolbar.configuration.item_select('Edit this ISO Image') view = view.browser.create_view(PXEImageEditView) changed = view.fill({'type': image_type}) # Click save if enabled else click Cancel if changed: view.save.click() else: view.cancel.click() @navigator.register(ISODatastore, 'All') class ISODatastoreAll(CFMENavigateStep): VIEW = PXEDatastoresView prerequisite = NavigateToSibling('PXEMainPage') def step(self, *args, **kwargs): self.view.sidebar.datastores.tree.click_path("All ISO Datastores") @navigator.register(ISODatastore, 'Add') class ISODatastoreAdd(CFMENavigateStep): VIEW = PXEDatastoreAddView prerequisite = NavigateToSibling('All') def step(self, *args, **kwargs): self.prerequisite_view.toolbar.configuration.item_select('Add a New ISO Datastore') @navigator.register(ISODatastore, 'Details') class ISODatastoreDetails(CFMENavigateStep): VIEW = PXEDatastoreDetailsView prerequisite = NavigateToSibling('All') def step(self, *args, **kwargs): self.view.sidebar.datastores.tree.click_path("All ISO Datastores", self.obj.provider) @navigator.register(SystemImage, 'PXEMainPage') 
@navigator.register(PXEServer, 'PXEMainPage') @navigator.register(CustomizationTemplateCollection, 'PXEMainPage') @navigator.register(SystemImageTypeCollection, 'PXEMainPage') @navigator.register(ISODatastore, 'PXEMainPage') class PXEMainPage(CFMENavigateStep): prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn') def step(self, *args, **kwargs): self.prerequisite_view.navigation.select('Compute', 'Infrastructure', 'PXE') def get_template_from_config(template_config_name, create=False, appliance=None): """ Convenience function to grab the details for a template from the yamls and create template. """ assert appliance is not None template_config = conf.cfme_data.get('customization_templates', {})[template_config_name] script_data = load_data_file(str(project_path.join(template_config['script_file'])), replacements=template_config['replacements']) script_data = script_data.read() collection = appliance.collections.customization_templates kwargs = { 'name': template_config['name'], 'description': template_config['description'], 'image_type': template_config['image_type'], 'script_type': template_config['script_type'], 'script_data': script_data } customization_template = collection.instantiate(**kwargs) if create and not customization_template.exists(): return collection.create(**kwargs) return customization_template def get_pxe_server_from_config(pxe_config_name, appliance): """ Convenience function to grab the details for a pxe server fomr the yamls. """ pxe_config = conf.cfme_data.get('pxe_servers', {})[pxe_config_name] return PXEServer(name=pxe_config['name'], depot_type=pxe_config['depot_type'], uri=pxe_config['uri'], userid=pxe_config.get('userid') or None, password=pxe_config.get('password') or None, access_url=pxe_config['access_url'], pxe_dir=pxe_config['pxe_dir'], windows_dir=pxe_config['windows_dir'], customize_dir=pxe_config['customize_dir'], menu_filename=pxe_config['menu_filename'], appliance=appliance) def remove_all_pxe_servers(): """ Convenience function to remove all PXE servers """ view = navigate_to(PXEServer, 'All') if view.entities.is_displayed: for entity in view.entities.rows(): entity[0].ensure_checked() view.toolbar.configuration.item_select('Remove PXE Servers', handle_alert=True)
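A hypothetical usage sketch of the helpers above, assuming the usual cfme appliance fixture; the yaml key 'pxe_server_01' and the image name 'rhel8-server' are placeholders, not real entries:

# Build the model from the cfme_data yaml, create it through the UI if
# missing, then set an image's type (constants come from SystemImageType).
pxe_server = get_pxe_server_from_config('pxe_server_01', appliance=appliance)
if not pxe_server.exists():       # db-backed check by default via varmeth
    pxe_server.create(refresh=True)  # fill the Add form, then refresh relationships
pxe_server.set_pxe_image_type('rhel8-server', SystemImageType.VM_OR_INSTANCE)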
""" This test generate one default report for each category under reports accordion """ import pytest from cfme import test_requirements from cfme.infrastructure.provider import InfraProvider from cfme.markers.env_markers.provider import ONE # from selenium.common.exceptions import NoSuchElementException # from utils.log import logger pytestmark = [ pytest.mark.tier(3), test_requirements.report, pytest.mark.usefixtures('setup_provider_modscope'), pytest.mark.provider([InfraProvider], scope='module', selector=ONE), ] report_path = [ ["Configuration Management", "Virtual Machines", "Guest OS Information - any OS"], ["Migration Readiness", "Virtual Machines", "Summary - VMs migration ready"], ["Operations", "Virtual Machines", "VMs not Powered On"], ["VM Sprawl", "Candidates", "Summary of VM Create and Deletes"], ["Relationships", "Virtual Machines, Folders, Clusters", "VM Relationships"], ["Events", "Operations", "Events for VM prod_webserver"], ["Performance by Asset Type", "Virtual Machines", "Top CPU Consumers (weekly)"], ["Running Processes", "Virtual Machines", "Processes for prod VMs sort by CPU Time"], ["Trending", "Clusters", "Cluster CPU Trends (last week)"], ["Tenants", "Tenant Quotas", "Tenant Quotas"], ["Provisioning", "Activity Reports", "Provisioning Activity - by VM"], ] @pytest.mark.rhel_testing @pytest.mark.parametrize('path', report_path, scope="module", ids=lambda param: '/'.join(param[:2])) def test_reports_generate_report(request, path, appliance): """ This Tests run one default report for each category Steps: *Run one default report *Delete this Saved Report from the Database Polarion: assignee: pvala casecomponent: Reporting caseimportance: high initialEstimate: 1/16h """ report = appliance.collections.reports.instantiate( type=path[0], subtype=path[1], menu_name=path[2] ).queue(wait_for_finish=True) request.addfinalizer(report.delete_if_exists) assert report.exists
repo_name: ManageIQ/integration_tests
test_path: cfme/tests/intelligence/reports/test_generate_report.py
code_path: cfme/infrastructure/pxe.py
"""Switches on Zigbee Home Automation networks.""" from __future__ import annotations import functools from typing import Any from zigpy.zcl.clusters.general import OnOff from zigpy.zcl.foundation import Status from homeassistant.components.switch import DOMAIN, SwitchEntity from homeassistant.const import STATE_ON, STATE_UNAVAILABLE from homeassistant.core import State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .core import discovery from .core.const import ( CHANNEL_ON_OFF, DATA_ZHA, DATA_ZHA_DISPATCHERS, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED, ) from .core.registries import ZHA_ENTITIES from .entity import ZhaEntity, ZhaGroupEntity STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN) GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, DOMAIN) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Zigbee Home Automation switch from config entry.""" entities_to_create = hass.data[DATA_ZHA][DOMAIN] unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( discovery.async_add_entities, async_add_entities, entities_to_create ), ) hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub) class BaseSwitch(SwitchEntity): """Common base class for zha switches.""" def __init__(self, *args, **kwargs): """Initialize the ZHA switch.""" self._on_off_channel = None self._state = None super().__init__(*args, **kwargs) @property def is_on(self) -> bool: """Return if the switch is on based on the statemachine.""" if self._state is None: return False return self._state async def async_turn_on(self, **kwargs) -> None: """Turn the entity on.""" result = await self._on_off_channel.on() if not isinstance(result, list) or result[1] is not Status.SUCCESS: return self._state = True self.async_write_ha_state() async def async_turn_off(self, **kwargs) -> None: """Turn the entity off.""" result = await self._on_off_channel.off() if not isinstance(result, list) or result[1] is not Status.SUCCESS: return self._state = False self.async_write_ha_state() @STRICT_MATCH(channel_names=CHANNEL_ON_OFF) class Switch(BaseSwitch, ZhaEntity): """ZHA switch.""" def __init__(self, unique_id, zha_device, channels, **kwargs): """Initialize the ZHA switch.""" super().__init__(unique_id, zha_device, channels, **kwargs) self._on_off_channel = self.cluster_channels.get(CHANNEL_ON_OFF) @callback def async_set_state(self, attr_id: int, attr_name: str, value: Any): """Handle state update from channel.""" self._state = bool(value) self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Run when about to be added to hass.""" await super().async_added_to_hass() self.async_accept_signal( self._on_off_channel, SIGNAL_ATTR_UPDATED, self.async_set_state ) @callback def async_restore_last_state(self, last_state) -> None: """Restore previous state.""" self._state = last_state.state == STATE_ON async def async_update(self) -> None: """Attempt to retrieve on off state from the switch.""" await super().async_update() if self._on_off_channel: state = await self._on_off_channel.get_attribute_value("on_off") if state is not None: self._state = state @GROUP_MATCH() class SwitchGroup(BaseSwitch, ZhaGroupEntity): """Representation of a switch group.""" def __init__( self, entity_ids: list[str], unique_id: str, group_id: int, zha_device, **kwargs ) -> None: """Initialize a switch group.""" super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) self._available: bool = False group = 
self.zha_device.gateway.get_group(self._group_id) self._on_off_channel = group.endpoint[OnOff.cluster_id] async def async_update(self) -> None: """Query all members and determine the light group state.""" all_states = [self.hass.states.get(x) for x in self._entity_ids] states: list[State] = list(filter(None, all_states)) on_states = [state for state in states if state.state == STATE_ON] self._state = len(on_states) > 0 self._available = any(state.state != STATE_UNAVAILABLE for state in states)
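A standalone sketch of the aggregation rule in SwitchGroup.async_update: the group is "on" if any member is on, and stays "available" while at least one member is not unavailable (the any() form here is equivalent to the len(on_states) > 0 check above; plain strings stand in for Home Assistant states):

STATE_ON, STATE_UNAVAILABLE = "on", "unavailable"

member_states = ["off", "on", "unavailable"]  # example snapshot of group members
states = [s for s in member_states if s is not None]  # drop unknown entities
is_on = any(s == STATE_ON for s in states)               # -> True
available = any(s != STATE_UNAVAILABLE for s in states)  # -> True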
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake: wait for the reload to finish propagating before asserting
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service when the config file fails to load.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id,
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test that automation is not triggered during bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support."
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
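The context assertions above (test_shared_context, test_trigger_service) all rest on the same mechanism: each automation run executes under a child Context whose parent_id points back at the context that triggered it. A minimal standalone sketch of that relationship, using hypothetical values rather than anything taken from the tests:

from homeassistant.core import Context

trigger_ctx = Context()  # context attached to the event that fires the automation
run_ctx = Context(parent_id=trigger_ctx.id)  # an automation run records its parent's id
assert run_ctx.parent_id == trigger_ctx.id  # the link the tests walk from automation to automation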
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/zha/switch.py
"""Support for Frontier Silicon Devices (Medion, Hama, Auna,...).""" import logging from afsapi import AFSAPI import requests import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_SELECT_SOURCE, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP, ) from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) SUPPORT_FRONTIER_SILICON = ( SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_SEEK | SUPPORT_PLAY_MEDIA | SUPPORT_PLAY | SUPPORT_STOP | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE ) DEFAULT_PORT = 80 DEFAULT_PASSWORD = "1234" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string, vol.Optional(CONF_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Frontier Silicon platform.""" if discovery_info is not None: async_add_entities( [AFSAPIDevice(discovery_info["ssdp_description"], DEFAULT_PASSWORD, None)], True, ) return True host = config.get(CONF_HOST) port = config.get(CONF_PORT) password = config.get(CONF_PASSWORD) name = config.get(CONF_NAME) try: async_add_entities( [AFSAPIDevice(f"http://{host}:{port}/device", password, name)], True ) _LOGGER.debug("FSAPI device %s:%s -> %s", host, port, password) return True except requests.exceptions.RequestException: _LOGGER.error( "Could not add the FSAPI device at %s:%s -> %s", host, port, password ) return False class AFSAPIDevice(MediaPlayerEntity): """Representation of a Frontier Silicon device on the network.""" def __init__(self, device_url, password, name): """Initialize the Frontier Silicon API device.""" self._device_url = device_url self._password = password self._state = None self._name = name self._title = None self._artist = None self._album_name = None self._mute = None self._source = None self._source_list = None self._media_image_url = None self._max_volume = None self._volume_level = None # Properties @property def fs_device(self): """ Create a fresh fsapi session. A new session is created for each request in case someone else connected to the device in between the updates and invalidated the existing session (i.e UNDOK). 
""" return AFSAPI(self._device_url, self._password) @property def name(self): """Return the device name.""" return self._name @property def media_title(self): """Title of current playing media.""" return self._title @property def media_artist(self): """Artist of current playing media, music track only.""" return self._artist @property def media_album_name(self): """Album name of current playing media, music track only.""" return self._album_name @property def media_content_type(self): """Content type of current playing media.""" return MEDIA_TYPE_MUSIC @property def supported_features(self): """Flag of media commands that are supported.""" return SUPPORT_FRONTIER_SILICON @property def state(self): """Return the state of the player.""" return self._state # source @property def source_list(self): """List of available input sources.""" return self._source_list @property def source(self): """Name of the current input source.""" return self._source @property def media_image_url(self): """Image url of current playing media.""" return self._media_image_url @property def volume_level(self): """Volume level of the media player (0..1).""" return self._volume_level async def async_update(self): """Get the latest date and update device state.""" fs_device = self.fs_device if not self._name: self._name = await fs_device.get_friendly_name() if not self._source_list: self._source_list = await fs_device.get_mode_list() # The API seems to include 'zero' in the number of steps (e.g. if the range is # 0-40 then get_volume_steps returns 41) subtract one to get the max volume. # If call to get_volume fails set to 0 and try again next time. if not self._max_volume: self._max_volume = int(await fs_device.get_volume_steps() or 1) - 1 if await fs_device.get_power(): status = await fs_device.get_play_status() self._state = { "playing": STATE_PLAYING, "paused": STATE_PAUSED, "stopped": STATE_IDLE, "unknown": STATE_UNKNOWN, None: STATE_IDLE, }.get(status, STATE_UNKNOWN) else: self._state = STATE_OFF if self._state != STATE_OFF: info_name = await fs_device.get_play_name() info_text = await fs_device.get_play_text() self._title = " - ".join(filter(None, [info_name, info_text])) self._artist = await fs_device.get_play_artist() self._album_name = await fs_device.get_play_album() self._source = await fs_device.get_mode() self._mute = await fs_device.get_mute() self._media_image_url = await fs_device.get_play_graphic() volume = await self.fs_device.get_volume() # Prevent division by zero if max_volume not known yet self._volume_level = float(volume or 0) / (self._max_volume or 1) else: self._title = None self._artist = None self._album_name = None self._source = None self._mute = None self._media_image_url = None self._volume_level = None # Management actions # power control async def async_turn_on(self): """Turn on the device.""" await self.fs_device.set_power(True) async def async_turn_off(self): """Turn off the device.""" await self.fs_device.set_power(False) async def async_media_play(self): """Send play command.""" await self.fs_device.play() async def async_media_pause(self): """Send pause command.""" await self.fs_device.pause() async def async_media_play_pause(self): """Send play/pause command.""" if "playing" in self._state: await self.fs_device.pause() else: await self.fs_device.play() async def async_media_stop(self): """Send play/pause command.""" await self.fs_device.pause() async def async_media_previous_track(self): """Send previous track command (results in rewind).""" await self.fs_device.rewind() async 
def async_media_next_track(self): """Send next track command (results in fast-forward).""" await self.fs_device.forward() # mute @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return self._mute async def async_mute_volume(self, mute): """Send mute command.""" await self.fs_device.set_mute(mute) # volume async def async_volume_up(self): """Send volume up command.""" volume = await self.fs_device.get_volume() volume = int(volume or 0) + 1 await self.fs_device.set_volume(min(volume, self._max_volume)) async def async_volume_down(self): """Send volume down command.""" volume = await self.fs_device.get_volume() volume = int(volume or 0) - 1 await self.fs_device.set_volume(max(volume, 0)) async def async_set_volume_level(self, volume): """Set volume command.""" if self._max_volume: # Can't do anything sensible if not set volume = int(volume * self._max_volume) await self.fs_device.set_volume(volume) async def async_select_source(self, source): """Select input source.""" await self.fs_device.set_mode(source)
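The step arithmetic described in the async_update comment above is subtle enough to deserve a worked example. A standalone sketch with hypothetical numbers, assuming a device whose volume range is 0-40 and whose get_volume_steps() therefore reports 41:

steps = 41  # reported step count includes the zero step
max_volume = steps - 1  # 40, the highest raw volume the device accepts
raw_volume = 20  # hypothetical current device volume
volume_level = float(raw_volume) / max_volume  # 0.5, the 0..1 scale Home Assistant expects
assert volume_level == 0.5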
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/frontier_silicon/media_player.py
"""Support for AquaLogic devices.""" from datetime import timedelta import logging import threading import time from aqualogic.core import AquaLogic import voluptuous as vol from homeassistant.const import ( CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.helpers import config_validation as cv _LOGGER = logging.getLogger(__name__) DOMAIN = "aqualogic" UPDATE_TOPIC = f"{DOMAIN}_update" CONF_UNIT = "unit" RECONNECT_INTERVAL = timedelta(seconds=10) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( {vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.port} ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up AquaLogic platform.""" host = config[DOMAIN][CONF_HOST] port = config[DOMAIN][CONF_PORT] processor = AquaLogicProcessor(hass, host, port) hass.data[DOMAIN] = processor hass.bus.listen_once(EVENT_HOMEASSISTANT_START, processor.start_listen) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, processor.shutdown) _LOGGER.debug("AquaLogicProcessor %s:%i initialized", host, port) return True class AquaLogicProcessor(threading.Thread): """AquaLogic event processor thread.""" def __init__(self, hass, host, port): """Initialize the data object.""" super().__init__(daemon=True) self._hass = hass self._host = host self._port = port self._shutdown = False self._panel = None def start_listen(self, event): """Start event-processing thread.""" _LOGGER.debug("Event processing thread started") self.start() def shutdown(self, event): """Signal shutdown of processing event.""" _LOGGER.debug("Event processing signaled exit") self._shutdown = True def data_changed(self, panel): """Aqualogic data changed callback.""" self._hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC) def run(self): """Event thread.""" while True: self._panel = AquaLogic() self._panel.connect(self._host, self._port) self._panel.process(self.data_changed) if self._shutdown: return _LOGGER.error("Connection to %s:%d lost", self._host, self._port) time.sleep(RECONNECT_INTERVAL.seconds) @property def panel(self): """Retrieve the AquaLogic object.""" return self._panel
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/aqualogic/__init__.py
"""Support for Azure DevOps.""" from __future__ import annotations import logging from typing import Any from aioazuredevops.client import DevOpsClient import aiohttp from homeassistant.components.azure_devops.const import ( CONF_ORG, CONF_PAT, CONF_PROJECT, DATA_AZURE_DEVOPS_CLIENT, DOMAIN, ) from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType, HomeAssistantType _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: """Set up Azure DevOps from a config entry.""" client = DevOpsClient() try: if entry.data[CONF_PAT] is not None: await client.authorize(entry.data[CONF_PAT], entry.data[CONF_ORG]) if not client.authorized: _LOGGER.warning( "Could not authorize with Azure DevOps. You may need to update your token" ) hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_REAUTH}, data=entry.data, ) ) return False await client.get_project(entry.data[CONF_ORG], entry.data[CONF_PROJECT]) except aiohttp.ClientError as exception: _LOGGER.warning(exception) raise ConfigEntryNotReady from exception instance_key = f"{DOMAIN}_{entry.data[CONF_ORG]}_{entry.data[CONF_PROJECT]}" hass.data.setdefault(instance_key, {})[DATA_AZURE_DEVOPS_CLIENT] = client # Setup components hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, "sensor") ) return True async def async_unload_entry(hass: HomeAssistantType, entry: ConfigType) -> bool: """Unload Azure DevOps config entry.""" del hass.data[f"{DOMAIN}_{entry.data[CONF_ORG]}_{entry.data[CONF_PROJECT]}"] return await hass.config_entries.async_forward_entry_unload(entry, "sensor") class AzureDevOpsEntity(Entity): """Defines a base Azure DevOps entity.""" def __init__(self, organization: str, project: str, name: str, icon: str) -> None: """Initialize the Azure DevOps entity.""" self._name = name self._icon = icon self._available = True self.organization = organization self.project = project @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def icon(self) -> str: """Return the mdi icon of the entity.""" return self._icon @property def available(self) -> bool: """Return True if entity is available.""" return self._available async def async_update(self) -> None: """Update Azure DevOps entity.""" if await self._azure_devops_update(): self._available = True else: if self._available: _LOGGER.debug( "An error occurred while updating Azure DevOps sensor", exc_info=True, ) self._available = False async def _azure_devops_update(self) -> None: """Update Azure DevOps entity.""" raise NotImplementedError() class AzureDevOpsDeviceEntity(AzureDevOpsEntity): """Defines a Azure DevOps device entity.""" @property def device_info(self) -> dict[str, Any]: """Return device information about this Azure DevOps instance.""" return { "identifiers": { ( DOMAIN, self.organization, self.project, ) }, "manufacturer": self.organization, "name": self.project, "entry_type": "service", }
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test that automation is not triggered on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/azure_devops/__init__.py
"""Blueprint models.""" from __future__ import annotations import asyncio import logging import pathlib import shutil from typing import Any from awesomeversion import AwesomeVersion import voluptuous as vol from voluptuous.humanize import humanize_error from homeassistant import loader from homeassistant.const import ( CONF_DEFAULT, CONF_DOMAIN, CONF_NAME, CONF_PATH, __version__, ) from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.util import yaml from .const import ( BLUEPRINT_FOLDER, CONF_BLUEPRINT, CONF_HOMEASSISTANT, CONF_INPUT, CONF_MIN_VERSION, CONF_SOURCE_URL, CONF_USE_BLUEPRINT, DOMAIN, ) from .errors import ( BlueprintException, FailedToLoad, FileAlreadyExists, InvalidBlueprint, InvalidBlueprintInputs, MissingInput, ) from .schemas import BLUEPRINT_INSTANCE_FIELDS, BLUEPRINT_SCHEMA class Blueprint: """Blueprint of a configuration structure.""" def __init__( self, data: dict, *, path: str | None = None, expected_domain: str | None = None, ) -> None: """Initialize a blueprint.""" try: data = self.data = BLUEPRINT_SCHEMA(data) except vol.Invalid as err: raise InvalidBlueprint(expected_domain, path, data, err) from err # In future, we will treat this as "incorrect" and allow to recover from this data_domain = data[CONF_BLUEPRINT][CONF_DOMAIN] if expected_domain is not None and data_domain != expected_domain: raise InvalidBlueprint( expected_domain, path or self.name, data, f"Found incorrect blueprint type {data_domain}, expected {expected_domain}", ) self.domain = data_domain missing = yaml.extract_inputs(data) - set(data[CONF_BLUEPRINT][CONF_INPUT]) if missing: raise InvalidBlueprint( data_domain, path or self.name, data, f"Missing input definition for {', '.join(missing)}", ) @property def name(self) -> str: """Return blueprint name.""" return self.data[CONF_BLUEPRINT][CONF_NAME] @property def inputs(self) -> dict: """Return blueprint inputs.""" return self.data[CONF_BLUEPRINT][CONF_INPUT] @property def metadata(self) -> dict: """Return blueprint metadata.""" return self.data[CONF_BLUEPRINT] def update_metadata(self, *, source_url: str | None = None) -> None: """Update metadata.""" if source_url is not None: self.data[CONF_BLUEPRINT][CONF_SOURCE_URL] = source_url def yaml(self) -> str: """Dump blueprint as YAML.""" return yaml.dump(self.data) @callback def validate(self) -> list[str] | None: """Test if the Home Assistant installation supports this blueprint. Return list of errors if not valid. 
""" errors = [] metadata = self.metadata min_version = metadata.get(CONF_HOMEASSISTANT, {}).get(CONF_MIN_VERSION) if min_version is not None and AwesomeVersion(__version__) < AwesomeVersion( min_version ): errors.append(f"Requires at least Home Assistant {min_version}") return errors or None class BlueprintInputs: """Inputs for a blueprint.""" def __init__( self, blueprint: Blueprint, config_with_inputs: dict[str, Any] ) -> None: """Instantiate a blueprint inputs object.""" self.blueprint = blueprint self.config_with_inputs = config_with_inputs @property def inputs(self): """Return the inputs.""" return self.config_with_inputs[CONF_USE_BLUEPRINT][CONF_INPUT] @property def inputs_with_default(self): """Return the inputs and fallback to defaults.""" no_input = set(self.blueprint.inputs) - set(self.inputs) inputs_with_default = dict(self.inputs) for inp in no_input: blueprint_input = self.blueprint.inputs[inp] if isinstance(blueprint_input, dict) and CONF_DEFAULT in blueprint_input: inputs_with_default[inp] = blueprint_input[CONF_DEFAULT] return inputs_with_default def validate(self) -> None: """Validate the inputs.""" missing = set(self.blueprint.inputs) - set(self.inputs_with_default) if missing: raise MissingInput(self.blueprint.domain, self.blueprint.name, missing) # In future we can see if entities are correct domain, areas exist etc # using the new selector helper. @callback def async_substitute(self) -> dict: """Get the blueprint value with the inputs substituted.""" processed = yaml.substitute(self.blueprint.data, self.inputs_with_default) combined = {**processed, **self.config_with_inputs} # From config_with_inputs combined.pop(CONF_USE_BLUEPRINT) # From blueprint combined.pop(CONF_BLUEPRINT) return combined class DomainBlueprints: """Blueprints for a specific domain.""" def __init__( self, hass: HomeAssistant, domain: str, logger: logging.Logger, ) -> None: """Initialize a domain blueprints instance.""" self.hass = hass self.domain = domain self.logger = logger self._blueprints = {} self._load_lock = asyncio.Lock() hass.data.setdefault(DOMAIN, {})[domain] = self @property def blueprint_folder(self) -> pathlib.Path: """Return the blueprint folder.""" return pathlib.Path(self.hass.config.path(BLUEPRINT_FOLDER, self.domain)) @callback def async_reset_cache(self) -> None: """Reset the blueprint cache.""" self._blueprints = {} def _load_blueprint(self, blueprint_path) -> Blueprint: """Load a blueprint.""" try: blueprint_data = yaml.load_yaml(self.blueprint_folder / blueprint_path) except FileNotFoundError as err: raise FailedToLoad( self.domain, blueprint_path, FileNotFoundError(f"Unable to find {blueprint_path}"), ) from err except HomeAssistantError as err: raise FailedToLoad(self.domain, blueprint_path, err) from err return Blueprint( blueprint_data, expected_domain=self.domain, path=blueprint_path ) def _load_blueprints(self) -> dict[str, Blueprint | BlueprintException]: """Load all the blueprints.""" blueprint_folder = pathlib.Path( self.hass.config.path(BLUEPRINT_FOLDER, self.domain) ) results = {} for blueprint_path in blueprint_folder.glob("**/*.yaml"): blueprint_path = str(blueprint_path.relative_to(blueprint_folder)) if self._blueprints.get(blueprint_path) is None: try: self._blueprints[blueprint_path] = self._load_blueprint( blueprint_path ) except BlueprintException as err: self._blueprints[blueprint_path] = None results[blueprint_path] = err continue results[blueprint_path] = self._blueprints[blueprint_path] return results async def async_get_blueprints( self, ) -> dict[str, 
Blueprint | BlueprintException]: """Get all the blueprints.""" async with self._load_lock: return await self.hass.async_add_executor_job(self._load_blueprints) async def async_get_blueprint(self, blueprint_path: str) -> Blueprint: """Get a blueprint.""" def load_from_cache(): """Load blueprint from cache.""" blueprint = self._blueprints[blueprint_path] if blueprint is None: raise FailedToLoad( self.domain, blueprint_path, FileNotFoundError(f"Unable to find {blueprint_path}"), ) return blueprint if blueprint_path in self._blueprints: return load_from_cache() async with self._load_lock: # Check it again if blueprint_path in self._blueprints: return load_from_cache() try: blueprint = await self.hass.async_add_executor_job( self._load_blueprint, blueprint_path ) except Exception: self._blueprints[blueprint_path] = None raise self._blueprints[blueprint_path] = blueprint return blueprint async def async_inputs_from_config( self, config_with_blueprint: dict ) -> BlueprintInputs: """Process a blueprint config.""" try: config_with_blueprint = BLUEPRINT_INSTANCE_FIELDS(config_with_blueprint) except vol.Invalid as err: raise InvalidBlueprintInputs( self.domain, humanize_error(config_with_blueprint, err) ) from err bp_conf = config_with_blueprint[CONF_USE_BLUEPRINT] blueprint = await self.async_get_blueprint(bp_conf[CONF_PATH]) inputs = BlueprintInputs(blueprint, config_with_blueprint) inputs.validate() return inputs async def async_remove_blueprint(self, blueprint_path: str) -> None: """Remove a blueprint file.""" path = self.blueprint_folder / blueprint_path await self.hass.async_add_executor_job(path.unlink) self._blueprints[blueprint_path] = None def _create_file(self, blueprint: Blueprint, blueprint_path: str) -> None: """Create blueprint file.""" path = pathlib.Path( self.hass.config.path(BLUEPRINT_FOLDER, self.domain, blueprint_path) ) if path.exists(): raise FileAlreadyExists(self.domain, blueprint_path) path.parent.mkdir(parents=True, exist_ok=True) path.write_text(blueprint.yaml()) async def async_add_blueprint( self, blueprint: Blueprint, blueprint_path: str ) -> None: """Add a blueprint.""" if not blueprint_path.endswith(".yaml"): blueprint_path = f"{blueprint_path}.yaml" await self.hass.async_add_executor_job( self._create_file, blueprint, blueprint_path ) self._blueprints[blueprint_path] = blueprint async def async_populate(self) -> None: """Create folder if it doesn't exist and populate with examples.""" integration = await loader.async_get_integration(self.hass, self.domain) def populate(): if self.blueprint_folder.exists(): return shutil.copytree( integration.file_path / BLUEPRINT_FOLDER, self.blueprint_folder / HA_DOMAIN, ) await self.hass.async_add_executor_job(populate)
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/blueprint/models.py
"""Support for Genius Hub switch/outlet devices.""" from datetime import timedelta import voluptuous as vol from homeassistant.components.switch import DEVICE_CLASS_OUTLET, SwitchEntity from homeassistant.const import ATTR_ENTITY_ID from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.typing import ConfigType, HomeAssistantType from . import ATTR_DURATION, DOMAIN, GeniusZone GH_ON_OFF_ZONE = "on / off" SVC_SET_SWITCH_OVERRIDE = "set_switch_override" SET_SWITCH_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Optional(ATTR_DURATION): vol.All( cv.time_period, vol.Range(min=timedelta(minutes=5), max=timedelta(days=1)), ), } ) async def async_setup_platform( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None ) -> None: """Set up the Genius Hub switch entities.""" if discovery_info is None: return broker = hass.data[DOMAIN]["broker"] async_add_entities( [ GeniusSwitch(broker, z) for z in broker.client.zone_objs if z.data["type"] == GH_ON_OFF_ZONE ] ) # Register custom services platform = entity_platform.current_platform.get() platform.async_register_entity_service( SVC_SET_SWITCH_OVERRIDE, SET_SWITCH_OVERRIDE_SCHEMA, "async_turn_on", ) class GeniusSwitch(GeniusZone, SwitchEntity): """Representation of a Genius Hub switch.""" @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_OUTLET @property def is_on(self) -> bool: """Return the current state of the on/off zone. The zone is considered 'on' if & only if it is override/on (e.g. timer/on is 'off'). """ return self._zone.data["mode"] == "override" and self._zone.data["setpoint"] async def async_turn_off(self, **kwargs) -> None: """Send the zone to Timer mode. The zone is deemed 'off' in this mode, although the plugs may actually be on. """ await self._zone.set_mode("timer") async def async_turn_on(self, **kwargs) -> None: """Set the zone to override/on ({'setpoint': true}) for x seconds.""" await self._zone.set_override(1, kwargs.get(ATTR_DURATION, 3600))
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/geniushub/switch.py
"""Support for exposing Home Assistant via Zeroconf.""" from __future__ import annotations from contextlib import suppress import fnmatch from functools import partial import ipaddress import logging import socket from typing import Any, TypedDict import voluptuous as vol from zeroconf import ( Error as ZeroconfError, InterfaceChoice, IPVersion, NonUniqueNameException, ServiceInfo, ServiceStateChange, Zeroconf, ) from homeassistant import util from homeassistant.const import ( EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, __version__, ) from homeassistant.core import Event, HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.singleton import singleton from homeassistant.loader import async_get_homekit, async_get_zeroconf from .models import HaServiceBrowser, HaZeroconf from .usage import install_multiple_zeroconf_catcher _LOGGER = logging.getLogger(__name__) DOMAIN = "zeroconf" ZEROCONF_TYPE = "_home-assistant._tcp.local." HOMEKIT_TYPES = [ "_hap._tcp.local.", # Thread based devices "_hap._udp.local.", ] CONF_DEFAULT_INTERFACE = "default_interface" CONF_IPV6 = "ipv6" DEFAULT_DEFAULT_INTERFACE = True DEFAULT_IPV6 = True HOMEKIT_PAIRED_STATUS_FLAG = "sf" HOMEKIT_MODEL = "md" # Property key=value has a max length of 255 # so we use 230 to leave space for key= MAX_PROPERTY_VALUE_LEN = 230 # Dns label max length MAX_NAME_LEN = 63 CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional( CONF_DEFAULT_INTERFACE, default=DEFAULT_DEFAULT_INTERFACE ): cv.boolean, vol.Optional(CONF_IPV6, default=DEFAULT_IPV6): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) class HaServiceInfo(TypedDict): """Prepared info from mDNS entries.""" host: str port: int | None hostname: str type: str name: str properties: dict[str, Any] @singleton(DOMAIN) async def async_get_instance(hass: HomeAssistant) -> HaZeroconf: """Zeroconf instance to be shared with other integrations that use it.""" return await _async_get_instance(hass) async def _async_get_instance(hass: HomeAssistant, **zcargs: Any) -> HaZeroconf: logging.getLogger("zeroconf").setLevel(logging.NOTSET) zeroconf = await hass.async_add_executor_job(partial(HaZeroconf, **zcargs)) install_multiple_zeroconf_catcher(zeroconf) def _stop_zeroconf(_event: Event) -> None: """Stop Zeroconf.""" zeroconf.ha_close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _stop_zeroconf) return zeroconf async def async_setup(hass: HomeAssistant, config: dict) -> bool: """Set up Zeroconf and make Home Assistant discoverable.""" zc_config = config.get(DOMAIN, {}) zc_args: dict = {} if zc_config.get(CONF_DEFAULT_INTERFACE, DEFAULT_DEFAULT_INTERFACE): zc_args["interfaces"] = InterfaceChoice.Default if not zc_config.get(CONF_IPV6, DEFAULT_IPV6): zc_args["ip_version"] = IPVersion.V4Only zeroconf = hass.data[DOMAIN] = await _async_get_instance(hass, **zc_args) async def _async_zeroconf_hass_start(_event: Event) -> None: """Expose Home Assistant on zeroconf when it starts. Wait till started or otherwise HTTP is not up and running. 
""" uuid = await hass.helpers.instance_id.async_get() await hass.async_add_executor_job( _register_hass_zc_service, hass, zeroconf, uuid ) async def _async_zeroconf_hass_started(_event: Event) -> None: """Start the service browser.""" await _async_start_zeroconf_browser(hass, zeroconf) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_zeroconf_hass_start) hass.bus.async_listen_once( EVENT_HOMEASSISTANT_STARTED, _async_zeroconf_hass_started ) return True def _register_hass_zc_service( hass: HomeAssistant, zeroconf: HaZeroconf, uuid: str ) -> None: # Get instance UUID valid_location_name = _truncate_location_name_to_valid(hass.config.location_name) params = { "location_name": valid_location_name, "uuid": uuid, "version": __version__, "external_url": "", "internal_url": "", # Old base URL, for backward compatibility "base_url": "", # Always needs authentication "requires_api_password": True, } # Get instance URL's with suppress(NoURLAvailableError): params["external_url"] = get_url(hass, allow_internal=False) with suppress(NoURLAvailableError): params["internal_url"] = get_url(hass, allow_external=False) # Set old base URL based on external or internal params["base_url"] = params["external_url"] or params["internal_url"] host_ip = util.get_local_ip() try: host_ip_pton = socket.inet_pton(socket.AF_INET, host_ip) except OSError: host_ip_pton = socket.inet_pton(socket.AF_INET6, host_ip) _suppress_invalid_properties(params) info = ServiceInfo( ZEROCONF_TYPE, name=f"{valid_location_name}.{ZEROCONF_TYPE}", server=f"{uuid}.local.", addresses=[host_ip_pton], port=hass.http.server_port, properties=params, ) _LOGGER.info("Starting Zeroconf broadcast") try: zeroconf.register_service(info) except NonUniqueNameException: _LOGGER.error( "Home Assistant instance with identical name present in the local network" ) async def _async_start_zeroconf_browser( hass: HomeAssistant, zeroconf: HaZeroconf ) -> None: """Start the zeroconf browser.""" zeroconf_types = await async_get_zeroconf(hass) homekit_models = await async_get_homekit(hass) types = list(zeroconf_types) for hk_type in HOMEKIT_TYPES: if hk_type not in zeroconf_types: types.append(hk_type) def service_update( zeroconf: Zeroconf, service_type: str, name: str, state_change: ServiceStateChange, ) -> None: """Service state changed.""" nonlocal zeroconf_types nonlocal homekit_models if state_change == ServiceStateChange.Removed: return try: service_info = zeroconf.get_service_info(service_type, name) except ZeroconfError: _LOGGER.exception("Failed to get info for device %s", name) return if not service_info: # Prevent the browser thread from collapsing as # service_info can be None _LOGGER.debug("Failed to get info for device %s", name) return info = info_from_service(service_info) if not info: # Prevent the browser thread from collapsing _LOGGER.debug("Failed to get addresses for device %s", name) return _LOGGER.debug("Discovered new device %s %s", name, info) # If we can handle it as a HomeKit discovery, we do that here. if service_type in HOMEKIT_TYPES: discovery_was_forwarded = handle_homekit(hass, homekit_models, info) # Continue on here as homekit_controller # still needs to get updates on devices # so it can see when the 'c#' field is updated. 
# # We only send updates to homekit_controller # if the device is already paired in order to avoid # offering a second discovery for the same device if ( discovery_was_forwarded and HOMEKIT_PAIRED_STATUS_FLAG in info["properties"] ): try: # 0 means paired and not discoverable by iOS clients) if int(info["properties"][HOMEKIT_PAIRED_STATUS_FLAG]): return except ValueError: # HomeKit pairing status unknown # likely bad homekit data return if "name" in info: lowercase_name: str | None = info["name"].lower() else: lowercase_name = None if "macaddress" in info["properties"]: uppercase_mac: str | None = info["properties"]["macaddress"].upper() else: uppercase_mac = None # Not all homekit types are currently used for discovery # so not all service type exist in zeroconf_types for entry in zeroconf_types.get(service_type, []): if len(entry) > 1: if ( uppercase_mac is not None and "macaddress" in entry and not fnmatch.fnmatch(uppercase_mac, entry["macaddress"]) ): continue if ( lowercase_name is not None and "name" in entry and not fnmatch.fnmatch(lowercase_name, entry["name"]) ): continue hass.add_job( hass.config_entries.flow.async_init( entry["domain"], context={"source": DOMAIN}, data=info ) # type: ignore ) _LOGGER.debug("Starting Zeroconf browser") HaServiceBrowser(zeroconf, types, handlers=[service_update]) def handle_homekit( hass: HomeAssistant, homekit_models: dict[str, str], info: HaServiceInfo ) -> bool: """Handle a HomeKit discovery. Return if discovery was forwarded. """ model = None props = info["properties"] for key in props: if key.lower() == HOMEKIT_MODEL: model = props[key] break if model is None: return False for test_model in homekit_models: if ( model != test_model and not model.startswith(f"{test_model} ") and not model.startswith(f"{test_model}-") ): continue hass.add_job( hass.config_entries.flow.async_init( homekit_models[test_model], context={"source": "homekit"}, data=info ) # type: ignore ) return True return False def info_from_service(service: ServiceInfo) -> HaServiceInfo | None: """Return prepared info from mDNS entries.""" properties: dict[str, Any] = {"_raw": {}} for key, value in service.properties.items(): # See https://ietf.org/rfc/rfc6763.html#section-6.4 and # https://ietf.org/rfc/rfc6763.html#section-6.5 for expected encodings # for property keys and values try: key = key.decode("ascii") except UnicodeDecodeError: _LOGGER.debug( "Ignoring invalid key provided by [%s]: %s", service.name, key ) continue properties["_raw"][key] = value with suppress(UnicodeDecodeError): if isinstance(value, bytes): properties[key] = value.decode("utf-8") if not service.addresses: return None address = service.addresses[0] return { "host": str(ipaddress.ip_address(address)), "port": service.port, "hostname": service.server, "type": service.type, "name": service.name, "properties": properties, } def _suppress_invalid_properties(properties: dict) -> None: """Suppress any properties that will cause zeroconf to fail to startup.""" for prop, prop_value in properties.items(): if not isinstance(prop_value, str): continue if len(prop_value.encode("utf-8")) > MAX_PROPERTY_VALUE_LEN: _LOGGER.error( "The property '%s' was suppressed because it is longer than the maximum length of %d bytes: %s", prop, MAX_PROPERTY_VALUE_LEN, prop_value, ) properties[prop] = "" def _truncate_location_name_to_valid(location_name: str) -> str: """Truncate or return the location name usable for zeroconf.""" if len(location_name.encode("utf-8")) < MAX_NAME_LEN: return location_name _LOGGER.warning( "The location 
name was truncated because it is longer than the maximum length of %d bytes: %s", MAX_NAME_LEN, location_name, ) return location_name.encode("utf-8")[:MAX_NAME_LEN].decode("utf-8", "ignore")
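`_truncate_location_name_to_valid` budgets in UTF-8 bytes rather than characters, and relies on `decode(..., "ignore")` to drop any multi-byte code point that the 63-byte slice would otherwise split in half. A standalone sketch of that idiom (the function and variable names here are illustrative, not part of the module):

MAX_NAME_LEN = 63  # DNS label limit, mirroring the constant above


def truncate_utf8(name: str, limit: int = MAX_NAME_LEN) -> str:
    """Cut a string to a byte budget without emitting invalid UTF-8."""
    data = name.encode("utf-8")
    if len(data) < limit:
        return name
    # "ignore" silently drops a trailing partial code point left by the slice.
    return data[:limit].decode("utf-8", "ignore")


# "é" is two bytes in UTF-8, so a naive character slice like name[:63] could
# exceed the byte budget, and a raw byte slice could land mid-character;
# this version stays within budget and stays valid UTF-8 either way.
print(truncate_utf8("café" * 20))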
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
    hass.states.async_set("test.entity", "hello")
    await hass.async_block_till_done()
    assert len(calls) == 2


async def test_trigger_service_ignoring_condition(hass, caplog, calls):
    """Test the trigger service can skip conditions."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "test",
                "trigger": [{"platform": "event", "event_type": "test_event"}],
                "condition": {
                    "condition": "numeric_state",
                    "entity_id": "non.existing",
                    "above": "1",
                },
                "action": {"service": "test.automation"},
            }
        },
    )

    caplog.clear()
    caplog.set_level(logging.WARNING)

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 0

    assert len(caplog.record_tuples) == 1
    assert caplog.record_tuples[0][1] == logging.WARNING

    await hass.services.async_call(
        "automation", "trigger", {"entity_id": "automation.test"}, blocking=True
    )
    assert len(calls) == 1

    await hass.services.async_call(
        "automation",
        "trigger",
        {"entity_id": "automation.test", "skip_condition": True},
        blocking=True,
    )
    assert len(calls) == 2

    await hass.services.async_call(
        "automation",
        "trigger",
        {"entity_id": "automation.test", "skip_condition": False},
        blocking=True,
    )
    assert len(calls) == 2


async def test_two_conditions_with_and(hass, calls):
    """Test two conditions combined with AND."""
    entity_id = "test.entity"
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": [{"platform": "event", "event_type": "test_event"}],
                "condition": [
                    {"condition": "state", "entity_id": entity_id, "state": "100"},
                    {
                        "condition": "numeric_state",
                        "entity_id": entity_id,
                        "below": 150,
                    },
                ],
                "action": {"service": "test.automation"},
            }
        },
    )

    hass.states.async_set(entity_id, 100)
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1

    hass.states.async_set(entity_id, 101)
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1

    hass.states.async_set(entity_id, 151)
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_shorthand_conditions_template(hass, calls):
    """Test shorthand notation form in conditions."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": [{"platform": "event", "event_type": "test_event"}],
                "condition": "{{ is_state('test.entity', 'hello') }}",
                "action": {"service": "test.automation"},
            }
        },
    )

    hass.states.async_set("test.entity", "hello")
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1

    hass.states.async_set("test.entity", "goodbye")
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_automation_list_setting(hass, calls):
    """Test setting up automation as a list of configurations."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {"service": "test.automation"},
                },
                {
                    "trigger": {"platform": "event", "event_type": "test_event_2"},
                    "action": {"service": "test.automation"},
                },
            ]
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1

    hass.bus.async_fire("test_event_2")
    await hass.async_block_till_done()
    assert len(calls) == 2


async def test_automation_calling_two_actions(hass, calls):
    """Test if we can call two actions from automation async definition."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": [
                    {"service": "test.automation", "data": {"position": 0}},
                    {"service": "test.automation", "data": {"position": 1}},
                ],
            }
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    assert len(calls) == 2
    assert calls[0].data["position"] == 0
    assert calls[1].data["position"] == 1


async def test_shared_context(hass, calls):
    """Test that the shared context is passed down the chain."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "alias": "hello",
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {"event": "test_event2"},
                },
                {
                    "alias": "bye",
                    "trigger": {"platform": "event", "event_type": "test_event2"},
                    "action": {"service": "test.automation"},
                },
            ]
        },
    )

    context = Context()
    first_automation_listener = Mock()
    event_mock = Mock()

    hass.bus.async_listen("test_event2", first_automation_listener)
    hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock)
    hass.bus.async_fire("test_event", context=context)
    await hass.async_block_till_done()

    # Ensure events were fired
    assert first_automation_listener.call_count == 1
    assert event_mock.call_count == 2

    # Verify the automation-triggered event for the 'hello' automation
    args, _ = event_mock.call_args_list[0]
    first_trigger_context = args[0].context
    assert first_trigger_context.parent_id == context.id
    # Ensure event data has all attributes set
    assert args[0].data.get(ATTR_NAME) is not None
    assert args[0].data.get(ATTR_ENTITY_ID) is not None
    assert args[0].data.get(ATTR_SOURCE) is not None

    # Ensure context set correctly for event fired by 'hello' automation
    args, _ = first_automation_listener.call_args
    assert args[0].context is first_trigger_context

    # Ensure the 'hello' automation state has the right context
    state = hass.states.get("automation.hello")
    assert state is not None
    assert state.context is first_trigger_context

    # Verify the automation-triggered event for the 'bye' automation
    args, _ = event_mock.call_args_list[1]
    second_trigger_context = args[0].context
    assert second_trigger_context.parent_id == first_trigger_context.id
    # Ensure event data has all attributes set
    assert args[0].data.get(ATTR_NAME) is not None
    assert args[0].data.get(ATTR_ENTITY_ID) is not None
    assert args[0].data.get(ATTR_SOURCE) is not None

    # Ensure the service call from the second automation
    # shares the same context
    assert len(calls) == 1
    assert calls[0].context is second_trigger_context


async def test_services(hass, calls):
    """Test the automation services for turning entities on/off."""
    entity_id = "automation.hello"

    assert hass.states.get(entity_id) is None
    assert not automation.is_on(hass, entity_id)

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            }
        },
    )

    assert hass.states.get(entity_id) is not None
    assert automation.is_on(hass, entity_id)

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1

    await hass.services.async_call(
        automation.DOMAIN,
        SERVICE_TURN_OFF,
        {
            ATTR_ENTITY_ID: entity_id,
        },
        blocking=True,
    )

    assert not automation.is_on(hass, entity_id)
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1

    await hass.services.async_call(
        automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True
    )

    assert automation.is_on(hass, entity_id)
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 2

    await hass.services.async_call(
        automation.DOMAIN,
        SERVICE_TOGGLE,
        {ATTR_ENTITY_ID: entity_id},
        blocking=True,
    )
    assert not automation.is_on(hass, entity_id)
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 2

    await hass.services.async_call(
        automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True
    )
    await hass.services.async_call(
        automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True
    )
    assert len(calls) == 3

    await hass.services.async_call(
        automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True
    )
    await hass.services.async_call(
        automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True
    )
    assert len(calls) == 4

    await hass.services.async_call(
        automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True
    )
    assert automation.is_on(hass, entity_id)


async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user):
    """Test the reload config service."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {
                    "service": "test.automation",
                    "data_template": {"event": "{{ trigger.event.event_type }}"},
                },
            }
        },
    )
    assert hass.states.get("automation.hello") is not None
    assert hass.states.get("automation.bye") is None
    listeners = hass.bus.async_listeners()
    assert listeners.get("test_event") == 1
    assert listeners.get("test_event2") is None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert calls[0].data.get("event") == "test_event"

    test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED)

    with patch(
        "homeassistant.config.load_yaml_config_file",
        autospec=True,
        return_value={
            automation.DOMAIN: {
                "alias": "bye",
                "trigger": {"platform": "event", "event_type": "test_event2"},
                "action": {
                    "service": "test.automation",
                    "data_template": {"event": "{{ trigger.event.event_type }}"},
                },
            }
        },
    ):
        with pytest.raises(Unauthorized):
            await hass.services.async_call(
                automation.DOMAIN,
                SERVICE_RELOAD,
                context=Context(user_id=hass_read_only_user.id),
                blocking=True,
            )
        await hass.services.async_call(
            automation.DOMAIN,
            SERVICE_RELOAD,
            context=Context(user_id=hass_admin_user.id),
            blocking=True,
        )
        # Let pending tasks settle so the reload has fully taken effect
        await hass.async_block_till_done()

    assert len(test_reload_event) == 1

    assert hass.states.get("automation.hello") is None
    assert hass.states.get("automation.bye") is not None
    listeners = hass.bus.async_listeners()
    assert listeners.get("test_event") is None
    assert listeners.get("test_event2") == 1

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1

    hass.bus.async_fire("test_event2")
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert calls[1].data.get("event") == "test_event2"


async def test_reload_config_when_invalid_config(hass, calls):
    """Test the reload config service handling invalid config."""
    with assert_setup_component(1, automation.DOMAIN):
        assert await async_setup_component(
            hass,
            automation.DOMAIN,
            {
                automation.DOMAIN: {
                    "alias": "hello",
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {
                        "service": "test.automation",
                        "data_template": {"event": "{{ trigger.event.event_type }}"},
                    },
                }
            },
        )
    assert hass.states.get("automation.hello") is not None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert calls[0].data.get("event") == "test_event"

    with patch(
        "homeassistant.config.load_yaml_config_file",
        autospec=True,
        return_value={automation.DOMAIN: "not valid"},
    ):
        await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True)

    assert hass.states.get("automation.hello") is None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_reload_config_handles_load_fails(hass, calls):
    """Test the reload config service when the config file fails to load."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {
                    "service": "test.automation",
                    "data_template": {"event": "{{ trigger.event.event_type }}"},
                },
            }
        },
    )
    assert hass.states.get("automation.hello") is not None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert calls[0].data.get("event") == "test_event"

    with patch(
        "homeassistant.config.load_yaml_config_file",
        side_effect=HomeAssistantError("bla"),
    ):
        await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True)

    assert hass.states.get("automation.hello") is not None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 2


@pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"])
async def test_automation_stops(hass, calls, service):
    """Test that turning off / reloading stops any running actions as appropriate."""
    entity_id = "automation.hello"
    test_entity = "test.entity"

    config = {
        automation.DOMAIN: {
            "alias": "hello",
            "trigger": {"platform": "event", "event_type": "test_event"},
            "action": [
                {"event": "running"},
                {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"},
                {"service": "test.automation"},
            ],
        }
    }
    assert await async_setup_component(hass, automation.DOMAIN, config)

    running = asyncio.Event()

    @callback
    def running_cb(event):
        running.set()

    hass.bus.async_listen_once("running", running_cb)
    hass.states.async_set(test_entity, "hello")

    hass.bus.async_fire("test_event")
    await running.wait()

    if service == "turn_off_stop":
        await hass.services.async_call(
            automation.DOMAIN,
            SERVICE_TURN_OFF,
            {ATTR_ENTITY_ID: entity_id},
            blocking=True,
        )
    elif service == "turn_off_no_stop":
        await hass.services.async_call(
            automation.DOMAIN,
            SERVICE_TURN_OFF,
            {ATTR_ENTITY_ID: entity_id, automation.CONF_STOP_ACTIONS: False},
            blocking=True,
        )
    else:
        with patch(
            "homeassistant.config.load_yaml_config_file",
            autospec=True,
            return_value=config,
        ):
            await hass.services.async_call(
                automation.DOMAIN, SERVICE_RELOAD, blocking=True
            )

    hass.states.async_set(test_entity, "goodbye")
    await hass.async_block_till_done()

    assert len(calls) == (1 if service == "turn_off_no_stop" else 0)


async def test_automation_restore_state(hass):
    """Ensure states are restored on startup."""
    time = dt_util.utcnow()
    mock_restore_cache(
        hass,
        (
            State("automation.hello", STATE_ON),
            State("automation.bye", STATE_OFF, {"last_triggered": time}),
        ),
    )

    config = {
        automation.DOMAIN: [
            {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event_hello"},
                "action": {"service": "test.automation"},
            },
            {
                "alias": "bye",
                "trigger": {"platform": "event", "event_type": "test_event_bye"},
                "action": {"service": "test.automation"},
            },
        ]
    }

    assert await async_setup_component(hass, automation.DOMAIN, config)

    state = hass.states.get("automation.hello")
    assert state
    assert state.state == STATE_ON
    assert state.attributes["last_triggered"] is None

    state = hass.states.get("automation.bye")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes["last_triggered"] == time

    calls = async_mock_service(hass, "test", "automation")

    assert automation.is_on(hass, "automation.bye") is False

    hass.bus.async_fire("test_event_bye")
    await hass.async_block_till_done()
    assert len(calls) == 0

    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event_hello")
    await hass.async_block_till_done()

    assert len(calls) == 1


async def test_initial_value_off(hass):
    """Test initial value off."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "initial_state": "off",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert not automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 0


async def test_initial_value_on(hass):
    """Test initial value on."""
    hass.state = CoreState.not_running
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "initial_state": "on",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {
                    "service": "test.automation",
                    "entity_id": ["hello.world", "hello.world2"],
                },
            }
        },
    )
    assert automation.is_on(hass, "automation.hello")

    await hass.async_start()
    await hass.async_block_till_done()
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_initial_value_off_but_restore_on(hass):
    """Test initial value off and restored state is turned on."""
    hass.state = CoreState.not_running
    calls = async_mock_service(hass, "test", "automation")
    mock_restore_cache(hass, (State("automation.hello", STATE_ON),))

    await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "initial_state": "off",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert not automation.is_on(hass, "automation.hello")

    await hass.async_start()
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 0


async def test_initial_value_on_but_restore_off(hass):
    """Test initial value on and restored state is turned off."""
    calls = async_mock_service(hass, "test", "automation")
    mock_restore_cache(hass, (State("automation.hello", STATE_OFF),))

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "initial_state": "on",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_no_initial_value_and_restore_off(hass):
    """Test no initial value and restored state is turned off."""
    calls = async_mock_service(hass, "test", "automation")
    mock_restore_cache(hass, (State("automation.hello", STATE_OFF),))

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert not automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 0


async def test_automation_is_on_if_no_initial_state_or_restore(hass):
    """Test initial value is on when no initial state or restored state."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_automation_not_trigger_on_bootstrap(hass):
    """Test if automation is not triggered on bootstrap."""
    hass.state = CoreState.not_running
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 0

    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    await hass.async_block_till_done()
    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID)


async def test_automation_bad_trigger(hass, caplog):
    """Test bad trigger configuration."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "automation"},
                "action": [],
            }
        },
    )
    assert "Integration 'automation' does not provide trigger support." in caplog.text


async def test_automation_with_error_in_script(hass, caplog):
    """Test automation with an error in script."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert "Service not found" in caplog.text
    assert "Traceback" not in caplog.text


async def test_automation_with_error_in_script_2(hass, caplog):
    """Test automation with an error in script."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": None, "entity_id": "hello.world"},
            }
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert "string value is None" in caplog.text


async def test_automation_restore_last_triggered_with_initial_state(hass):
    """Ensure last_triggered is restored, even when initial state is set."""
    time = dt_util.utcnow()
    mock_restore_cache(
        hass,
        (
            State("automation.hello", STATE_ON),
            State("automation.bye", STATE_ON, {"last_triggered": time}),
            State("automation.solong", STATE_OFF, {"last_triggered": time}),
        ),
    )

    config = {
        automation.DOMAIN: [
            {
                "alias": "hello",
                "initial_state": "off",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            },
            {
                "alias": "bye",
                "initial_state": "off",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            },
            {
                "alias": "solong",
                "initial_state": "on",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            },
        ]
    }

    await async_setup_component(hass, automation.DOMAIN, config)

    state = hass.states.get("automation.hello")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes["last_triggered"] is None

    state = hass.states.get("automation.bye")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes["last_triggered"] == time

    state = hass.states.get("automation.solong")
    assert state
    assert state.state == STATE_ON
    assert state.attributes["last_triggered"] == time


async def test_extraction_functions(hass):
    """Test extraction functions."""
    assert await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: [
                {
                    "alias": "test1",
                    "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"},
                    "condition": {
                        "condition": "state",
                        "entity_id": "light.condition_state",
                        "state": "on",
                    },
                    "action": [
                        {
                            "service": "test.script",
                            "data": {"entity_id": "light.in_both"},
                        },
                        {
                            "service": "test.script",
                            "data": {"entity_id": "light.in_first"},
                        },
                        {
                            "domain": "light",
                            "device_id": "device-in-both",
                            "entity_id": "light.bla",
                            "type": "turn_on",
                        },
                    ],
                },
                {
                    "alias": "test2",
                    "trigger": {
                        "platform": "device",
                        "domain": "light",
                        "type": "turned_on",
                        "entity_id": "light.trigger_2",
                        "device_id": "trigger-device-2",
                    },
                    "condition": {
                        "condition": "device",
                        "device_id": "condition-device",
                        "domain": "light",
                        "type": "is_on",
                        "entity_id": "light.bla",
                    },
                    "action": [
                        {
                            "service": "test.script",
                            "data": {"entity_id": "light.in_both"},
                        },
                        {
                            "condition": "state",
                            "entity_id": "sensor.condition",
                            "state": "100",
                        },
                        {"scene": "scene.hello"},
                        {
                            "domain": "light",
                            "device_id": "device-in-both",
                            "entity_id": "light.bla",
                            "type": "turn_on",
                        },
                        {
                            "domain": "light",
                            "device_id": "device-in-last",
                            "entity_id": "light.bla",
                            "type": "turn_on",
                        },
                    ],
                },
            ]
        },
    )

    assert set(automation.automations_with_entity(hass, "light.in_both")) == {
        "automation.test1",
        "automation.test2",
    }
    assert set(automation.entities_in_automation(hass, "automation.test1")) == {
        "sensor.trigger_1",
        "light.condition_state",
        "light.in_both",
        "light.in_first",
    }
    assert set(automation.automations_with_device(hass, "device-in-both")) == {
        "automation.test1",
        "automation.test2",
    }
    assert set(automation.devices_in_automation(hass, "automation.test2")) == {
        "trigger-device-2",
        "condition-device",
        "device-in-both",
        "device-in-last",
    }


async def test_logbook_humanify_automation_triggered_event(hass):
    """Test humanifying Automation Trigger event."""
    hass.config.components.add("recorder")
    await async_setup_component(hass, automation.DOMAIN, {})
    await async_setup_component(hass, "logbook", {})
    entity_attr_cache = logbook.EntityAttributeCache(hass)

    event1, event2 = list(
        logbook.humanify(
            hass,
            [
                MockLazyEventPartialState(
                    EVENT_AUTOMATION_TRIGGERED,
                    {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"},
                ),
                MockLazyEventPartialState(
                    EVENT_AUTOMATION_TRIGGERED,
                    {
                        ATTR_ENTITY_ID: "automation.bye",
                        ATTR_NAME: "Bye Automation",
                        ATTR_SOURCE: "source of trigger",
                    },
                ),
            ],
            entity_attr_cache,
            {},
        )
    )

    assert event1["name"] == "Hello Automation"
    assert event1["domain"] == "automation"
    assert event1["message"] == "has been triggered"
    assert event1["entity_id"] == "automation.hello"

    assert event2["name"] == "Bye Automation"
    assert event2["domain"] == "automation"
    assert event2["message"] == "has been triggered by source of trigger"
    assert event2["entity_id"] == "automation.bye"


async def test_automation_variables(hass, caplog):
    """Test automation variables."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "variables": {
                        "test_var": "defined_in_config",
                        "event_type": "{{ trigger.event.event_type }}",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {
                        "service": "test.automation",
                        "data": {
                            "value": "{{ test_var }}",
                            "event_type": "{{ event_type }}",
                        },
                    },
                },
                {
                    "variables": {
                        "test_var": "defined_in_config",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event_2"},
                    "condition": {
                        "condition": "template",
                        "value_template": "{{ trigger.event.data.pass_condition }}",
                    },
                    "action": {
                        "service": "test.automation",
                    },
                },
                {
                    "variables": {
                        "test_var": "{{ trigger.event.data.break + 1 }}",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event_3"},
                    "action": {
                        "service": "test.automation",
                    },
                },
            ]
        },
    )
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert calls[0].data["value"] == "defined_in_config"
    assert calls[0].data["event_type"] == "test_event"

    hass.bus.async_fire("test_event_2")
    await hass.async_block_till_done()
    assert len(calls) == 1

    hass.bus.async_fire("test_event_2", {"pass_condition": True})
    await hass.async_block_till_done()
    assert len(calls) == 2

    assert "Error rendering variables" not in caplog.text
    hass.bus.async_fire("test_event_3")
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert "Error rendering variables" in caplog.text

    hass.bus.async_fire("test_event_3", {"break": 0})
    await hass.async_block_till_done()
    assert len(calls) == 3


async def test_automation_trigger_variables(hass, caplog):
    """Test automation trigger variables."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "variables": {
                        "event_type": "{{ trigger.event.event_type }}",
                    },
                    "trigger_variables": {
                        "test_var": "defined_in_config",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {
                        "service": "test.automation",
                        "data": {
                            "value": "{{ test_var }}",
                            "event_type": "{{ event_type }}",
                        },
                    },
                },
                {
                    "variables": {
                        "event_type": "{{ trigger.event.event_type }}",
                        "test_var": "overridden_in_config",
                    },
                    "trigger_variables": {
                        "test_var": "defined_in_config",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event_2"},
                    "action": {
                        "service": "test.automation",
                        "data": {
                            "value": "{{ test_var }}",
                            "event_type": "{{ event_type }}",
                        },
                    },
                },
            ]
        },
    )
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert calls[0].data["value"] == "defined_in_config"
    assert calls[0].data["event_type"] == "test_event"

    hass.bus.async_fire("test_event_2")
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert calls[1].data["value"] == "overridden_in_config"
    assert calls[1].data["event_type"] == "test_event_2"

    assert "Error rendering variables" not in caplog.text


async def test_automation_bad_trigger_variables(hass, caplog):
    """Test automation trigger variables accessing hass is rejected."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger_variables": {
                        "test_var": "{{ states('foo.bar') }}",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {
                        "service": "test.automation",
                    },
                },
            ]
        },
    )
    hass.bus.async_fire("test_event")
    assert "Use of 'states' is not supported in limited templates" in caplog.text

    await hass.async_block_till_done()
    assert len(calls) == 0


async def test_blueprint_automation(hass, calls):
    """Test blueprint automation."""
    assert await async_setup_component(
        hass,
        "automation",
        {
            "automation": {
                "use_blueprint": {
                    "path": "test_event_service.yaml",
                    "input": {
                        "trigger_event": "blueprint_event",
                        "service_to_call": "test.automation",
                    },
                }
            }
        },
    )
    hass.bus.async_fire("blueprint_event")
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert automation.entities_in_automation(hass, "automation.automation_0") == [
        "light.kitchen"
    ]


async def test_trigger_service(hass, calls):
    """Test the automation trigger service."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {
                    "service": "test.automation",
                    "data_template": {"trigger": "{{ trigger }}"},
                },
            }
        },
    )
    context = Context()
    await hass.services.async_call(
        "automation",
        "trigger",
        {"entity_id": "automation.hello"},
        blocking=True,
        context=context,
    )

    assert len(calls) == 1
    assert calls[0].data.get("trigger") == {"platform": None}
    assert calls[0].context.parent_id is context.id
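A minimal, dependency-free sketch of the context-parenting rule the final test asserts: a service call made with a context produces automation runs whose context carries that context's id as parent_id. The Context class below is an illustrative stand-in for homeassistant.core.Context, not the real implementation.

import uuid


class Context:  # illustrative stand-in for homeassistant.core.Context
    def __init__(self, parent_id=None):
        self.id = uuid.uuid4().hex
        self.parent_id = parent_id


caller = Context()  # context passed to hass.services.async_call
triggered = Context(parent_id=caller.id)  # context the automation runs under
assert triggered.parent_id == caller.id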
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/zeroconf/__init__.py
"""Support for Nest devices.""" import asyncio import logging from google_nest_sdm.event import EventMessage from google_nest_sdm.exceptions import ( AuthException, ConfigurationException, GoogleNestException, ) from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber import voluptuous as vol from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry from homeassistant.const import ( CONF_BINARY_SENSORS, CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_MONITORED_CONDITIONS, CONF_SENSORS, CONF_STRUCTURE, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import ( aiohttp_client, config_entry_oauth2_flow, config_validation as cv, ) from . import api, config_flow from .const import DATA_SDM, DATA_SUBSCRIBER, DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN from .events import EVENT_NAME_MAP, NEST_EVENT from .legacy import async_setup_legacy, async_setup_legacy_entry _CONFIGURING = {} _LOGGER = logging.getLogger(__name__) CONF_PROJECT_ID = "project_id" CONF_SUBSCRIBER_ID = "subscriber_id" DATA_NEST_CONFIG = "nest_config" DATA_NEST_UNAVAILABLE = "nest_unavailable" NEST_SETUP_NOTIFICATION = "nest_setup" SENSOR_SCHEMA = vol.Schema( {vol.Optional(CONF_MONITORED_CONDITIONS): vol.All(cv.ensure_list)} ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_CLIENT_ID): cv.string, vol.Required(CONF_CLIENT_SECRET): cv.string, # Required to use the new API (optional for compatibility) vol.Optional(CONF_PROJECT_ID): cv.string, vol.Optional(CONF_SUBSCRIBER_ID): cv.string, # Config that only currently works on the old API vol.Optional(CONF_STRUCTURE): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_SENSORS): SENSOR_SCHEMA, vol.Optional(CONF_BINARY_SENSORS): SENSOR_SCHEMA, } ) }, extra=vol.ALLOW_EXTRA, ) # Platforms for SDM API PLATFORMS = ["sensor", "camera", "climate"] async def async_setup(hass: HomeAssistant, config: dict): """Set up Nest components with dispatch between old/new flows.""" hass.data[DOMAIN] = {} if DOMAIN not in config: return True if CONF_PROJECT_ID not in config[DOMAIN]: return await async_setup_legacy(hass, config) if CONF_SUBSCRIBER_ID not in config[DOMAIN]: _LOGGER.error("Configuration option '{CONF_SUBSCRIBER_ID}' required") return False # For setup of ConfigEntry below hass.data[DOMAIN][DATA_NEST_CONFIG] = config[DOMAIN] project_id = config[DOMAIN][CONF_PROJECT_ID] config_flow.NestFlowHandler.register_sdm_api(hass) config_flow.NestFlowHandler.async_register_implementation( hass, config_entry_oauth2_flow.LocalOAuth2Implementation( hass, DOMAIN, config[DOMAIN][CONF_CLIENT_ID], config[DOMAIN][CONF_CLIENT_SECRET], OAUTH2_AUTHORIZE.format(project_id=project_id), OAUTH2_TOKEN, ), ) return True class SignalUpdateCallback: """An EventCallback invoked when new events arrive from subscriber.""" def __init__(self, hass: HomeAssistant): """Initialize EventCallback.""" self._hass = hass async def async_handle_event(self, event_message: EventMessage): """Process an incoming EventMessage.""" if not event_message.resource_update_name: return device_id = event_message.resource_update_name events = event_message.resource_update_events if not events: return _LOGGER.debug("Event Update %s", events.keys()) device_registry = await self._hass.helpers.device_registry.async_get_registry() device_entry = device_registry.async_get_device({(DOMAIN, device_id)}) if not device_entry: return for event in events: event_type = EVENT_NAME_MAP.get(event) if not event_type: continue message = { "device_id": 
device_entry.id, "type": event_type, "timestamp": event_message.timestamp, } self._hass.bus.async_fire(NEST_EVENT, message) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up Nest from a config entry with dispatch between old/new flows.""" if DATA_SDM not in entry.data: return await async_setup_legacy_entry(hass, entry) implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( hass, entry ) ) config = hass.data[DOMAIN][DATA_NEST_CONFIG] session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) auth = api.AsyncConfigEntryAuth( aiohttp_client.async_get_clientsession(hass), session, config[CONF_CLIENT_ID], config[CONF_CLIENT_SECRET], ) subscriber = GoogleNestSubscriber( auth, config[CONF_PROJECT_ID], config[CONF_SUBSCRIBER_ID] ) callback = SignalUpdateCallback(hass) subscriber.set_update_callback(callback.async_handle_event) try: await subscriber.start_async() except AuthException as err: _LOGGER.debug("Subscriber authentication error: %s", err) hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_REAUTH}, data=entry.data, ) ) return False except ConfigurationException as err: _LOGGER.error("Configuration error: %s", err) subscriber.stop_async() return False except GoogleNestException as err: if DATA_NEST_UNAVAILABLE not in hass.data[DOMAIN]: _LOGGER.error("Subscriber error: %s", err) hass.data[DOMAIN][DATA_NEST_UNAVAILABLE] = True subscriber.stop_async() raise ConfigEntryNotReady from err try: await subscriber.async_get_device_manager() except GoogleNestException as err: if DATA_NEST_UNAVAILABLE not in hass.data[DOMAIN]: _LOGGER.error("Device manager error: %s", err) hass.data[DOMAIN][DATA_NEST_UNAVAILABLE] = True subscriber.stop_async() raise ConfigEntryNotReady from err hass.data[DOMAIN].pop(DATA_NEST_UNAVAILABLE, None) hass.data[DOMAIN][DATA_SUBSCRIBER] = subscriber for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" if DATA_SDM not in entry.data: # Legacy API return True _LOGGER.debug("Stopping nest subscriber") subscriber = hass.data[DOMAIN][DATA_SUBSCRIBER] subscriber.stop_async() unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(DATA_SUBSCRIBER) hass.data[DOMAIN].pop(DATA_NEST_UNAVAILABLE, None) return unload_ok
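A small self-contained sketch of the event fan-out performed in SignalUpdateCallback.async_handle_event: SDM trait event names are looked up in EVENT_NAME_MAP and anything unmapped is silently dropped. The map entries below are illustrative stand-ins; the real mapping lives in homeassistant/components/nest/events.py.

EVENT_NAME_MAP = {  # stand-in values, for illustration only
    "sdm.devices.events.CameraMotion.Motion": "camera_motion",
    "sdm.devices.events.DoorbellChime.Chime": "doorbell_chime",
}


def translate_events(resource_update_events):
    """Return the event types that would be fired on the bus."""
    return [
        EVENT_NAME_MAP[event]
        for event in resource_update_events
        if event in EVENT_NAME_MAP  # unknown trait events are skipped
    ]


assert translate_events(
    {"sdm.devices.events.CameraMotion.Motion": {}, "unknown.event": {}}
) == ["camera_motion"]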
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text


async def test_automation_with_error_in_script(hass, caplog):
    """Test automation with an error in script."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert "Service not found" in caplog.text
    assert "Traceback" not in caplog.text


async def test_automation_with_error_in_script_2(hass, caplog):
    """Test automation with an error in script."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": None, "entity_id": "hello.world"},
            }
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert "string value is None" in caplog.text


async def test_automation_restore_last_triggered_with_initial_state(hass):
    """Ensure last_triggered is restored, even when initial state is set."""
    time = dt_util.utcnow()

    mock_restore_cache(
        hass,
        (
            State("automation.hello", STATE_ON),
            State("automation.bye", STATE_ON, {"last_triggered": time}),
            State("automation.solong", STATE_OFF, {"last_triggered": time}),
        ),
    )

    config = {
        automation.DOMAIN: [
            {
                "alias": "hello",
                "initial_state": "off",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            },
            {
                "alias": "bye",
                "initial_state": "off",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            },
            {
                "alias": "solong",
                "initial_state": "on",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            },
        ]
    }
    await async_setup_component(hass, automation.DOMAIN, config)

    state = hass.states.get("automation.hello")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes["last_triggered"] is None

    state = hass.states.get("automation.bye")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes["last_triggered"] == time

    state = hass.states.get("automation.solong")
    assert state
    assert state.state == STATE_ON
    assert state.attributes["last_triggered"] == time


async def test_extraction_functions(hass):
    """Test extraction functions."""
    assert await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: [
                {
                    "alias": "test1",
                    "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"},
                    "condition": {
                        "condition": "state",
                        "entity_id": "light.condition_state",
                        "state": "on",
                    },
                    "action": [
                        {
                            "service": "test.script",
                            "data": {"entity_id": "light.in_both"},
                        },
                        {
                            "service": "test.script",
                            "data": {"entity_id": "light.in_first"},
                        },
                        {
                            "domain": "light",
                            "device_id": "device-in-both",
                            "entity_id": "light.bla",
                            "type": "turn_on",
                        },
                    ],
                },
                {
                    "alias": "test2",
                    "trigger": {
                        "platform": "device",
                        "domain": "light",
                        "type": "turned_on",
                        "entity_id": "light.trigger_2",
                        "device_id": "trigger-device-2",
                    },
                    "condition": {
                        "condition": "device",
                        "device_id": "condition-device",
                        "domain": "light",
                        "type": "is_on",
                        "entity_id": "light.bla",
                    },
                    "action": [
                        {
                            "service": "test.script",
                            "data": {"entity_id": "light.in_both"},
                        },
                        {
                            "condition": "state",
                            "entity_id": "sensor.condition",
                            "state": "100",
                        },
                        {"scene": "scene.hello"},
                        {
                            "domain": "light",
                            "device_id": "device-in-both",
                            "entity_id": "light.bla",
                            "type": "turn_on",
                        },
                        {
                            "domain": "light",
                            "device_id": "device-in-last",
                            "entity_id": "light.bla",
                            "type": "turn_on",
                        },
                    ],
                },
            ]
        },
    )

    assert set(automation.automations_with_entity(hass, "light.in_both")) == {
        "automation.test1",
        "automation.test2",
    }
    assert set(automation.entities_in_automation(hass, "automation.test1")) == {
        "sensor.trigger_1",
        "light.condition_state",
        "light.in_both",
        "light.in_first",
    }
    assert set(automation.automations_with_device(hass, "device-in-both")) == {
        "automation.test1",
        "automation.test2",
    }
    assert set(automation.devices_in_automation(hass, "automation.test2")) == {
        "trigger-device-2",
        "condition-device",
        "device-in-both",
        "device-in-last",
    }


async def test_logbook_humanify_automation_triggered_event(hass):
    """Test humanifying Automation Trigger event."""
    hass.config.components.add("recorder")
    await async_setup_component(hass, automation.DOMAIN, {})
    await async_setup_component(hass, "logbook", {})
    entity_attr_cache = logbook.EntityAttributeCache(hass)

    event1, event2 = list(
        logbook.humanify(
            hass,
            [
                MockLazyEventPartialState(
                    EVENT_AUTOMATION_TRIGGERED,
                    {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"},
                ),
                MockLazyEventPartialState(
                    EVENT_AUTOMATION_TRIGGERED,
                    {
                        ATTR_ENTITY_ID: "automation.bye",
                        ATTR_NAME: "Bye Automation",
                        ATTR_SOURCE: "source of trigger",
                    },
                ),
            ],
            entity_attr_cache,
            {},
        )
    )

    assert event1["name"] == "Hello Automation"
    assert event1["domain"] == "automation"
    assert event1["message"] == "has been triggered"
    assert event1["entity_id"] == "automation.hello"

    assert event2["name"] == "Bye Automation"
    assert event2["domain"] == "automation"
    assert event2["message"] == "has been triggered by source of trigger"
    assert event2["entity_id"] == "automation.bye"


async def test_automation_variables(hass, caplog):
    """Test automation variables."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "variables": {
                        "test_var": "defined_in_config",
                        "event_type": "{{ trigger.event.event_type }}",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {
                        "service": "test.automation",
                        "data": {
                            "value": "{{ test_var }}",
                            "event_type": "{{ event_type }}",
                        },
                    },
                },
                {
                    "variables": {
                        "test_var": "defined_in_config",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event_2"},
                    "condition": {
                        "condition": "template",
                        "value_template": "{{ trigger.event.data.pass_condition }}",
                    },
                    "action": {
                        "service": "test.automation",
                    },
                },
                {
                    "variables": {
                        "test_var": "{{ trigger.event.data.break + 1 }}",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event_3"},
                    "action": {
                        "service": "test.automation",
                    },
                },
            ]
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert calls[0].data["value"] == "defined_in_config"
    assert calls[0].data["event_type"] == "test_event"

    hass.bus.async_fire("test_event_2")
    await hass.async_block_till_done()
    assert len(calls) == 1

    hass.bus.async_fire("test_event_2", {"pass_condition": True})
    await hass.async_block_till_done()
    assert len(calls) == 2

    assert "Error rendering variables" not in caplog.text
    hass.bus.async_fire("test_event_3")
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert "Error rendering variables" in caplog.text

    hass.bus.async_fire("test_event_3", {"break": 0})
    await hass.async_block_till_done()
    assert len(calls) == 3


async def test_automation_trigger_variables(hass, caplog):
    """Test automation trigger variables."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "variables": {
                        "event_type": "{{ trigger.event.event_type }}",
                    },
                    "trigger_variables": {
                        "test_var": "defined_in_config",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {
                        "service": "test.automation",
                        "data": {
                            "value": "{{ test_var }}",
                            "event_type": "{{ event_type }}",
                        },
                    },
                },
                {
                    "variables": {
                        "event_type": "{{ trigger.event.event_type }}",
                        "test_var": "overridden_in_config",
                    },
                    "trigger_variables": {
                        "test_var": "defined_in_config",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event_2"},
                    "action": {
                        "service": "test.automation",
                        "data": {
                            "value": "{{ test_var }}",
                            "event_type": "{{ event_type }}",
                        },
                    },
                },
            ]
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert calls[0].data["value"] == "defined_in_config"
    assert calls[0].data["event_type"] == "test_event"

    hass.bus.async_fire("test_event_2")
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert calls[1].data["value"] == "overridden_in_config"
    assert calls[1].data["event_type"] == "test_event_2"

    assert "Error rendering variables" not in caplog.text


async def test_automation_bad_trigger_variables(hass, caplog):
    """Test automation trigger variables accessing hass is rejected."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger_variables": {
                        "test_var": "{{ states('foo.bar') }}",
                    },
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {
                        "service": "test.automation",
                    },
                },
            ]
        },
    )
    hass.bus.async_fire("test_event")
    assert "Use of 'states' is not supported in limited templates" in caplog.text

    await hass.async_block_till_done()
    assert len(calls) == 0


async def test_blueprint_automation(hass, calls):
    """Test blueprint automation."""
    assert await async_setup_component(
        hass,
        "automation",
        {
            "automation": {
                "use_blueprint": {
                    "path": "test_event_service.yaml",
                    "input": {
                        "trigger_event": "blueprint_event",
                        "service_to_call": "test.automation",
                    },
                }
            }
        },
    )
    hass.bus.async_fire("blueprint_event")
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert automation.entities_in_automation(hass, "automation.automation_0") == [
        "light.kitchen"
    ]


async def test_trigger_service(hass, calls):
    """Test the automation trigger service."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {
                    "service": "test.automation",
                    "data_template": {"trigger": "{{ trigger }}"},
                },
            }
        },
    )
    context = Context()
    await hass.services.async_call(
        "automation",
        "trigger",
        {"entity_id": "automation.hello"},
        blocking=True,
        context=context,
    )

    assert len(calls) == 1
    assert calls[0].data.get("trigger") == {"platform": None}
    assert calls[0].context.parent_id is context.id
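# A minimal, illustrative sketch (not part of the original file) of the
# harness behind the ``calls`` fixture used throughout this module:
# ``async_mock_service`` registers a no-op handler for ``test.automation``
# and returns the list each captured ServiceCall is appended to. The test
# name and the ``{"hello": 1}`` payload below are hypothetical.


async def test_calls_fixture_sketch(hass):
    """Illustrative only: show how captured service calls are inspected."""
    # Register the mock service and get the call log back.
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "data": {"hello": 1}},
            }
        },
    )

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()  # let the automation's script finish

    # Every invocation of test.automation lands in ``calls`` in order.
    assert len(calls) == 1
    assert calls[0].data["hello"] == 1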
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/nest/__init__.py
"""Provides a binary sensor which gets its values from a TCP socket.""" from homeassistant.components.binary_sensor import BinarySensorEntity from .sensor import CONF_VALUE_ON, PLATFORM_SCHEMA, TcpSensor PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({}) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the TCP binary sensor.""" add_entities([TcpBinarySensor(hass, config)]) class TcpBinarySensor(BinarySensorEntity, TcpSensor): """A binary sensor which is on when its state == CONF_VALUE_ON.""" required = (CONF_VALUE_ON,) @property def is_on(self): """Return true if the binary sensor is on.""" return self._state == self._config[CONF_VALUE_ON]
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/tcp/binary_sensor.py
"""Register an iFrame front end panel.""" import voluptuous as vol from homeassistant.const import CONF_ICON, CONF_URL import homeassistant.helpers.config_validation as cv DOMAIN = "panel_iframe" CONF_TITLE = "title" CONF_RELATIVE_URL_ERROR_MSG = "Invalid relative URL. Absolute path required." CONF_RELATIVE_URL_REGEX = r"\A/" CONF_REQUIRE_ADMIN = "require_admin" CONFIG_SCHEMA = vol.Schema( { DOMAIN: cv.schema_with_slug_keys( vol.Schema( { # pylint: disable=no-value-for-parameter vol.Optional(CONF_TITLE): cv.string, vol.Optional(CONF_ICON): cv.icon, vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, vol.Required(CONF_URL): vol.Any( vol.Match( CONF_RELATIVE_URL_REGEX, msg=CONF_RELATIVE_URL_ERROR_MSG ), vol.Url(), ), } ) ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the iFrame frontend panels.""" for url_path, info in config[DOMAIN].items(): hass.components.frontend.async_register_built_in_panel( "iframe", info.get(CONF_TITLE), info.get(CONF_ICON), url_path, {"url": info[CONF_URL]}, require_admin=info[CONF_REQUIRE_ADMIN], ) return True
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test that automation is not triggered on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support."
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/panel_iframe/__init__.py
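The automation tests above all lean on one capture-and-assert pattern: a mocked service records every call it receives, an event is fired on the bus, and async_block_till_done drains the event loop before anything is asserted. A minimal sketch of that pattern, assuming Home Assistant's pytest harness (the hass fixture and async_mock_service from tests/common.py, both visible in the file above); the event name and test name here are illustrative:

from homeassistant.setup import async_setup_component

from tests.common import async_mock_service


async def test_event_triggers_service(hass):
    """Fire a bus event and assert the automation ran exactly once."""
    # Register a stub service and keep a list of the calls it receives.
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        "automation",
        {
            "automation": {
                "trigger": {"platform": "event", "event_type": "demo_event"},
                "action": {"service": "test.automation"},
            }
        },
    )

    hass.bus.async_fire("demo_event")
    await hass.async_block_till_done()  # flush pending jobs before asserting
    assert len(calls) == 1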
"""Support for lights through the SmartThings cloud API.""" from __future__ import annotations import asyncio from typing import Sequence from pysmartthings import Capability from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_HS_COLOR, ATTR_TRANSITION, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_TRANSITION, LightEntity, ) import homeassistant.util.color as color_util from . import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN async def async_setup_entry(hass, config_entry, async_add_entities): """Add lights for a config entry.""" broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id] async_add_entities( [ SmartThingsLight(device) for device in broker.devices.values() if broker.any_assigned(device.device_id, "light") ], True, ) def get_capabilities(capabilities: Sequence[str]) -> Sequence[str] | None: """Return all capabilities supported if minimum required are present.""" supported = [ Capability.switch, Capability.switch_level, Capability.color_control, Capability.color_temperature, ] # Must be able to be turned on/off. if Capability.switch not in capabilities: return None # Must have one of these light_capabilities = [ Capability.color_control, Capability.color_temperature, Capability.switch_level, ] if any(capability in capabilities for capability in light_capabilities): return supported return None def convert_scale(value, value_scale, target_scale, round_digits=4): """Convert a value to a different scale.""" return round(value * target_scale / value_scale, round_digits) class SmartThingsLight(SmartThingsEntity, LightEntity): """Define a SmartThings Light.""" def __init__(self, device): """Initialize a SmartThingsLight.""" super().__init__(device) self._brightness = None self._color_temp = None self._hs_color = None self._supported_features = self._determine_features() def _determine_features(self): """Get features supported by the device.""" features = 0 # Brightness and transition if Capability.switch_level in self._device.capabilities: features |= SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION # Color Temperature if Capability.color_temperature in self._device.capabilities: features |= SUPPORT_COLOR_TEMP # Color if Capability.color_control in self._device.capabilities: features |= SUPPORT_COLOR return features async def async_turn_on(self, **kwargs) -> None: """Turn the light on.""" tasks = [] # Color temperature if self._supported_features & SUPPORT_COLOR_TEMP and ATTR_COLOR_TEMP in kwargs: tasks.append(self.async_set_color_temp(kwargs[ATTR_COLOR_TEMP])) # Color if self._supported_features & SUPPORT_COLOR and ATTR_HS_COLOR in kwargs: tasks.append(self.async_set_color(kwargs[ATTR_HS_COLOR])) if tasks: # Set temp/color first await asyncio.gather(*tasks) # Switch/brightness/transition if self._supported_features & SUPPORT_BRIGHTNESS and ATTR_BRIGHTNESS in kwargs: await self.async_set_level( kwargs[ATTR_BRIGHTNESS], kwargs.get(ATTR_TRANSITION, 0) ) else: await self._device.switch_on(set_status=True) # State is set optimistically in the commands above, therefore update # the entity state ahead of receiving the confirming push updates self.async_schedule_update_ha_state(True) async def async_turn_off(self, **kwargs) -> None: """Turn the light off.""" # Switch/transition if self._supported_features & SUPPORT_TRANSITION and ATTR_TRANSITION in kwargs: await self.async_set_level(0, int(kwargs[ATTR_TRANSITION])) else: await self._device.switch_off(set_status=True) # State is set optimistically in the commands above, therefore 
update # the entity state ahead of receiving the confirming push updates self.async_schedule_update_ha_state(True) async def async_update(self): """Update entity attributes when the device status has changed.""" # Brightness and transition if self._supported_features & SUPPORT_BRIGHTNESS: self._brightness = int( convert_scale(self._device.status.level, 100, 255, 0) ) # Color Temperature if self._supported_features & SUPPORT_COLOR_TEMP: self._color_temp = color_util.color_temperature_kelvin_to_mired( self._device.status.color_temperature ) # Color if self._supported_features & SUPPORT_COLOR: self._hs_color = ( convert_scale(self._device.status.hue, 100, 360), self._device.status.saturation, ) async def async_set_color(self, hs_color): """Set the color of the device.""" hue = convert_scale(float(hs_color[0]), 360, 100) hue = max(min(hue, 100.0), 0.0) saturation = max(min(float(hs_color[1]), 100.0), 0.0) await self._device.set_color(hue, saturation, set_status=True) async def async_set_color_temp(self, value: float): """Set the color temperature of the device.""" kelvin = color_util.color_temperature_mired_to_kelvin(value) kelvin = max(min(kelvin, 30000.0), 1.0) await self._device.set_color_temperature(kelvin, set_status=True) async def async_set_level(self, brightness: int, transition: int): """Set the brightness of the light over transition.""" level = int(convert_scale(brightness, 255, 100, 0)) # Due to rounding, set level to 1 (one) so we don't inadvertently # turn off the light when a low brightness is set. level = 1 if level == 0 and brightness > 0 else level level = max(min(level, 100), 0) duration = int(transition) await self._device.set_level(level, duration, set_status=True) @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def color_temp(self): """Return the CT color value in mireds.""" return self._color_temp @property def hs_color(self): """Return the hue and saturation color value [float, float].""" return self._hs_color @property def is_on(self) -> bool: """Return true if light is on.""" return self._device.status.switch @property def max_mireds(self): """Return the warmest color_temp that this light supports.""" # SmartThings does not expose this attribute, instead it's # implemented within each device-type handler. This value is the # lowest kelvin found supported across 20+ handlers. return 500 # 2000K @property def min_mireds(self): """Return the coldest color_temp that this light supports.""" # SmartThings does not expose this attribute, instead it's # implemented within each device-type handler. This value is the # highest kelvin found supported across 20+ handlers. return 111 # 9000K @property def supported_features(self) -> int: """Flag supported features.""" return self._supported_features
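The SmartThings light above maps Home Assistant's 0..255 brightness onto the device's 0..100 level with convert_scale, and async_set_level clamps a small nonzero brightness up to level 1 so rounding never switches the bulb off. A self-contained sketch of that arithmetic; convert_scale is copied from the file above, while ha_brightness_to_level is a hypothetical wrapper for illustration:

def convert_scale(value, value_scale, target_scale, round_digits=4):
    """Rescale value from one range to another (as in the component above)."""
    return round(value * target_scale / value_scale, round_digits)


def ha_brightness_to_level(brightness: int) -> int:
    """Convert HA brightness (0..255) to a SmartThings level (0..100)."""
    level = int(convert_scale(brightness, 255, 100, 0))
    # Guard against rounding a tiny-but-nonzero brightness down to "off".
    level = 1 if level == 0 and brightness > 0 else level
    return max(min(level, 100), 0)


assert ha_brightness_to_level(255) == 100
assert ha_brightness_to_level(1) == 1  # 1/255 rounds to 0; the guard bumps it
assert ha_brightness_to_level(0) == 0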
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/smartthings/light.py
"""Support for LimitlessLED bulbs.""" import logging from limitlessled import Color from limitlessled.bridge import Bridge from limitlessled.group.dimmer import DimmerGroup from limitlessled.group.rgbw import RgbwGroup from limitlessled.group.rgbww import RgbwwGroup from limitlessled.group.white import WhiteGroup from limitlessled.pipeline import Pipeline from limitlessled.presets import COLORLOOP import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, EFFECT_COLORLOOP, EFFECT_WHITE, FLASH_LONG, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity, ) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, CONF_TYPE, STATE_ON import homeassistant.helpers.config_validation as cv from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.util.color import color_hs_to_RGB, color_temperature_mired_to_kelvin _LOGGER = logging.getLogger(__name__) CONF_BRIDGES = "bridges" CONF_GROUPS = "groups" CONF_NUMBER = "number" CONF_VERSION = "version" CONF_FADE = "fade" DEFAULT_LED_TYPE = "rgbw" DEFAULT_PORT = 5987 DEFAULT_TRANSITION = 0 DEFAULT_VERSION = 6 DEFAULT_FADE = False LED_TYPE = ["rgbw", "rgbww", "white", "bridge-led", "dimmer"] EFFECT_NIGHT = "night" MIN_SATURATION = 10 WHITE = [0, 0] SUPPORT_LIMITLESSLED_WHITE = ( SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_TRANSITION ) SUPPORT_LIMITLESSLED_DIMMER = SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION SUPPORT_LIMITLESSLED_RGB = ( SUPPORT_BRIGHTNESS | SUPPORT_EFFECT | SUPPORT_FLASH | SUPPORT_COLOR | SUPPORT_TRANSITION ) SUPPORT_LIMITLESSLED_RGBWW = ( SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_FLASH | SUPPORT_COLOR | SUPPORT_TRANSITION ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_BRIDGES): vol.All( cv.ensure_list, [ { vol.Required(CONF_HOST): cv.string, vol.Optional( CONF_VERSION, default=DEFAULT_VERSION ): cv.positive_int, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Required(CONF_GROUPS): vol.All( cv.ensure_list, [ { vol.Required(CONF_NAME): cv.string, vol.Optional( CONF_TYPE, default=DEFAULT_LED_TYPE ): vol.In(LED_TYPE), vol.Required(CONF_NUMBER): cv.positive_int, vol.Optional( CONF_FADE, default=DEFAULT_FADE ): cv.boolean, } ], ), } ], ) } ) def rewrite_legacy(config): """Rewrite legacy configuration to new format.""" bridges = config.get(CONF_BRIDGES, [config]) new_bridges = [] for bridge_conf in bridges: groups = [] if "groups" in bridge_conf: groups = bridge_conf["groups"] else: _LOGGER.warning("Legacy configuration format detected") for i in range(1, 5): name_key = "group_%d_name" % i if name_key in bridge_conf: groups.append( { "number": i, "type": bridge_conf.get( "group_%d_type" % i, DEFAULT_LED_TYPE ), "name": bridge_conf.get(name_key), } ) new_bridges.append( { "host": bridge_conf.get(CONF_HOST), "version": bridge_conf.get(CONF_VERSION), "port": bridge_conf.get(CONF_PORT), "groups": groups, } ) return {"bridges": new_bridges} def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the LimitlessLED lights.""" # Two legacy configuration formats are supported to maintain backwards # compatibility. config = rewrite_legacy(config) # Use the expanded configuration format. 
lights = [] for bridge_conf in config.get(CONF_BRIDGES): bridge = Bridge( bridge_conf.get(CONF_HOST), port=bridge_conf.get(CONF_PORT, DEFAULT_PORT), version=bridge_conf.get(CONF_VERSION, DEFAULT_VERSION), ) for group_conf in bridge_conf.get(CONF_GROUPS): group = bridge.add_group( group_conf.get(CONF_NUMBER), group_conf.get(CONF_NAME), group_conf.get(CONF_TYPE, DEFAULT_LED_TYPE), ) lights.append(LimitlessLEDGroup(group, {"fade": group_conf[CONF_FADE]})) add_entities(lights) def state(new_state): """State decorator. Specify True (turn on) or False (turn off). """ def decorator(function): """Set up the decorator function.""" def wrapper(self, **kwargs): """Wrap a group state change.""" pipeline = Pipeline() transition_time = DEFAULT_TRANSITION if self._effect == EFFECT_COLORLOOP: self.group.stop() self._effect = None # Set transition time. if ATTR_TRANSITION in kwargs: transition_time = int(kwargs[ATTR_TRANSITION]) # Do group type-specific work. function(self, transition_time, pipeline, **kwargs) # Update state. self._is_on = new_state self.group.enqueue(pipeline) self.schedule_update_ha_state() return wrapper return decorator class LimitlessLEDGroup(LightEntity, RestoreEntity): """Representation of a LimitlessLED group.""" def __init__(self, group, config): """Initialize a group.""" if isinstance(group, WhiteGroup): self._supported = SUPPORT_LIMITLESSLED_WHITE self._effect_list = [EFFECT_NIGHT] elif isinstance(group, DimmerGroup): self._supported = SUPPORT_LIMITLESSLED_DIMMER self._effect_list = [] elif isinstance(group, RgbwGroup): self._supported = SUPPORT_LIMITLESSLED_RGB self._effect_list = [EFFECT_COLORLOOP, EFFECT_NIGHT, EFFECT_WHITE] elif isinstance(group, RgbwwGroup): self._supported = SUPPORT_LIMITLESSLED_RGBWW self._effect_list = [EFFECT_COLORLOOP, EFFECT_NIGHT, EFFECT_WHITE] self.group = group self.config = config self._is_on = False self._brightness = None self._temperature = None self._color = None self._effect = None async def async_added_to_hass(self): """Handle entity about to be added to hass event.""" await super().async_added_to_hass() last_state = await self.async_get_last_state() if last_state: self._is_on = last_state.state == STATE_ON self._brightness = last_state.attributes.get("brightness") self._temperature = last_state.attributes.get("color_temp") self._color = last_state.attributes.get("hs_color") @property def should_poll(self): """No polling needed.""" return False @property def assumed_state(self): """Return True because unable to access real state of the entity.""" return True @property def name(self): """Return the name of the group.""" return self.group.name @property def is_on(self): """Return true if device is on.""" return self._is_on @property def brightness(self): """Return the brightness property.""" if self._effect == EFFECT_NIGHT: return 1 return self._brightness @property def min_mireds(self): """Return the coldest color_temp that this light supports.""" return 154 @property def max_mireds(self): """Return the warmest color_temp that this light supports.""" return 370 @property def color_temp(self): """Return the temperature property.""" if self.hs_color is not None: return None return self._temperature @property def hs_color(self): """Return the color property.""" if self._effect == EFFECT_NIGHT: return None if self._color is None or self._color[1] == 0: return None return self._color @property def supported_features(self): """Flag supported features.""" return self._supported @property def effect(self): """Return the current effect for this
light.""" return self._effect @property def effect_list(self): """Return the list of supported effects for this light.""" return self._effect_list # pylint: disable=arguments-differ @state(False) def turn_off(self, transition_time, pipeline, **kwargs): """Turn off a group.""" if self.config[CONF_FADE]: pipeline.transition(transition_time, brightness=0.0) pipeline.off() # pylint: disable=arguments-differ @state(True) def turn_on(self, transition_time, pipeline, **kwargs): """Turn on (or adjust property of) a group.""" # The night effect does not need a turned on light if kwargs.get(ATTR_EFFECT) == EFFECT_NIGHT: if EFFECT_NIGHT in self._effect_list: pipeline.night_light() self._effect = EFFECT_NIGHT return pipeline.on() # Set up transition. args = {} if self.config[CONF_FADE] and not self.is_on and self._brightness: args["brightness"] = self.limitlessled_brightness() if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] args["brightness"] = self.limitlessled_brightness() if ATTR_HS_COLOR in kwargs and self._supported & SUPPORT_COLOR: self._color = kwargs[ATTR_HS_COLOR] # White is a special case. if self._color[1] < MIN_SATURATION: pipeline.white() self._color = WHITE else: args["color"] = self.limitlessled_color() if ATTR_COLOR_TEMP in kwargs: if self._supported & SUPPORT_COLOR: pipeline.white() self._color = WHITE if self._supported & SUPPORT_COLOR_TEMP: self._temperature = kwargs[ATTR_COLOR_TEMP] args["temperature"] = self.limitlessled_temperature() if args: pipeline.transition(transition_time, **args) # Flash. if ATTR_FLASH in kwargs and self._supported & SUPPORT_FLASH: duration = 0 if kwargs[ATTR_FLASH] == FLASH_LONG: duration = 1 pipeline.flash(duration=duration) # Add effects. if ATTR_EFFECT in kwargs and self._effect_list: if kwargs[ATTR_EFFECT] == EFFECT_COLORLOOP: self._effect = EFFECT_COLORLOOP pipeline.append(COLORLOOP) if kwargs[ATTR_EFFECT] == EFFECT_WHITE: pipeline.white() self._color = WHITE def limitlessled_temperature(self): """Convert Home Assistant color temperature units to percentage.""" max_kelvin = color_temperature_mired_to_kelvin(self.min_mireds) min_kelvin = color_temperature_mired_to_kelvin(self.max_mireds) width = max_kelvin - min_kelvin kelvin = color_temperature_mired_to_kelvin(self._temperature) temperature = (kelvin - min_kelvin) / width return max(0, min(1, temperature)) def limitlessled_brightness(self): """Convert Home Assistant brightness units to percentage.""" return self._brightness / 255 def limitlessled_color(self): """Convert Home Assistant HS list to RGB Color tuple.""" return Color(*color_hs_to_RGB(*tuple(self._color)))
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
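The tests above all share one capture pattern: the `calls` fixture registers a mock `test.automation` service, each test fires an event, drains the event loop with `hass.async_block_till_done()`, and then asserts on the recorded calls. A minimal sketch of that skeleton, using a hypothetical event name (`sketch_event`) and test name, and relying on the same `hass` and `calls` fixtures defined above:

# Minimal sketch of the shared test skeleton; test name and event name
# are hypothetical, fixtures come from the file above.
import homeassistant.components.automation as automation
from homeassistant.setup import async_setup_component


async def test_single_trigger_sketch(hass, calls):
    """Fire one event and assert the mocked service was called once."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {"platform": "event", "event_type": "sketch_event"},
                "action": {"service": "test.automation"},
            }
        },
    )

    hass.bus.async_fire("sketch_event")
    await hass.async_block_till_done()  # let the automation's action run
    assert len(calls) == 1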
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/limitlessled/light.py
"""Support for MyQ-Enabled Garage Doors.""" import logging from pymyq.const import ( DEVICE_STATE as MYQ_DEVICE_STATE, DEVICE_STATE_ONLINE as MYQ_DEVICE_STATE_ONLINE, DEVICE_TYPE_GATE as MYQ_DEVICE_TYPE_GATE, KNOWN_MODELS, MANUFACTURER, ) from pymyq.errors import MyQError from homeassistant.components.cover import ( DEVICE_CLASS_GARAGE, DEVICE_CLASS_GATE, SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity, ) from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, MYQ_COORDINATOR, MYQ_GATEWAY, MYQ_TO_HASS _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up mysq covers.""" data = hass.data[DOMAIN][config_entry.entry_id] myq = data[MYQ_GATEWAY] coordinator = data[MYQ_COORDINATOR] async_add_entities( [MyQDevice(coordinator, device) for device in myq.covers.values()], True ) class MyQDevice(CoordinatorEntity, CoverEntity): """Representation of a MyQ cover.""" def __init__(self, coordinator, device): """Initialize with API object, device id.""" super().__init__(coordinator) self._device = device @property def device_class(self): """Define this cover as a garage door.""" device_type = self._device.device_type if device_type is not None and device_type == MYQ_DEVICE_TYPE_GATE: return DEVICE_CLASS_GATE return DEVICE_CLASS_GARAGE @property def name(self): """Return the name of the garage door if any.""" return self._device.name @property def available(self): """Return if the device is online.""" if not self.coordinator.last_update_success: return False # Not all devices report online so assume True if its missing return self._device.device_json[MYQ_DEVICE_STATE].get( MYQ_DEVICE_STATE_ONLINE, True ) @property def is_closed(self): """Return true if cover is closed, else False.""" return MYQ_TO_HASS.get(self._device.state) == STATE_CLOSED @property def is_closing(self): """Return if the cover is closing or not.""" return MYQ_TO_HASS.get(self._device.state) == STATE_CLOSING @property def is_open(self): """Return if the cover is opening or not.""" return MYQ_TO_HASS.get(self._device.state) == STATE_OPEN @property def is_opening(self): """Return if the cover is opening or not.""" return MYQ_TO_HASS.get(self._device.state) == STATE_OPENING @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE @property def unique_id(self): """Return a unique, Home Assistant friendly identifier for this entity.""" return self._device.device_id async def async_close_cover(self, **kwargs): """Issue close command to cover.""" if self.is_closing or self.is_closed: return try: wait_task = await self._device.close(wait_for_state=False) except MyQError as err: _LOGGER.error( "Closing of cover %s failed with error: %s", self._device.name, str(err) ) return # Write closing state to HASS self.async_write_ha_state() if not await wait_task: _LOGGER.error("Closing of cover %s failed", self._device.name) # Write final state to HASS self.async_write_ha_state() async def async_open_cover(self, **kwargs): """Issue open command to cover.""" if self.is_opening or self.is_open: return try: wait_task = await self._device.open(wait_for_state=False) except MyQError as err: _LOGGER.error( "Opening of cover %s failed with error: %s", self._device.name, str(err) ) return # Write opening state to HASS self.async_write_ha_state() if not await wait_task: _LOGGER.error("Opening of cover %s failed", self._device.name) # 
Write final state to HASS self.async_write_ha_state() @property def device_info(self): """Return the device_info of the device.""" device_info = { "identifiers": {(DOMAIN, self._device.device_id)}, "name": self._device.name, "manufacturer": MANUFACTURER, "sw_version": self._device.firmware_version, } model = KNOWN_MODELS.get(self._device.device_id[2:4]) if model: device_info["model"] = model if self._device.parent_device_id: device_info["via_device"] = (DOMAIN, self._device.parent_device_id) return device_info async def async_added_to_hass(self): """Subscribe to updates.""" self.async_on_remove( self.coordinator.async_add_listener(self.async_write_ha_state) )
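async_close_cover and async_open_cover above share a two-phase pattern: issue the command with wait_for_state=False, write the transitional state to Home Assistant immediately, then await the returned task and write the final state. Below is a standalone sketch of that pattern using plain asyncio and a hypothetical Door class (not the pymyq API), to show why the intermediate state write matters:

# Standalone sketch of the two-phase command pattern; Door is a made-up
# stand-in for the real device object.
import asyncio


class Door:
    def __init__(self):
        self.state = "open"

    async def close(self, wait_for_state=False):
        async def _wait():
            await asyncio.sleep(0.1)  # stand-in for the hardware moving
            self.state = "closed"
            return True

        # Kick off the slow completion in the background ...
        task = asyncio.ensure_future(_wait())
        # ... and report the transitional state right away.
        self.state = "closing"
        if wait_for_state:
            return await task
        return task


async def main():
    door = Door()
    wait_task = await door.close(wait_for_state=False)
    # Here the cover entity would call async_write_ha_state() so the UI
    # shows "closing" while the door is still moving.
    print(door.state)  # closing
    ok = await wait_task
    # Second state write happens here, once the final state is known.
    print(ok, door.state)  # True closed


asyncio.run(main())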
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
repo_name: adrienbrault/home-assistant
test_path: tests/components/automation/test_init.py
code_path: homeassistant/components/myq/cover.py
"""Support for w800rf32 binary sensors.""" import logging import W800rf32 as w800 import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_DEVICES, CONF_NAME from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, event as evt from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.util import dt as dt_util from . import W800RF32_DEVICE _LOGGER = logging.getLogger(__name__) CONF_OFF_DELAY = "off_delay" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICES): { cv.string: vol.Schema( { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, vol.Optional(CONF_OFF_DELAY): vol.All( cv.time_period, cv.positive_timedelta ), } ) } }, extra=vol.ALLOW_EXTRA, ) async def async_setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Binary Sensor platform to w800rf32.""" binary_sensors = [] # device_id --> "c1 or a3" X10 device. entity (type dictionary) # --> name, device_class etc for device_id, entity in config[CONF_DEVICES].items(): _LOGGER.debug( "Add %s w800rf32.binary_sensor (class %s)", entity[CONF_NAME], entity.get(CONF_DEVICE_CLASS), ) device = W800rf32BinarySensor( device_id, entity.get(CONF_NAME), entity.get(CONF_DEVICE_CLASS), entity.get(CONF_OFF_DELAY), ) binary_sensors.append(device) add_entities(binary_sensors) class W800rf32BinarySensor(BinarySensorEntity): """A representation of a w800rf32 binary sensor.""" def __init__(self, device_id, name, device_class=None, off_delay=None): """Initialize the w800rf32 sensor.""" self._signal = W800RF32_DEVICE.format(device_id) self._name = name self._device_class = device_class self._off_delay = off_delay self._state = False self._delay_listener = None @callback def _off_delay_listener(self, now): """Switch device off after a delay.""" self._delay_listener = None self.update_state(False) @property def name(self): """Return the device name.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def device_class(self): """Return the sensor class.""" return self._device_class @property def is_on(self): """Return true if the sensor state is True.""" return self._state @callback def binary_sensor_update(self, event): """Call for control updates from the w800rf32 gateway.""" if not isinstance(event, w800.W800rf32Event): return dev_id = event.device command = event.command _LOGGER.debug( "BinarySensor update (Device ID: %s Command %s ...)", dev_id, command ) # Update the w800rf32 device state if command in ("On", "Off"): is_on = command == "On" self.update_state(is_on) if self.is_on and self._off_delay is not None and self._delay_listener is None: self._delay_listener = evt.async_track_point_in_time( self.hass, self._off_delay_listener, dt_util.utcnow() + self._off_delay ) def update_state(self, state): """Update the state of the device.""" self._state = state self.async_write_ha_state() async def async_added_to_hass(self): """Register update callback.""" async_dispatcher_connect(self.hass, self._signal, self.binary_sensor_update)
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
repo_name: adrienbrault/home-assistant
test_path: tests/components/automation/test_init.py
code_path: homeassistant/components/w800rf32/binary_sensor.py
"""Support for Freebox devices (Freebox v6 and Freebox mini 4K).""" from __future__ import annotations import logging from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import DATA_RATE_KILOBYTES_PER_SECOND from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.dt as dt_util from .const import ( CALL_SENSORS, CONNECTION_SENSORS, DISK_PARTITION_SENSORS, DOMAIN, SENSOR_DEVICE_CLASS, SENSOR_ICON, SENSOR_NAME, SENSOR_UNIT, TEMPERATURE_SENSOR_TEMPLATE, ) from .router import FreeboxRouter _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the sensors.""" router = hass.data[DOMAIN][entry.unique_id] entities = [] _LOGGER.debug( "%s - %s - %s temperature sensors", router.name, router.mac, len(router.sensors_temperature), ) for sensor_name in router.sensors_temperature: entities.append( FreeboxSensor( router, sensor_name, {**TEMPERATURE_SENSOR_TEMPLATE, SENSOR_NAME: f"Freebox {sensor_name}"}, ) ) for sensor_key in CONNECTION_SENSORS: entities.append( FreeboxSensor(router, sensor_key, CONNECTION_SENSORS[sensor_key]) ) for sensor_key in CALL_SENSORS: entities.append(FreeboxCallSensor(router, sensor_key, CALL_SENSORS[sensor_key])) _LOGGER.debug("%s - %s - %s disk(s)", router.name, router.mac, len(router.disks)) for disk in router.disks.values(): for partition in disk["partitions"]: for sensor_key in DISK_PARTITION_SENSORS: entities.append( FreeboxDiskSensor( router, disk, partition, sensor_key, DISK_PARTITION_SENSORS[sensor_key], ) ) async_add_entities(entities, True) class FreeboxSensor(SensorEntity): """Representation of a Freebox sensor.""" def __init__( self, router: FreeboxRouter, sensor_type: str, sensor: dict[str, any] ) -> None: """Initialize a Freebox sensor.""" self._state = None self._router = router self._sensor_type = sensor_type self._name = sensor[SENSOR_NAME] self._unit = sensor[SENSOR_UNIT] self._icon = sensor[SENSOR_ICON] self._device_class = sensor[SENSOR_DEVICE_CLASS] self._unique_id = f"{self._router.mac} {self._name}" @callback def async_update_state(self) -> None: """Update the Freebox sensor.""" state = self._router.sensors[self._sensor_type] if self._unit == DATA_RATE_KILOBYTES_PER_SECOND: self._state = round(state / 1000, 2) else: self._state = state @property def unique_id(self) -> str: """Return a unique ID.""" return self._unique_id @property def name(self) -> str: """Return the name.""" return self._name @property def state(self) -> str: """Return the state.""" return self._state @property def unit_of_measurement(self) -> str: """Return the unit.""" return self._unit @property def icon(self) -> str: """Return the icon.""" return self._icon @property def device_class(self) -> str: """Return the device_class.""" return self._device_class @property def device_info(self) -> dict[str, any]: """Return the device information.""" return self._router.device_info @property def should_poll(self) -> bool: """No polling needed.""" return False @callback def async_on_demand_update(self): """Update state.""" self.async_update_state() self.async_write_ha_state() async def async_added_to_hass(self): """Register state update callback.""" self.async_update_state() self.async_on_remove( async_dispatcher_connect( self.hass, self._router.signal_sensor_update, 
self.async_on_demand_update, ) ) class FreeboxCallSensor(FreeboxSensor): """Representation of a Freebox call sensor.""" def __init__( self, router: FreeboxRouter, sensor_type: str, sensor: dict[str, any] ) -> None: """Initialize a Freebox call sensor.""" super().__init__(router, sensor_type, sensor) self._call_list_for_type = [] @callback def async_update_state(self) -> None: """Update the Freebox call sensor.""" self._call_list_for_type = [] if self._router.call_list: for call in self._router.call_list: if not call["new"]: continue if call["type"] == self._sensor_type: self._call_list_for_type.append(call) self._state = len(self._call_list_for_type) @property def extra_state_attributes(self) -> dict[str, any]: """Return device specific state attributes.""" return { dt_util.utc_from_timestamp(call["datetime"]).isoformat(): call["name"] for call in self._call_list_for_type } class FreeboxDiskSensor(FreeboxSensor): """Representation of a Freebox disk sensor.""" def __init__( self, router: FreeboxRouter, disk: dict[str, any], partition: dict[str, any], sensor_type: str, sensor: dict[str, any], ) -> None: """Initialize a Freebox disk sensor.""" super().__init__(router, sensor_type, sensor) self._disk = disk self._partition = partition self._name = f"{partition['label']} {sensor[SENSOR_NAME]}" self._unique_id = f"{self._router.mac} {sensor_type} {self._disk['id']} {self._partition['id']}" @property def device_info(self) -> dict[str, any]: """Return the device information.""" return { "identifiers": {(DOMAIN, self._disk["id"])}, "name": f"Disk {self._disk['id']}", "model": self._disk["model"], "sw_version": self._disk["firmware"], "via_device": ( DOMAIN, self._router.mac, ), } @callback def async_update_state(self) -> None: """Update the Freebox disk sensor.""" self._state = round( self._partition["free_bytes"] * 100 / self._partition["total_bytes"], 2 )
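The Freebox entities above are push-updated: should_poll returns False and state writes are driven by a dispatcher signal emitted by the router object. A minimal, self-contained sketch of that pattern follows; the signal name and the router's sensors mapping are invented for illustration and are not part of the Freebox integration.

# Minimal sketch of the dispatcher-driven update pattern used above.
# SIGNAL and the router's `sensors` dict are illustrative assumptions.
from homeassistant.components.sensor import SensorEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect

SIGNAL = "example_router_sensors_updated"


class ExamplePushSensor(SensorEntity):
    """Sensor whose state is pushed by a router-level update signal."""

    def __init__(self, router, key):
        self._router = router  # object exposing a `sensors` dict
        self._key = key
        self._state = None

    @property
    def should_poll(self):
        """Disable polling; updates arrive via the dispatcher."""
        return False

    @property
    def state(self):
        return self._state

    @callback
    def _handle_update(self):
        """Recompute state from the router cache and write it out."""
        self._state = self._router.sensors.get(self._key)
        self.async_write_ha_state()

    async def async_added_to_hass(self):
        """Subscribe to router updates; unsubscribe on removal."""
        self.async_on_remove(
            async_dispatcher_connect(self.hass, SIGNAL, self._handle_update)
        )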
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
    await hass.async_block_till_done()
    assert len(test_reload_event) == 1

    assert hass.states.get("automation.hello") is None
    assert hass.states.get("automation.bye") is not None
    listeners = hass.bus.async_listeners()
    assert listeners.get("test_event") is None
    assert listeners.get("test_event2") == 1

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1

    hass.bus.async_fire("test_event2")
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert calls[1].data.get("event") == "test_event2"


async def test_reload_config_when_invalid_config(hass, calls):
    """Test the reload config service handling invalid config."""
    with assert_setup_component(1, automation.DOMAIN):
        assert await async_setup_component(
            hass,
            automation.DOMAIN,
            {
                automation.DOMAIN: {
                    "alias": "hello",
                    "trigger": {"platform": "event", "event_type": "test_event"},
                    "action": {
                        "service": "test.automation",
                        "data_template": {"event": "{{ trigger.event.event_type }}"},
                    },
                }
            },
        )
    assert hass.states.get("automation.hello") is not None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert calls[0].data.get("event") == "test_event"

    with patch(
        "homeassistant.config.load_yaml_config_file",
        autospec=True,
        return_value={automation.DOMAIN: "not valid"},
    ):
        await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True)

    assert hass.states.get("automation.hello") is None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_reload_config_handles_load_fails(hass, calls):
    """Test the reload config service gracefully handles a config load failure."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {
                    "service": "test.automation",
                    "data_template": {"event": "{{ trigger.event.event_type }}"},
                },
            }
        },
    )
    assert hass.states.get("automation.hello") is not None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert calls[0].data.get("event") == "test_event"

    with patch(
        "homeassistant.config.load_yaml_config_file",
        side_effect=HomeAssistantError("bla"),
    ):
        await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True)

    assert hass.states.get("automation.hello") is not None

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 2


@pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"])
async def test_automation_stops(hass, calls, service):
    """Test that turning off / reloading stops any running actions as appropriate."""
    entity_id = "automation.hello"
    test_entity = "test.entity"

    config = {
        automation.DOMAIN: {
            "alias": "hello",
            "trigger": {"platform": "event", "event_type": "test_event"},
            "action": [
                {"event": "running"},
                {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"},
                {"service": "test.automation"},
            ],
        }
    }
    assert await async_setup_component(hass, automation.DOMAIN, config)

    running = asyncio.Event()

    @callback
    def running_cb(event):
        running.set()

    hass.bus.async_listen_once("running", running_cb)
    hass.states.async_set(test_entity, "hello")

    hass.bus.async_fire("test_event")
    await running.wait()

    if service == "turn_off_stop":
        await hass.services.async_call(
            automation.DOMAIN,
            SERVICE_TURN_OFF,
            {ATTR_ENTITY_ID: entity_id},
            blocking=True,
        )
    elif service == "turn_off_no_stop":
        await hass.services.async_call(
            automation.DOMAIN,
            SERVICE_TURN_OFF,
            {ATTR_ENTITY_ID: entity_id,
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass):
    """Test initial value on and restored state is turned off."""
    calls = async_mock_service(hass, "test", "automation")
    mock_restore_cache(hass, (State("automation.hello", STATE_OFF),))

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "initial_state": "on",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_no_initial_value_and_restore_off(hass):
    """Test no initial value set and restored state is turned off."""
    calls = async_mock_service(hass, "test", "automation")
    mock_restore_cache(hass, (State("automation.hello", STATE_OFF),))

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert not automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 0


async def test_automation_is_on_if_no_initial_state_or_restore(hass):
    """Test initial value is on when no initial state or restored state."""
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1


async def test_automation_not_trigger_on_bootstrap(hass):
    """Test that automations are not triggered during bootstrap."""
    hass.state = CoreState.not_running
    calls = async_mock_service(hass, "test", "automation")

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation", "entity_id": "hello.world"},
            }
        },
    )
    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 0

    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    await hass.async_block_till_done()
    assert automation.is_on(hass, "automation.hello")

    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID)


async def test_automation_bad_trigger(hass, caplog):
    """Test bad trigger configuration."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "hello",
                "trigger": {"platform": "automation"},
                "action": [],
            }
        },
    )
    assert "Integration 'automation' does not provide trigger support."
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
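
# Hedged usage note (illustrative, not part of the test suite above): every test
# leans on the `calls` fixture, where async_mock_service(hass, "test", "automation")
# registers a mock "test.automation" service and returns a list that grows with each
# captured ServiceCall, so assertions reduce to checking len(calls) and calls[n].data:
#
#     calls = async_mock_service(hass, "test", "automation")
#     hass.bus.async_fire("test_event")
#     await hass.async_block_till_done()
#     assert len(calls) == 1
#     assert calls[0].data.get("some_key") == "expected"  # hypothetical payload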
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/freebox/sensor.py
"""Support for Spider switches.""" from homeassistant.components.switch import SwitchEntity from .const import DOMAIN async def async_setup_entry(hass, config, async_add_entities): """Initialize a Spider Power Plug.""" api = hass.data[DOMAIN][config.entry_id] async_add_entities( [ SpiderPowerPlug(api, entity) for entity in await hass.async_add_executor_job(api.get_power_plugs) ] ) class SpiderPowerPlug(SwitchEntity): """Representation of a Spider Power Plug.""" def __init__(self, api, power_plug): """Initialize the Spider Power Plug.""" self.api = api self.power_plug = power_plug @property def device_info(self): """Return the device_info of the device.""" return { "identifiers": {(DOMAIN, self.power_plug.id)}, "name": self.power_plug.name, "manufacturer": self.power_plug.manufacturer, "model": self.power_plug.model, } @property def unique_id(self): """Return the ID of this switch.""" return self.power_plug.id @property def name(self): """Return the name of the switch if any.""" return self.power_plug.name @property def current_power_w(self): """Return the current power usage in W.""" return round(self.power_plug.current_energy_consumption) @property def today_energy_kwh(self): """Return the current power usage in Kwh.""" return round(self.power_plug.today_energy_consumption / 1000, 2) @property def is_on(self): """Return true if switch is on. Standby is on.""" return self.power_plug.is_on @property def available(self): """Return true if switch is available.""" return self.power_plug.is_available def turn_on(self, **kwargs): """Turn device on.""" self.power_plug.turn_on() def turn_off(self, **kwargs): """Turn device off.""" self.power_plug.turn_off() def update(self): """Get the latest data.""" self.power_plug = self.api.get_power_plug(self.unique_id)
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/spider/switch.py
"""Asuswrt status sensors.""" from __future__ import annotations import logging from numbers import Number from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import DATA_GIGABYTES, DATA_RATE_MEGABITS_PER_SECOND from homeassistant.helpers.typing import HomeAssistantType from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, ) from .const import ( DATA_ASUSWRT, DOMAIN, SENSOR_CONNECTED_DEVICE, SENSOR_RX_BYTES, SENSOR_RX_RATES, SENSOR_TX_BYTES, SENSOR_TX_RATES, ) from .router import KEY_COORDINATOR, KEY_SENSORS, AsusWrtRouter DEFAULT_PREFIX = "Asuswrt" SENSOR_DEVICE_CLASS = "device_class" SENSOR_ICON = "icon" SENSOR_NAME = "name" SENSOR_UNIT = "unit" SENSOR_FACTOR = "factor" SENSOR_DEFAULT_ENABLED = "default_enabled" UNIT_DEVICES = "Devices" CONNECTION_SENSORS = { SENSOR_CONNECTED_DEVICE: { SENSOR_NAME: "Devices Connected", SENSOR_UNIT: UNIT_DEVICES, SENSOR_FACTOR: 0, SENSOR_ICON: "mdi:router-network", SENSOR_DEVICE_CLASS: None, SENSOR_DEFAULT_ENABLED: True, }, SENSOR_RX_RATES: { SENSOR_NAME: "Download Speed", SENSOR_UNIT: DATA_RATE_MEGABITS_PER_SECOND, SENSOR_FACTOR: 125000, SENSOR_ICON: "mdi:download-network", SENSOR_DEVICE_CLASS: None, }, SENSOR_TX_RATES: { SENSOR_NAME: "Upload Speed", SENSOR_UNIT: DATA_RATE_MEGABITS_PER_SECOND, SENSOR_FACTOR: 125000, SENSOR_ICON: "mdi:upload-network", SENSOR_DEVICE_CLASS: None, }, SENSOR_RX_BYTES: { SENSOR_NAME: "Download", SENSOR_UNIT: DATA_GIGABYTES, SENSOR_FACTOR: 1000000000, SENSOR_ICON: "mdi:download", SENSOR_DEVICE_CLASS: None, }, SENSOR_TX_BYTES: { SENSOR_NAME: "Upload", SENSOR_UNIT: DATA_GIGABYTES, SENSOR_FACTOR: 1000000000, SENSOR_ICON: "mdi:upload", SENSOR_DEVICE_CLASS: None, }, } _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the sensors.""" router: AsusWrtRouter = hass.data[DOMAIN][entry.entry_id][DATA_ASUSWRT] entities = [] for sensor_data in router.sensors_coordinator.values(): coordinator = sensor_data[KEY_COORDINATOR] sensors = sensor_data[KEY_SENSORS] for sensor_key in sensors: if sensor_key in CONNECTION_SENSORS: entities.append( AsusWrtSensor( coordinator, router, sensor_key, CONNECTION_SENSORS[sensor_key] ) ) async_add_entities(entities, True) class AsusWrtSensor(CoordinatorEntity, SensorEntity): """Representation of a AsusWrt sensor.""" def __init__( self, coordinator: DataUpdateCoordinator, router: AsusWrtRouter, sensor_type: str, sensor: dict[str, any], ) -> None: """Initialize a AsusWrt sensor.""" super().__init__(coordinator) self._router = router self._sensor_type = sensor_type self._name = f"{DEFAULT_PREFIX} {sensor[SENSOR_NAME]}" self._unique_id = f"{DOMAIN} {self._name}" self._unit = sensor[SENSOR_UNIT] self._factor = sensor[SENSOR_FACTOR] self._icon = sensor[SENSOR_ICON] self._device_class = sensor[SENSOR_DEVICE_CLASS] self._default_enabled = sensor.get(SENSOR_DEFAULT_ENABLED, False) @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" return self._default_enabled @property def state(self) -> str: """Return current state.""" state = self.coordinator.data.get(self._sensor_type) if state is None: return None if self._factor and isinstance(state, Number): return round(state / self._factor, 2) return state @property def unique_id(self) -> str: """Return a unique ID.""" return self._unique_id @property def 
name(self) -> str: """Return the name.""" return self._name @property def unit_of_measurement(self) -> str: """Return the unit.""" return self._unit @property def icon(self) -> str: """Return the icon.""" return self._icon @property def device_class(self) -> str: """Return the device_class.""" return self._device_class @property def extra_state_attributes(self) -> dict[str, Any]: """Return the attributes.""" return {"hostname": self._router.host} @property def device_info(self) -> dict[str, Any]: """Return the device information.""" return self._router.device_info
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/asuswrt/sensor.py
"""Support for exposing NX584 elements as sensors.""" import logging import threading import time from nx584 import client as nx584_client import requests import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASS_OPENING, DEVICE_CLASSES, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import CONF_HOST, CONF_PORT import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_EXCLUDE_ZONES = "exclude_zones" CONF_ZONE_TYPES = "zone_types" DEFAULT_HOST = "localhost" DEFAULT_PORT = "5007" DEFAULT_SSL = False ZONE_TYPES_SCHEMA = vol.Schema({cv.positive_int: vol.In(DEVICE_CLASSES)}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_EXCLUDE_ZONES, default=[]): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_ZONE_TYPES, default={}): ZONE_TYPES_SCHEMA, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the NX584 binary sensor platform.""" host = config.get(CONF_HOST) port = config.get(CONF_PORT) exclude = config.get(CONF_EXCLUDE_ZONES) zone_types = config.get(CONF_ZONE_TYPES) try: client = nx584_client.Client(f"http://{host}:{port}") zones = client.list_zones() except requests.exceptions.ConnectionError as ex: _LOGGER.error("Unable to connect to NX584: %s", str(ex)) return False version = [int(v) for v in client.get_version().split(".")] if version < [1, 1]: _LOGGER.error("NX584 is too old to use for sensors (>=0.2 required)") return False zone_sensors = { zone["number"]: NX584ZoneSensor( zone, zone_types.get(zone["number"], DEVICE_CLASS_OPENING) ) for zone in zones if zone["number"] not in exclude } if zone_sensors: add_entities(zone_sensors.values()) watcher = NX584Watcher(client, zone_sensors) watcher.start() else: _LOGGER.warning("No zones found on NX584") return True class NX584ZoneSensor(BinarySensorEntity): """Representation of a NX584 zone as a sensor.""" def __init__(self, zone, zone_type): """Initialize the nx594 binary sensor.""" self._zone = zone self._zone_type = zone_type @property def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" return self._zone_type @property def should_poll(self): """No polling needed.""" return False @property def name(self): """Return the name of the binary sensor.""" return self._zone["name"] @property def is_on(self): """Return true if the binary sensor is on.""" # True means "faulted" or "open" or "abnormal state" return self._zone["state"] @property def extra_state_attributes(self): """Return the state attributes.""" return {"zone_number": self._zone["number"]} class NX584Watcher(threading.Thread): """Event listener thread to process NX584 events.""" def __init__(self, client, zone_sensors): """Initialize NX584 watcher thread.""" super().__init__() self.daemon = True self._client = client self._zone_sensors = zone_sensors def _process_zone_event(self, event): zone = event["zone"] zone_sensor = self._zone_sensors.get(zone) # pylint: disable=protected-access if not zone_sensor: return zone_sensor._zone["state"] = event["zone_state"] zone_sensor.schedule_update_ha_state() def _process_events(self, events): for event in events: if event.get("type") == "zone_status": self._process_zone_event(event) def _run(self): """Throw away any existing events so we don't replay history.""" self._client.get_events() while True: events = self._client.get_events() if events: 
self._process_events(events) def run(self): """Run the watcher.""" while True: try: self._run() except requests.exceptions.ConnectionError: _LOGGER.error("Failed to reach NX584 server") time.sleep(10)
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test that automation is not triggered during bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support."
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
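Nearly every test in the file above follows the same skeleton: register a mock service, set up an automation with async_setup_component, fire the trigger event, and assert against the recorded calls. Below is a minimal self-contained sketch of that skeleton, reusing only the fixtures and helpers already imported at the top of this file; the test name test_event_fires_service is illustrative, not part of the suite.

import homeassistant.components.automation as automation
from homeassistant.setup import async_setup_component

from tests.common import async_mock_service


async def test_event_fires_service(hass):
    """Fire the trigger event once and expect exactly one service call."""
    # Record every call made to the (otherwise nonexistent) test.automation service.
    calls = async_mock_service(hass, "test", "automation")
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            }
        },
    )
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()  # let the automation's script finish
    assert len(calls) == 1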
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/nx584/binary_sensor.py
"""Representation of Z-Wave binary sensors.""" from __future__ import annotations import logging from typing import Callable, TypedDict from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import CommandClass from homeassistant.components.binary_sensor import ( DEVICE_CLASS_BATTERY, DEVICE_CLASS_DOOR, DEVICE_CLASS_GAS, DEVICE_CLASS_HEAT, DEVICE_CLASS_LOCK, DEVICE_CLASS_MOISTURE, DEVICE_CLASS_MOTION, DEVICE_CLASS_PROBLEM, DEVICE_CLASS_SAFETY, DEVICE_CLASS_SMOKE, DEVICE_CLASS_SOUND, DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .const import DATA_CLIENT, DATA_UNSUBSCRIBE, DOMAIN from .discovery import ZwaveDiscoveryInfo from .entity import ZWaveBaseEntity LOGGER = logging.getLogger(__name__) NOTIFICATION_SMOKE_ALARM = 1 NOTIFICATION_CARBON_MONOOXIDE = 2 NOTIFICATION_CARBON_DIOXIDE = 3 NOTIFICATION_HEAT = 4 NOTIFICATION_WATER = 5 NOTIFICATION_ACCESS_CONTROL = 6 NOTIFICATION_HOME_SECURITY = 7 NOTIFICATION_POWER_MANAGEMENT = 8 NOTIFICATION_SYSTEM = 9 NOTIFICATION_EMERGENCY = 10 NOTIFICATION_CLOCK = 11 NOTIFICATION_APPLIANCE = 12 NOTIFICATION_HOME_HEALTH = 13 NOTIFICATION_SIREN = 14 NOTIFICATION_WATER_VALVE = 15 NOTIFICATION_WEATHER = 16 NOTIFICATION_IRRIGATION = 17 NOTIFICATION_GAS = 18 class NotificationSensorMapping(TypedDict, total=False): """Represent a notification sensor mapping dict type.""" type: int # required states: list[str] device_class: str enabled: bool # Mappings for Notification sensors # https://github.com/zwave-js/node-zwave-js/blob/master/packages/config/config/notifications.json NOTIFICATION_SENSOR_MAPPINGS: list[NotificationSensorMapping] = [ { # NotificationType 1: Smoke Alarm - State Id's 1 and 2 - Smoke detected "type": NOTIFICATION_SMOKE_ALARM, "states": ["1", "2"], "device_class": DEVICE_CLASS_SMOKE, }, { # NotificationType 1: Smoke Alarm - All other State Id's "type": NOTIFICATION_SMOKE_ALARM, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 2: Carbon Monoxide - State Id's 1 and 2 "type": NOTIFICATION_CARBON_MONOOXIDE, "states": ["1", "2"], "device_class": DEVICE_CLASS_GAS, }, { # NotificationType 2: Carbon Monoxide - All other State Id's "type": NOTIFICATION_CARBON_MONOOXIDE, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 3: Carbon Dioxide - State Id's 1 and 2 "type": NOTIFICATION_CARBON_DIOXIDE, "states": ["1", "2"], "device_class": DEVICE_CLASS_GAS, }, { # NotificationType 3: Carbon Dioxide - All other State Id's "type": NOTIFICATION_CARBON_DIOXIDE, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 4: Heat - State Id's 1, 2, 5, 6 (heat/underheat) "type": NOTIFICATION_HEAT, "states": ["1", "2", "5", "6"], "device_class": DEVICE_CLASS_HEAT, }, { # NotificationType 4: Heat - All other State Id's "type": NOTIFICATION_HEAT, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 5: Water - State Id's 1, 2, 3, 4 "type": NOTIFICATION_WATER, "states": ["1", "2", "3", "4"], "device_class": DEVICE_CLASS_MOISTURE, }, { # NotificationType 5: Water - All other State Id's "type": NOTIFICATION_WATER, "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 6: Access Control - State Id's 1, 2, 3, 4 (Lock) "type": NOTIFICATION_ACCESS_CONTROL, "states": ["1", "2", "3", "4"], "device_class": DEVICE_CLASS_LOCK, }, { # NotificationType 6: Access Control - State Id 16 (door/window open) "type": 
NOTIFICATION_ACCESS_CONTROL, "states": ["22"], "device_class": DEVICE_CLASS_DOOR, }, { # NotificationType 6: Access Control - State Id 23 (door/window closed) "type": NOTIFICATION_ACCESS_CONTROL, "states": ["23"], "enabled": False, }, { # NotificationType 7: Home Security - State Id's 1, 2 (intrusion) "type": NOTIFICATION_HOME_SECURITY, "states": ["1", "2"], "device_class": DEVICE_CLASS_SAFETY, }, { # NotificationType 7: Home Security - State Id's 3, 4, 9 (tampering) "type": NOTIFICATION_HOME_SECURITY, "states": ["3", "4", "9"], "device_class": DEVICE_CLASS_SAFETY, }, { # NotificationType 7: Home Security - State Id's 5, 6 (glass breakage) "type": NOTIFICATION_HOME_SECURITY, "states": ["5", "6"], "device_class": DEVICE_CLASS_SAFETY, }, { # NotificationType 7: Home Security - State Id's 7, 8 (motion) "type": NOTIFICATION_HOME_SECURITY, "states": ["7", "8"], "device_class": DEVICE_CLASS_MOTION, }, { # NotificationType 9: System - State Id's 1, 2, 6, 7 "type": NOTIFICATION_SYSTEM, "states": ["1", "2", "6", "7"], "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 10: Emergency - State Id's 1, 2, 3 "type": NOTIFICATION_EMERGENCY, "states": ["1", "2", "3"], "device_class": DEVICE_CLASS_PROBLEM, }, { # NotificationType 14: Siren "type": NOTIFICATION_SIREN, "states": ["1"], "device_class": DEVICE_CLASS_SOUND, }, { # NotificationType 18: Gas "type": NOTIFICATION_GAS, "states": ["1", "2", "3", "4"], "device_class": DEVICE_CLASS_GAS, }, { # NotificationType 18: Gas "type": NOTIFICATION_GAS, "states": ["6"], "device_class": DEVICE_CLASS_PROBLEM, }, ] PROPERTY_DOOR_STATUS = "doorStatus" class PropertySensorMapping(TypedDict, total=False): """Represent a property sensor mapping dict type.""" property_name: str # required on_states: list[str] # required device_class: str enabled: bool # Mappings for property sensors PROPERTY_SENSOR_MAPPINGS: list[PropertySensorMapping] = [ { "property_name": PROPERTY_DOOR_STATUS, "on_states": ["open"], "device_class": DEVICE_CLASS_DOOR, "enabled": True, }, ] async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable ) -> None: """Set up Z-Wave binary sensor from config entry.""" client: ZwaveClient = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT] @callback def async_add_binary_sensor(info: ZwaveDiscoveryInfo) -> None: """Add Z-Wave Binary Sensor.""" entities: list[BinarySensorEntity] = [] if info.platform_hint == "notification": # Get all sensors from Notification CC states for state_key in info.primary_value.metadata.states: # ignore idle key (0) if state_key == "0": continue entities.append( ZWaveNotificationBinarySensor(config_entry, client, info, state_key) ) elif info.platform_hint == "property": entities.append(ZWavePropertyBinarySensor(config_entry, client, info)) else: # boolean sensor entities.append(ZWaveBooleanBinarySensor(config_entry, client, info)) async_add_entities(entities) hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append( async_dispatcher_connect( hass, f"{DOMAIN}_{config_entry.entry_id}_add_{BINARY_SENSOR_DOMAIN}", async_add_binary_sensor, ) ) class ZWaveBooleanBinarySensor(ZWaveBaseEntity, BinarySensorEntity): """Representation of a Z-Wave binary_sensor.""" def __init__( self, config_entry: ConfigEntry, client: ZwaveClient, info: ZwaveDiscoveryInfo, ) -> None: """Initialize a ZWaveBooleanBinarySensor entity.""" super().__init__(config_entry, client, info) self._name = self.generate_name(include_value_name=True) @property def is_on(self) -> bool | None: """Return if the
sensor is on or off.""" if self.info.primary_value.value is None: return None return bool(self.info.primary_value.value) @property def device_class(self) -> str | None: """Return device class.""" if self.info.primary_value.command_class == CommandClass.BATTERY: return DEVICE_CLASS_BATTERY return None @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" # Legacy binary sensors are phased out (replaced by notification sensors) # Disable by default to not confuse users return bool( self.info.primary_value.command_class != CommandClass.SENSOR_BINARY or self.info.node.device_class.generic.key == 0x20 ) class ZWaveNotificationBinarySensor(ZWaveBaseEntity, BinarySensorEntity): """Representation of a Z-Wave binary_sensor from Notification CommandClass.""" def __init__( self, config_entry: ConfigEntry, client: ZwaveClient, info: ZwaveDiscoveryInfo, state_key: str, ) -> None: """Initialize a ZWaveNotificationBinarySensor entity.""" super().__init__(config_entry, client, info) self.state_key = state_key self._name = self.generate_name( include_value_name=True, alternate_value_name=self.info.primary_value.property_name, additional_info=[self.info.primary_value.metadata.states[self.state_key]], ) # check if we have a custom mapping for this value self._mapping_info = self._get_sensor_mapping() @property def is_on(self) -> bool | None: """Return if the sensor is on or off.""" if self.info.primary_value.value is None: return None return int(self.info.primary_value.value) == int(self.state_key) @property def device_class(self) -> str | None: """Return device class.""" return self._mapping_info.get("device_class") @property def unique_id(self) -> str: """Return unique id for this entity.""" return f"{super().unique_id}.{self.state_key}" @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" if not self._mapping_info: return True return self._mapping_info.get("enabled", True) @callback def _get_sensor_mapping(self) -> NotificationSensorMapping: """Try to get a device specific mapping for this sensor.""" for mapping in NOTIFICATION_SENSOR_MAPPINGS: if ( mapping["type"] != self.info.primary_value.metadata.cc_specific["notificationType"] ): continue if not mapping.get("states") or self.state_key in mapping["states"]: # match found return mapping return {} class ZWavePropertyBinarySensor(ZWaveBaseEntity, BinarySensorEntity): """Representation of a Z-Wave binary_sensor from a property.""" def __init__( self, config_entry: ConfigEntry, client: ZwaveClient, info: ZwaveDiscoveryInfo ) -> None: """Initialize a ZWavePropertyBinarySensor entity.""" super().__init__(config_entry, client, info) # check if we have a custom mapping for this value self._mapping_info = self._get_sensor_mapping() self._name = self.generate_name(include_value_name=True) @property def is_on(self) -> bool | None: """Return if the sensor is on or off.""" if self.info.primary_value.value is None: return None return self.info.primary_value.value in self._mapping_info["on_states"] @property def device_class(self) -> str | None: """Return device class.""" return self._mapping_info.get("device_class") @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" # We hide some more advanced sensors by default to not overwhelm users # unless explicitly stated in a mapping, assume 
disabled by default return self._mapping_info.get("enabled", False) @callback def _get_sensor_mapping(self) -> PropertySensorMapping: """Try to get a device specific mapping for this sensor.""" mapping_info = PropertySensorMapping() for mapping in PROPERTY_SENSOR_MAPPINGS: if mapping["property_name"] == self.info.primary_value.property_name: mapping_info = mapping.copy() break return mapping_info
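The notification lookup in ZWaveNotificationBinarySensor._get_sensor_mapping above returns the first mapping whose type matches the value's notificationType and whose states list, if present, contains the entity's state key; entries without a states filter therefore act as catch-alls and must come after the more specific ones. Below is a minimal sketch of that lookup in isolation; resolve_device_class and the two sample entries are hypothetical stand-ins mirroring the NOTIFICATION_SENSOR_MAPPINGS shape defined above.

from __future__ import annotations

# Hypothetical standalone mirror of _get_sensor_mapping above: the first
# matching entry wins, and an entry without a "states" filter is a
# catch-all for any state key of that notification type.
NOTIFICATION_SMOKE_ALARM = 1

SAMPLE_MAPPINGS = [
    {"type": NOTIFICATION_SMOKE_ALARM, "states": ["1", "2"], "device_class": "smoke"},
    {"type": NOTIFICATION_SMOKE_ALARM, "device_class": "problem"},  # catch-all
]


def resolve_device_class(notification_type: int, state_key: str) -> str | None:
    """Return the device class of the first mapping that matches."""
    for mapping in SAMPLE_MAPPINGS:
        if mapping["type"] != notification_type:
            continue
        if not mapping.get("states") or state_key in mapping["states"]:
            return mapping.get("device_class")
    return None


assert resolve_device_class(NOTIFICATION_SMOKE_ALARM, "2") == "smoke"  # explicit state match
assert resolve_device_class(NOTIFICATION_SMOKE_ALARM, "7") == "problem"  # falls through to catch-all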
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/zwave_js/binary_sensor.py
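# The tests above repeat one core pattern: register a mock service through
# the `calls` fixture, set up an automation with an event trigger, fire the
# event, and assert the mocked service ran. A minimal sketch of that pattern
# follows; the alias and event name are illustrative, not taken from the suite.
async def test_event_automation_fires(hass, calls):
    """Sketch of the event-trigger/assert-call pattern used throughout."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "alias": "example",
                "trigger": {"platform": "event", "event_type": "example_event"},
                "action": {"service": "test.automation"},
            }
        },
    )

    hass.bus.async_fire("example_event")
    await hass.async_block_till_done()
    assert len(calls) == 1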
"""Support for Ness D8X/D16X devices.""" from collections import namedtuple import datetime from nessclient import ArmingState, Client import voluptuous as vol from homeassistant.components.binary_sensor import DEVICE_CLASSES from homeassistant.const import ( ATTR_CODE, ATTR_STATE, CONF_HOST, CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.helpers import config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send DOMAIN = "ness_alarm" DATA_NESS = "ness_alarm" CONF_DEVICE_PORT = "port" CONF_INFER_ARMING_STATE = "infer_arming_state" CONF_ZONES = "zones" CONF_ZONE_NAME = "name" CONF_ZONE_TYPE = "type" CONF_ZONE_ID = "id" ATTR_OUTPUT_ID = "output_id" DEFAULT_ZONES = [] DEFAULT_SCAN_INTERVAL = datetime.timedelta(minutes=1) DEFAULT_INFER_ARMING_STATE = False SIGNAL_ZONE_CHANGED = "ness_alarm.zone_changed" SIGNAL_ARMING_STATE_CHANGED = "ness_alarm.arming_state_changed" ZoneChangedData = namedtuple("ZoneChangedData", ["zone_id", "state"]) DEFAULT_ZONE_TYPE = "motion" ZONE_SCHEMA = vol.Schema( { vol.Required(CONF_ZONE_NAME): cv.string, vol.Required(CONF_ZONE_ID): cv.positive_int, vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): vol.In(DEVICE_CLASSES), } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_DEVICE_PORT): cv.port, vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL ): cv.positive_time_period, vol.Optional(CONF_ZONES, default=DEFAULT_ZONES): vol.All( cv.ensure_list, [ZONE_SCHEMA] ), vol.Optional( CONF_INFER_ARMING_STATE, default=DEFAULT_INFER_ARMING_STATE ): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) SERVICE_PANIC = "panic" SERVICE_AUX = "aux" SERVICE_SCHEMA_PANIC = vol.Schema({vol.Required(ATTR_CODE): cv.string}) SERVICE_SCHEMA_AUX = vol.Schema( { vol.Required(ATTR_OUTPUT_ID): cv.positive_int, vol.Optional(ATTR_STATE, default=True): cv.boolean, } ) async def async_setup(hass, config): """Set up the Ness Alarm platform.""" conf = config[DOMAIN] zones = conf[CONF_ZONES] host = conf[CONF_HOST] port = conf[CONF_DEVICE_PORT] scan_interval = conf[CONF_SCAN_INTERVAL] infer_arming_state = conf[CONF_INFER_ARMING_STATE] client = Client( host=host, port=port, loop=hass.loop, update_interval=scan_interval.total_seconds(), infer_arming_state=infer_arming_state, ) hass.data[DATA_NESS] = client async def _close(event): await client.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close) hass.async_create_task( async_load_platform(hass, "binary_sensor", DOMAIN, {CONF_ZONES: zones}, config) ) hass.async_create_task( async_load_platform(hass, "alarm_control_panel", DOMAIN, {}, config) ) def on_zone_change(zone_id: int, state: bool): """Receives and propagates zone state updates.""" async_dispatcher_send( hass, SIGNAL_ZONE_CHANGED, ZoneChangedData(zone_id=zone_id, state=state) ) def on_state_change(arming_state: ArmingState): """Receives and propagates arming state updates.""" async_dispatcher_send(hass, SIGNAL_ARMING_STATE_CHANGED, arming_state) client.on_zone_change(on_zone_change) client.on_state_change(on_state_change) # Force update for current arming status and current zone states hass.loop.create_task(client.keepalive()) hass.loop.create_task(client.update()) async def handle_panic(call): await client.panic(call.data[ATTR_CODE]) async def handle_aux(call): await client.aux(call.data[ATTR_OUTPUT_ID], call.data[ATTR_STATE]) hass.services.async_register( DOMAIN, SERVICE_PANIC, handle_panic, 
schema=SERVICE_SCHEMA_PANIC ) hass.services.async_register( DOMAIN, SERVICE_AUX, handle_aux, schema=SERVICE_SCHEMA_AUX ) return True
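# Since CONFIG_SCHEMA above fully describes the accepted configuration, a small
# validation sketch makes the expected shape concrete. This is a hedged example:
# the host, port, and zone values are made up, and it assumes the module above is
# importable as homeassistant.components.ness_alarm.
from homeassistant.components.ness_alarm import CONFIG_SCHEMA, DOMAIN

validated = CONFIG_SCHEMA(
    {
        DOMAIN: {
            "host": "192.168.1.10",  # illustrative panel address
            "port": 2401,  # illustrative IP232 module port
            "zones": [
                {"name": "Front Door", "id": 1, "type": "door"},
                {"name": "Hallway", "id": 2},  # "type" falls back to "motion"
            ],
        }
    }
)
assert validated[DOMAIN]["zones"][1]["type"] == "motion"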
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/ness_alarm/__init__.py
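# Hedged usage sketch for the two services the ness_alarm component above
# registers. The service data keys come from SERVICE_SCHEMA_AUX and
# SERVICE_SCHEMA_PANIC; the keypad code and output id below are illustrative.
async def trigger_ness_services(hass):
    """Call the aux and panic services registered by async_setup."""
    # "aux" takes a required "output_id" and an optional "state" (default True).
    await hass.services.async_call(
        "ness_alarm", "aux", {"output_id": 1, "state": True}, blocking=True
    )
    # "panic" takes the user keypad "code" as a string.
    await hass.services.async_call(
        "ness_alarm", "panic", {"code": "1234"}, blocking=True
    )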
"""Sonos specific exceptions.""" from homeassistant.components.media_player.errors import BrowseError class UnknownMediaType(BrowseError): """Unknown media type."""
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test that automation is not triggered on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support."
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/sonos/exception.py
"""Closures channels module for Zigbee Home Automation.""" import zigpy.zcl.clusters.closures as closures from homeassistant.core import callback from .. import registries from ..const import REPORT_CONFIG_IMMEDIATE, SIGNAL_ATTR_UPDATED from .base import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.DoorLock.cluster_id) class DoorLockChannel(ZigbeeChannel): """Door lock channel.""" _value_attribute = 0 REPORT_CONFIG = ({"attr": "lock_state", "config": REPORT_CONFIG_IMMEDIATE},) async def async_update(self): """Retrieve latest state.""" result = await self.get_attribute_value("lock_state", from_cache=True) if result is not None: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", 0, "lock_state", result ) @callback def cluster_command(self, tsn, command_id, args): """Handle a cluster command received on this cluster.""" if ( self._cluster.client_commands is None or self._cluster.client_commands.get(command_id) is None ): return command_name = self._cluster.client_commands.get(command_id, [command_id])[0] if command_name == "operation_event_notification": self.zha_send_event( command_name, { "source": args[0].name, "operation": args[1].name, "code_slot": (args[2] + 1), # start code slots at 1 }, ) @callback def attribute_updated(self, attrid, value): """Handle attribute update from lock cluster.""" attr_name = self.cluster.attributes.get(attrid, [attrid])[0] self.debug( "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value ) if attrid == self._value_attribute: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value ) async def async_set_user_code(self, code_slot: int, user_code: str) -> None: """Set the user code for the code slot.""" await self.set_pin_code( code_slot - 1, # start code slots at 1, Zigbee internals use 0 closures.DoorLock.UserStatus.Enabled, closures.DoorLock.UserType.Unrestricted, user_code, ) async def async_enable_user_code(self, code_slot: int) -> None: """Enable the code slot.""" await self.set_user_status(code_slot - 1, closures.DoorLock.UserStatus.Enabled) async def async_disable_user_code(self, code_slot: int) -> None: """Disable the code slot.""" await self.set_user_status(code_slot - 1, closures.DoorLock.UserStatus.Disabled) async def async_get_user_code(self, code_slot: int) -> int: """Get the user code from the code slot.""" result = await self.get_pin_code(code_slot - 1) return result async def async_clear_user_code(self, code_slot: int) -> None: """Clear the code slot.""" await self.clear_pin_code(code_slot - 1) async def async_clear_all_user_codes(self) -> None: """Clear all code slots.""" await self.clear_all_pin_codes() async def async_set_user_type(self, code_slot: int, user_type: str) -> None: """Set user type.""" await self.set_user_type(code_slot - 1, user_type) async def async_get_user_type(self, code_slot: int) -> str: """Get user type.""" result = await self.get_user_type(code_slot - 1) return result @registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.Shade.cluster_id) class Shade(ZigbeeChannel): """Shade channel.""" @registries.CLIENT_CHANNELS_REGISTRY.register(closures.WindowCovering.cluster_id) class WindowCoveringClient(ClientChannel): """Window client channel.""" @registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.WindowCovering.cluster_id) class WindowCovering(ZigbeeChannel): """Window channel.""" _value_attribute = 8 REPORT_CONFIG = ( {"attr": "current_position_lift_percentage", "config": REPORT_CONFIG_IMMEDIATE}, ) async def 
async_update(self): """Retrieve latest state.""" result = await self.get_attribute_value( "current_position_lift_percentage", from_cache=False ) self.debug("read current position: %s", result) if result is not None: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", 8, "current_position_lift_percentage", result, ) @callback def attribute_updated(self, attrid, value): """Handle attribute update from window_covering cluster.""" attr_name = self.cluster.attributes.get(attrid, [attrid])[0] self.debug( "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value ) if attrid == self._value_attribute: self.async_send_signal( f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value )
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test that automation is not triggered on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support."
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/zha/core/channels/closures.py
"""Get ride details and liveboard details for NMBS (Belgian railway).""" import logging from pyrail import iRail import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_NAME, CONF_SHOW_ON_MAP, TIME_MINUTES, ) import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "NMBS" DEFAULT_ICON = "mdi:train" DEFAULT_ICON_ALERT = "mdi:alert-octagon" CONF_STATION_FROM = "station_from" CONF_STATION_TO = "station_to" CONF_STATION_LIVE = "station_live" CONF_EXCLUDE_VIAS = "exclude_vias" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_STATION_FROM): cv.string, vol.Required(CONF_STATION_TO): cv.string, vol.Optional(CONF_STATION_LIVE): cv.string, vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean, } ) def get_time_until(departure_time=None): """Calculate the time between now and a train's departure time.""" if departure_time is None: return 0 delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now() return round(delta.total_seconds() / 60) def get_delay_in_minutes(delay=0): """Get the delay in minutes from a delay in seconds.""" return round(int(delay) / 60) def get_ride_duration(departure_time, arrival_time, delay=0): """Calculate the total travel time in minutes.""" duration = dt_util.utc_from_timestamp( int(arrival_time) ) - dt_util.utc_from_timestamp(int(departure_time)) duration_time = int(round(duration.total_seconds() / 60)) return duration_time + get_delay_in_minutes(delay) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the NMBS sensor with iRail API.""" api_client = iRail() name = config[CONF_NAME] show_on_map = config[CONF_SHOW_ON_MAP] station_from = config[CONF_STATION_FROM] station_to = config[CONF_STATION_TO] station_live = config.get(CONF_STATION_LIVE) excl_vias = config[CONF_EXCLUDE_VIAS] sensors = [ NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias) ] if station_live is not None: sensors.append( NMBSLiveBoard(api_client, station_live, station_from, station_to) ) add_entities(sensors, True) class NMBSLiveBoard(SensorEntity): """Get the next train from a station's liveboard.""" def __init__(self, api_client, live_station, station_from, station_to): """Initialize the sensor for getting liveboard data.""" self._station = live_station self._api_client = api_client self._station_from = station_from self._station_to = station_to self._attrs = {} self._state = None @property def name(self): """Return the sensor default name.""" return f"NMBS Live ({self._station})" @property def unique_id(self): """Return a unique ID.""" unique_id = f"{self._station}_{self._station_from}_{self._station_to}" return f"nmbs_live_{unique_id}" @property def icon(self): """Return the default icon or an alert icon if delays.""" if self._attrs and int(self._attrs["delay"]) > 0: return DEFAULT_ICON_ALERT return DEFAULT_ICON @property def state(self): """Return sensor state.""" return self._state @property def extra_state_attributes(self): """Return the sensor attributes if data is available.""" if self._state is None or not self._attrs: return None delay = get_delay_in_minutes(self._attrs["delay"]) departure = get_time_until(self._attrs["time"]) attrs = { "departure": f"In {departure} minutes", 
"departure_minutes": departure, "extra_train": int(self._attrs["isExtra"]) > 0, "vehicle_id": self._attrs["vehicle"], "monitored_station": self._station, ATTR_ATTRIBUTION: "https://api.irail.be/", } if delay > 0: attrs["delay"] = f"{delay} minutes" attrs["delay_minutes"] = delay return attrs def update(self): """Set the state equal to the next departure.""" liveboard = self._api_client.get_liveboard(self._station) if liveboard is None or not liveboard["departures"]: return next_departure = liveboard["departures"]["departure"][0] self._attrs = next_departure self._state = ( f"Track {next_departure['platform']} - {next_departure['station']}" ) class NMBSSensor(SensorEntity): """Get the the total travel time for a given connection.""" def __init__( self, api_client, name, show_on_map, station_from, station_to, excl_vias ): """Initialize the NMBS connection sensor.""" self._name = name self._show_on_map = show_on_map self._api_client = api_client self._station_from = station_from self._station_to = station_to self._excl_vias = excl_vias self._attrs = {} self._state = None @property def name(self): """Return the name of the sensor.""" return self._name @property def unit_of_measurement(self): """Return the unit of measurement.""" return TIME_MINUTES @property def icon(self): """Return the sensor default icon or an alert icon if any delay.""" if self._attrs: delay = get_delay_in_minutes(self._attrs["departure"]["delay"]) if delay > 0: return "mdi:alert-octagon" return "mdi:train" @property def extra_state_attributes(self): """Return sensor attributes if data is available.""" if self._state is None or not self._attrs: return None delay = get_delay_in_minutes(self._attrs["departure"]["delay"]) departure = get_time_until(self._attrs["departure"]["time"]) attrs = { "departure": f"In {departure} minutes", "departure_minutes": departure, "destination": self._station_to, "direction": self._attrs["departure"]["direction"]["name"], "platform_arriving": self._attrs["arrival"]["platform"], "platform_departing": self._attrs["departure"]["platform"], "vehicle_id": self._attrs["departure"]["vehicle"], ATTR_ATTRIBUTION: "https://api.irail.be/", } if self._show_on_map and self.station_coordinates: attrs[ATTR_LATITUDE] = self.station_coordinates[0] attrs[ATTR_LONGITUDE] = self.station_coordinates[1] if self.is_via_connection and not self._excl_vias: via = self._attrs["vias"]["via"][0] attrs["via"] = via["station"] attrs["via_arrival_platform"] = via["arrival"]["platform"] attrs["via_transfer_platform"] = via["departure"]["platform"] attrs["via_transfer_time"] = get_delay_in_minutes( via["timeBetween"] ) + get_delay_in_minutes(via["departure"]["delay"]) if delay > 0: attrs["delay"] = f"{delay} minutes" attrs["delay_minutes"] = delay return attrs @property def state(self): """Return the state of the device.""" return self._state @property def station_coordinates(self): """Get the lat, long coordinates for station.""" if self._state is None or not self._attrs: return [] latitude = float(self._attrs["departure"]["stationinfo"]["locationY"]) longitude = float(self._attrs["departure"]["stationinfo"]["locationX"]) return [latitude, longitude] @property def is_via_connection(self): """Return whether the connection goes through another station.""" if not self._attrs: return False return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0 def update(self): """Set the state to the duration of a connection.""" connections = self._api_client.get_connections( self._station_from, self._station_to ) if connections 
is None or not connections["connection"]: return if int(connections["connection"][0]["departure"]["left"]) > 0: next_connection = connections["connection"][1] else: next_connection = connections["connection"][0] self._attrs = next_connection if self._excl_vias and self.is_via_connection: _LOGGER.debug( "Skipping update of NMBSSensor because this connection is a via" ) return duration = get_ride_duration( next_connection["departure"]["time"], next_connection["arrival"]["time"], next_connection["departure"]["delay"], ) self._state = duration
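# --- Editor's note: a minimal standalone sketch of the time helpers used by
# this sensor. It mirrors get_delay_in_minutes/get_ride_duration with the
# standard library instead of homeassistant.util.dt, purely for illustration;
# the timestamps below are made up.
from datetime import datetime, timedelta, timezone


def _delay_minutes(delay=0):
    # iRail reports delays in seconds; round to whole minutes.
    return round(int(delay) / 60)


def _ride_duration(departure_ts, arrival_ts, delay=0):
    # Scheduled duration in minutes, plus any departure delay.
    duration = datetime.fromtimestamp(
        int(arrival_ts), tz=timezone.utc
    ) - datetime.fromtimestamp(int(departure_ts), tz=timezone.utc)
    return int(round(duration.total_seconds() / 60)) + _delay_minutes(delay)


departure = datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc)
arrival = departure + timedelta(minutes=42)
# 42 scheduled minutes + a 120 s delay -> 44
print(_ride_duration(departure.timestamp(), arrival.timestamp(), delay=120))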
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not triggered on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support."
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/nmbs/sensor.py
"""Support for Netgear Arlo IP cameras.""" import logging from haffmpeg.camera import CameraMjpeg import voluptuous as vol from homeassistant.components.camera import PLATFORM_SCHEMA, Camera from homeassistant.components.ffmpeg import DATA_FFMPEG from homeassistant.const import ATTR_BATTERY_LEVEL from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import DATA_ARLO, DEFAULT_BRAND, SIGNAL_UPDATE_ARLO _LOGGER = logging.getLogger(__name__) ARLO_MODE_ARMED = "armed" ARLO_MODE_DISARMED = "disarmed" ATTR_BRIGHTNESS = "brightness" ATTR_FLIPPED = "flipped" ATTR_MIRRORED = "mirrored" ATTR_MOTION = "motion_detection_sensitivity" ATTR_POWERSAVE = "power_save_mode" ATTR_SIGNAL_STRENGTH = "signal_strength" ATTR_UNSEEN_VIDEOS = "unseen_videos" ATTR_LAST_REFRESH = "last_refresh" CONF_FFMPEG_ARGUMENTS = "ffmpeg_arguments" DEFAULT_ARGUMENTS = "-pred 1" POWERSAVE_MODE_MAPPING = {1: "best_battery_life", 2: "optimized", 3: "best_video"} PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_FFMPEG_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up an Arlo IP Camera.""" arlo = hass.data[DATA_ARLO] cameras = [] for camera in arlo.cameras: cameras.append(ArloCam(hass, camera, config)) add_entities(cameras) class ArloCam(Camera): """An implementation of a Netgear Arlo IP camera.""" def __init__(self, hass, camera, device_info): """Initialize an Arlo camera.""" super().__init__() self._camera = camera self._name = self._camera.name self._motion_status = False self._ffmpeg = hass.data[DATA_FFMPEG] self._ffmpeg_arguments = device_info.get(CONF_FFMPEG_ARGUMENTS) self._last_refresh = None self.attrs = {} def camera_image(self): """Return a still image response from the camera.""" return self._camera.last_image_from_cache async def async_added_to_hass(self): """Register callbacks.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_UPDATE_ARLO, self.async_write_ha_state ) ) async def handle_async_mjpeg_stream(self, request): """Generate an HTTP MJPEG stream from the camera.""" video = await self.hass.async_add_executor_job( getattr, self._camera, "last_video" ) if not video: error_msg = ( f"Video not found for {self.name}. " f"Is it older than {self._camera.min_days_vdo_cache} days?" 
) _LOGGER.error(error_msg) return stream = CameraMjpeg(self._ffmpeg.binary) await stream.open_camera(video.video_url, extra_cmd=self._ffmpeg_arguments) try: stream_reader = await stream.get_reader() return await async_aiohttp_proxy_stream( self.hass, request, stream_reader, self._ffmpeg.ffmpeg_stream_content_type, ) finally: await stream.close() @property def name(self): """Return the name of this camera.""" return self._name @property def extra_state_attributes(self): """Return the state attributes.""" return { name: value for name, value in ( (ATTR_BATTERY_LEVEL, self._camera.battery_level), (ATTR_BRIGHTNESS, self._camera.brightness), (ATTR_FLIPPED, self._camera.flip_state), (ATTR_MIRRORED, self._camera.mirror_state), (ATTR_MOTION, self._camera.motion_detection_sensitivity), ( ATTR_POWERSAVE, POWERSAVE_MODE_MAPPING.get(self._camera.powersave_mode), ), (ATTR_SIGNAL_STRENGTH, self._camera.signal_strength), (ATTR_UNSEEN_VIDEOS, self._camera.unseen_videos), ) if value is not None } @property def model(self): """Return the camera model.""" return self._camera.model_id @property def brand(self): """Return the camera brand.""" return DEFAULT_BRAND @property def motion_detection_enabled(self): """Return the camera motion detection status.""" return self._motion_status def set_base_station_mode(self, mode): """Set the mode in the base station.""" # Get the list of base stations identified by the library base_stations = self.hass.data[DATA_ARLO].base_stations # Some Arlo cameras do not have a base station, # so check whether one was detected first; # if so, choose the primary base station # and set the mode on it if base_stations: primary_base_station = base_stations[0] primary_base_station.mode = mode def enable_motion_detection(self): """Enable the motion detection in base station (Arm).""" self._motion_status = True self.set_base_station_mode(ARLO_MODE_ARMED) def disable_motion_detection(self): """Disable the motion detection in base station (Disarm).""" self._motion_status = False self.set_base_station_mode(ARLO_MODE_DISARMED)
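# --- Editor's note: a minimal standalone sketch of the arm/disarm flow in
# set_base_station_mode above (unittest.mock.Mock stands in for the pyarlo
# base-station object; only the primary station is updated, as in the code).
from unittest.mock import Mock

ARLO_MODE_ARMED = "armed"


def set_primary_mode(base_stations, mode):
    # No-op when no base station was detected.
    if base_stations:
        base_stations[0].mode = mode


stations = [Mock(), Mock()]
set_primary_mode(stations, ARLO_MODE_ARMED)
assert stations[0].mode == ARLO_MODE_ARMED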
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test that automation is not triggered on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/arlo/camera.py
"""Support for Z-Wave fans.""" import math from homeassistant.components.fan import ( DOMAIN as FAN_DOMAIN, SUPPORT_SET_SPEED, FanEntity, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.util.percentage import ( int_states_in_range, percentage_to_ranged_value, ranged_value_to_percentage, ) from .const import DATA_UNSUBSCRIBE, DOMAIN from .entity import ZWaveDeviceEntity SUPPORTED_FEATURES = SUPPORT_SET_SPEED SPEED_RANGE = (1, 99) # off is not included async def async_setup_entry(hass, config_entry, async_add_entities): """Set up Z-Wave Fan from Config Entry.""" @callback def async_add_fan(values): """Add Z-Wave Fan.""" fan = ZwaveFan(values) async_add_entities([fan]) hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append( async_dispatcher_connect(hass, f"{DOMAIN}_new_{FAN_DOMAIN}", async_add_fan) ) class ZwaveFan(ZWaveDeviceEntity, FanEntity): """Representation of a Z-Wave fan.""" async def async_set_percentage(self, percentage): """Set the speed percentage of the fan.""" if percentage is None: # Value 255 tells device to return to previous value zwave_speed = 255 elif percentage == 0: zwave_speed = 0 else: zwave_speed = math.ceil(percentage_to_ranged_value(SPEED_RANGE, percentage)) self.values.primary.send_value(zwave_speed) async def async_turn_on( self, speed=None, percentage=None, preset_mode=None, **kwargs ): """Turn the device on.""" await self.async_set_percentage(percentage) async def async_turn_off(self, **kwargs): """Turn the device off.""" self.values.primary.send_value(0) @property def is_on(self): """Return true if device is on (speed above 0).""" return self.values.primary.value > 0 @property def percentage(self): """Return the current speed. The Z-Wave speed value is a byte 0-255. 255 means previous value. The normal range of the speed is 0-99. 0 means off. """ return ranged_value_to_percentage(SPEED_RANGE, self.values.primary.value) @property def speed_count(self) -> int: """Return the number of speeds the fan supports.""" return int_states_in_range(SPEED_RANGE) @property def supported_features(self): """Flag supported features.""" return SUPPORTED_FEATURES
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/ozw/fan.py
"""Config flow for Ring integration.""" import logging from oauthlib.oauth2 import AccessDeniedError, MissingTokenError from ring_doorbell import Auth import voluptuous as vol from homeassistant import config_entries, const, core, exceptions from . import DOMAIN _LOGGER = logging.getLogger(__name__) async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect.""" auth = Auth(f"HomeAssistant/{const.__version__}") try: token = await hass.async_add_executor_job( auth.fetch_token, data["username"], data["password"], data.get("2fa"), ) except MissingTokenError as err: raise Require2FA from err except AccessDeniedError as err: raise InvalidAuth from err return token class RingConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Ring.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL user_pass = None async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: try: token = await validate_input(self.hass, user_input) await self.async_set_unique_id(user_input["username"]) return self.async_create_entry( title=user_input["username"], data={"username": user_input["username"], "token": token}, ) except Require2FA: self.user_pass = user_input return await self.async_step_2fa() except InvalidAuth: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return self.async_show_form( step_id="user", data_schema=vol.Schema({"username": str, "password": str}), errors=errors, ) async def async_step_2fa(self, user_input=None): """Handle 2fa step.""" if user_input: return await self.async_step_user({**self.user_pass, **user_input}) return self.async_show_form( step_id="2fa", data_schema=vol.Schema({"2fa": str}), ) class Require2FA(exceptions.HomeAssistantError): """Error to indicate we require 2FA.""" class InvalidAuth(exceptions.HomeAssistantError): """Error to indicate there is invalid auth."""
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/ring/config_flow.py
"""Describe logbook events.""" from homeassistant.components.logbook import LazyEventPartialState from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME from homeassistant.core import HomeAssistant, callback from . import ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_TRIGGERED @callback def async_describe_events(hass: HomeAssistant, async_describe_event): # type: ignore """Describe logbook events.""" @callback def async_describe_logbook_event(event: LazyEventPartialState): # type: ignore """Describe a logbook event.""" data = event.data message = "has been triggered" if ATTR_SOURCE in data: message = f"{message} by {data[ATTR_SOURCE]}" return { "name": data.get(ATTR_NAME), "message": message, "source": data.get(ATTR_SOURCE), "entity_id": data.get(ATTR_ENTITY_ID), "context_id": event.context_id, } async_describe_event( DOMAIN, EVENT_AUTOMATION_TRIGGERED, async_describe_logbook_event )
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
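# --- Illustrative sketch (not part of the original test module) ---
# Nearly every test above follows the same four-step pattern: capture service
# calls with async_mock_service, load an automation config via
# async_setup_component, fire the trigger event, and drain pending tasks with
# async_block_till_done before asserting. The event name below is a
# placeholder chosen for illustration; all helpers are already imported above.
async def _example_trigger_round_trip(hass):
    calls = async_mock_service(hass, "test", "automation")
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {"platform": "event", "event_type": "example_event"},
                "action": {"service": "test.automation"},
            }
        },
    )
    hass.bus.async_fire("example_event")
    # The action runs as a background task; block until it completes before
    # asserting on the captured calls.
    await hass.async_block_till_done()
    assert len(calls) == 1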
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/automation/logbook.py
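# --- Hedged sketch (assumption; the logbook module itself is not shown) ---
# Consistent with test_logbook_humanify_automation_triggered_event above, a
# describe callback for EVENT_AUTOMATION_TRIGGERED would build the name,
# message, and entity id from the event data, appending the trigger source
# when present. The helper name is hypothetical.
def _example_describe_automation_event(event_data):
    message = "has been triggered"
    if ATTR_SOURCE in event_data:
        message += f" by {event_data[ATTR_SOURCE]}"
    return {
        "name": event_data.get(ATTR_NAME),
        "message": message,
        "entity_id": event_data.get(ATTR_ENTITY_ID),
    }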
"""Interface implementation for cloud client.""" from __future__ import annotations import asyncio import logging from pathlib import Path from typing import Any import aiohttp from hass_nabucasa.client import CloudClient as Interface from homeassistant.components.alexa import ( errors as alexa_errors, smart_home as alexa_sh, ) from homeassistant.components.google_assistant import const as gc, smart_home as ga from homeassistant.const import HTTP_OK from homeassistant.core import Context, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later from homeassistant.util.aiohttp import MockRequest from . import alexa_config, google_config, utils from .const import DISPATCHER_REMOTE_UPDATE, DOMAIN from .prefs import CloudPreferences class CloudClient(Interface): """Interface class for Home Assistant Cloud.""" def __init__( self, hass: HomeAssistant, prefs: CloudPreferences, websession: aiohttp.ClientSession, alexa_user_config: dict[str, Any], google_user_config: dict[str, Any], ): """Initialize client interface to Cloud.""" self._hass = hass self._prefs = prefs self._websession = websession self.google_user_config = google_user_config self.alexa_user_config = alexa_user_config self._alexa_config = None self._google_config = None @property def base_path(self) -> Path: """Return path to base dir.""" return Path(self._hass.config.config_dir) @property def prefs(self) -> CloudPreferences: """Return Cloud preferences.""" return self._prefs @property def loop(self) -> asyncio.BaseEventLoop: """Return client loop.""" return self._hass.loop @property def websession(self) -> aiohttp.ClientSession: """Return client session for aiohttp.""" return self._websession @property def aiohttp_runner(self) -> aiohttp.web.AppRunner: """Return client webinterface aiohttp application.""" return self._hass.http.runner @property def cloudhooks(self) -> dict[str, dict[str, str]]: """Return list of cloudhooks.""" return self._prefs.cloudhooks @property def remote_autostart(self) -> bool: """Return true if we want start a remote connection.""" return self._prefs.remote_enabled async def get_alexa_config(self) -> alexa_config.AlexaConfig: """Return Alexa config.""" if self._alexa_config is None: assert self.cloud is not None cloud_user = await self._prefs.get_cloud_user() self._alexa_config = alexa_config.AlexaConfig( self._hass, self.alexa_user_config, cloud_user, self._prefs, self.cloud ) return self._alexa_config async def get_google_config(self) -> google_config.CloudGoogleConfig: """Return Google config.""" if not self._google_config: assert self.cloud is not None cloud_user = await self._prefs.get_cloud_user() self._google_config = google_config.CloudGoogleConfig( self._hass, self.google_user_config, cloud_user, self._prefs, self.cloud ) await self._google_config.async_initialize() return self._google_config async def logged_in(self) -> None: """When user logs in.""" is_new_user = await self.prefs.async_set_username(self.cloud.username) async def enable_alexa(_): """Enable Alexa.""" aconf = await self.get_alexa_config() try: await aconf.async_enable_proactive_mode() except aiohttp.ClientError as err: # If no internet available yet if self._hass.is_running: logging.getLogger(__package__).warning( "Unable to activate Alexa Report State: %s. 
Retrying in 30 seconds", err, ) async_call_later(self._hass, 30, enable_alexa) except alexa_errors.NoTokenAvailable: pass async def enable_google(_): """Enable Google.""" gconf = await self.get_google_config() gconf.async_enable_local_sdk() if gconf.should_report_state: gconf.async_enable_report_state() if is_new_user: await gconf.async_sync_entities(gconf.agent_user_id) tasks = [] if self._prefs.alexa_enabled and self._prefs.alexa_report_state: tasks.append(enable_alexa) if self._prefs.google_enabled: tasks.append(enable_google) if tasks: await asyncio.gather(*[task(None) for task in tasks]) async def cleanups(self) -> None: """Cleanup some stuff after logout.""" await self.prefs.async_set_username(None) self._google_config = None @callback def user_message(self, identifier: str, title: str, message: str) -> None: """Create a message for user to UI.""" self._hass.components.persistent_notification.async_create( message, title, identifier ) @callback def dispatcher_message(self, identifier: str, data: Any = None) -> None: """Match cloud notification to dispatcher.""" if identifier.startswith("remote_"): async_dispatcher_send(self._hass, DISPATCHER_REMOTE_UPDATE, data) async def async_alexa_message(self, payload: dict[Any, Any]) -> dict[Any, Any]: """Process cloud alexa message to client.""" cloud_user = await self._prefs.get_cloud_user() aconfig = await self.get_alexa_config() return await alexa_sh.async_handle_message( self._hass, aconfig, payload, context=Context(user_id=cloud_user), enabled=self._prefs.alexa_enabled, ) async def async_google_message(self, payload: dict[Any, Any]) -> dict[Any, Any]: """Process cloud google message to client.""" if not self._prefs.google_enabled: return ga.turned_off_response(payload) gconf = await self.get_google_config() return await ga.async_handle_message( self._hass, gconf, gconf.cloud_user, payload, gc.SOURCE_CLOUD ) async def async_webhook_message(self, payload: dict[Any, Any]) -> dict[Any, Any]: """Process cloud webhook message to client.""" cloudhook_id = payload["cloudhook_id"] found = None for cloudhook in self._prefs.cloudhooks.values(): if cloudhook["cloudhook_id"] == cloudhook_id: found = cloudhook break if found is None: return {"status": HTTP_OK} request = MockRequest( content=payload["body"].encode("utf-8"), headers=payload["headers"], method=payload["method"], query_string=payload["query"], mock_source=DOMAIN, ) response = await self._hass.components.webhook.async_handle_webhook( found["webhook_id"], request ) response_dict = utils.aiohttp_serialize_response(response) body = response_dict.get("body") return { "body": body, "status": response_dict["status"], "headers": {"Content-Type": response.content_type}, } async def async_cloudhooks_update(self, data: dict[str, dict[str, str]]) -> None: """Update local list of cloudhooks.""" await self._prefs.async_update(cloudhooks=data)
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/cloud/client.py
"""Config flow for Elk-M1 Control integration.""" import asyncio import logging from urllib.parse import urlparse import elkm1_lib as elkm1 import voluptuous as vol from homeassistant import config_entries, exceptions from homeassistant.const import ( CONF_ADDRESS, CONF_HOST, CONF_PASSWORD, CONF_PREFIX, CONF_PROTOCOL, CONF_TEMPERATURE_UNIT, CONF_USERNAME, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.util import slugify from . import async_wait_for_elk_to_sync from .const import CONF_AUTO_CONFIGURE, DOMAIN _LOGGER = logging.getLogger(__name__) PROTOCOL_MAP = {"secure": "elks://", "non-secure": "elk://", "serial": "serial://"} DATA_SCHEMA = vol.Schema( { vol.Required(CONF_PROTOCOL, default="secure"): vol.In( ["secure", "non-secure", "serial"] ), vol.Required(CONF_ADDRESS): str, vol.Optional(CONF_USERNAME, default=""): str, vol.Optional(CONF_PASSWORD, default=""): str, vol.Optional(CONF_PREFIX, default=""): str, vol.Optional(CONF_TEMPERATURE_UNIT, default=TEMP_FAHRENHEIT): vol.In( [TEMP_FAHRENHEIT, TEMP_CELSIUS] ), } ) VALIDATE_TIMEOUT = 35 async def validate_input(data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ userid = data.get(CONF_USERNAME) password = data.get(CONF_PASSWORD) prefix = data[CONF_PREFIX] url = _make_url_from_data(data) requires_password = url.startswith("elks://") if requires_password and (not userid or not password): raise InvalidAuth elk = elkm1.Elk( {"url": url, "userid": userid, "password": password, "element_list": ["panel"]} ) elk.connect() if not await async_wait_for_elk_to_sync(elk, VALIDATE_TIMEOUT, url): raise InvalidAuth device_name = data[CONF_PREFIX] if data[CONF_PREFIX] else "ElkM1" # Return info that you want to store in the config entry. 
return {"title": device_name, CONF_HOST: url, CONF_PREFIX: slugify(prefix)} def _make_url_from_data(data): host = data.get(CONF_HOST) if host: return host protocol = PROTOCOL_MAP[data[CONF_PROTOCOL]] address = data[CONF_ADDRESS] return f"{protocol}{address}" class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Elk-M1 Control.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH def __init__(self): """Initialize the elkm1 config flow.""" self.importing = False async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: if self._url_already_configured(_make_url_from_data(user_input)): return self.async_abort(reason="address_already_configured") try: info = await validate_input(user_input) except asyncio.TimeoutError: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" if "base" not in errors: await self.async_set_unique_id(user_input[CONF_PREFIX]) self._abort_if_unique_id_configured() if self.importing: return self.async_create_entry(title=info["title"], data=user_input) return self.async_create_entry( title=info["title"], data={ CONF_HOST: info[CONF_HOST], CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_AUTO_CONFIGURE: True, CONF_TEMPERATURE_UNIT: user_input[CONF_TEMPERATURE_UNIT], CONF_PREFIX: info[CONF_PREFIX], }, ) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) async def async_step_import(self, user_input): """Handle import.""" self.importing = True return await self.async_step_user(user_input) def _url_already_configured(self, url): """See if we already have a elkm1 matching user input configured.""" existing_hosts = { urlparse(entry.data[CONF_HOST]).hostname for entry in self._async_current_entries() } return urlparse(url).hostname in existing_hosts class InvalidAuth(exceptions.HomeAssistantError): """Error to indicate there is invalid auth."""
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/elkm1/config_flow.py
"""Support for HDMI CEC devices as switches.""" import logging from homeassistant.components.switch import DOMAIN, SwitchEntity from homeassistant.const import STATE_OFF, STATE_ON, STATE_STANDBY from . import ATTR_NEW, CecEntity _LOGGER = logging.getLogger(__name__) ENTITY_ID_FORMAT = DOMAIN + ".{}" def setup_platform(hass, config, add_entities, discovery_info=None): """Find and return HDMI devices as switches.""" if ATTR_NEW in discovery_info: _LOGGER.info("Setting up HDMI devices %s", discovery_info[ATTR_NEW]) entities = [] for device in discovery_info[ATTR_NEW]: hdmi_device = hass.data.get(device) entities.append(CecSwitchEntity(hdmi_device, hdmi_device.logical_address)) add_entities(entities, True) class CecSwitchEntity(CecEntity, SwitchEntity): """Representation of a HDMI device as a Switch.""" def __init__(self, device, logical) -> None: """Initialize the HDMI device.""" CecEntity.__init__(self, device, logical) self.entity_id = f"{DOMAIN}.hdmi_{hex(self._logical_address)[2:]}" def turn_on(self, **kwargs) -> None: """Turn device on.""" self._device.turn_on() self._state = STATE_ON self.schedule_update_ha_state(force_refresh=False) def turn_off(self, **kwargs) -> None: """Turn device off.""" self._device.turn_off() self._state = STATE_OFF self.schedule_update_ha_state(force_refresh=False) def toggle(self, **kwargs): """Toggle the entity.""" self._device.toggle() if self._state == STATE_ON: self._state = STATE_OFF else: self._state = STATE_ON self.schedule_update_ha_state(force_refresh=False) @property def is_on(self) -> bool: """Return True if entity is on.""" return self._state == STATE_ON @property def is_standby(self): """Return true if device is in standby.""" return self._state == STATE_OFF or self._state == STATE_STANDBY @property def state(self) -> str: """Return the cached state of device.""" return self._state
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
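The tests above all reduce to the same capture-and-assert pattern; the following is a minimal illustrative sketch of that pattern only (the test name, event type, and payload are invented and do not appear in the source):

async def test_minimal_pattern_sketch(hass):
    """Illustrative only: fire a trigger event and assert on captured calls."""
    # async_mock_service registers a fake "test.automation" service and
    # returns the list that records every call made to it.
    calls = async_mock_service(hass, "test", "automation")
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {"platform": "event", "event_type": "demo_event"},
                "action": {"service": "test.automation", "data": {"ok": True}},
            }
        },
    )
    hass.bus.async_fire("demo_event")
    # Drain the event loop so the automation's action has finished running.
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert calls[0].data["ok"] is True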
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/hdmi_cec/switch.py
"""Support for SMS notification services.""" import logging import gammu # pylint: disable=import-error import voluptuous as vol from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService from homeassistant.const import CONF_NAME, CONF_RECIPIENT import homeassistant.helpers.config_validation as cv from .const import DOMAIN, SMS_GATEWAY _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_RECIPIENT): cv.string, vol.Optional(CONF_NAME): cv.string} ) def get_service(hass, config, discovery_info=None): """Get the SMS notification service.""" if SMS_GATEWAY not in hass.data[DOMAIN]: _LOGGER.error("SMS gateway not found, cannot initialize service") return gateway = hass.data[DOMAIN][SMS_GATEWAY] if discovery_info is None: number = config[CONF_RECIPIENT] else: number = discovery_info[CONF_RECIPIENT] return SMSNotificationService(gateway, number) class SMSNotificationService(BaseNotificationService): """Implement the notification service for SMS.""" def __init__(self, gateway, number): """Initialize the service.""" self.gateway = gateway self.number = number async def async_send_message(self, message="", **kwargs): """Send SMS message.""" smsinfo = { "Class": -1, "Unicode": False, "Entries": [{"ID": "ConcatenatedTextLong", "Buffer": message}], } try: # Encode messages encoded = gammu.EncodeSMS(smsinfo) except gammu.GSMError as exc: _LOGGER.error("Encoding message %s failed: %s", message, exc) return # Send messages for encoded_message in encoded: # Fill in numbers encoded_message["SMSC"] = {"Location": 1} encoded_message["Number"] = self.number try: # Actually send the message await self.gateway.send_sms_async(encoded_message) except gammu.GSMError as exc: _LOGGER.error("Sending to %s failed: %s", self.number, exc)
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test initial value off and restored state is turned on.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test if automation is not trigger on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support." 
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/sms/notify.py
"""Support for Fibaro binary sensors.""" from homeassistant.components.binary_sensor import ( DEVICE_CLASS_DOOR, DEVICE_CLASS_MOTION, DEVICE_CLASS_SMOKE, DEVICE_CLASS_WINDOW, DOMAIN, BinarySensorEntity, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_ICON from . import FIBARO_DEVICES, FibaroDevice SENSOR_TYPES = { "com.fibaro.floodSensor": ["Flood", "mdi:water", "flood"], "com.fibaro.motionSensor": ["Motion", "mdi:run", DEVICE_CLASS_MOTION], "com.fibaro.doorSensor": ["Door", "mdi:window-open", DEVICE_CLASS_DOOR], "com.fibaro.windowSensor": ["Window", "mdi:window-open", DEVICE_CLASS_WINDOW], "com.fibaro.smokeSensor": ["Smoke", "mdi:smoking", DEVICE_CLASS_SMOKE], "com.fibaro.FGMS001": ["Motion", "mdi:run", DEVICE_CLASS_MOTION], "com.fibaro.heatDetector": ["Heat", "mdi:fire", "heat"], } def setup_platform(hass, config, add_entities, discovery_info=None): """Perform the setup for Fibaro controller devices.""" if discovery_info is None: return add_entities( [ FibaroBinarySensor(device) for device in hass.data[FIBARO_DEVICES]["binary_sensor"] ], True, ) class FibaroBinarySensor(FibaroDevice, BinarySensorEntity): """Representation of a Fibaro Binary Sensor.""" def __init__(self, fibaro_device): """Initialize the binary_sensor.""" self._state = None super().__init__(fibaro_device) self.entity_id = f"{DOMAIN}.{self.ha_id}" stype = None devconf = fibaro_device.device_config if fibaro_device.type in SENSOR_TYPES: stype = fibaro_device.type elif fibaro_device.baseType in SENSOR_TYPES: stype = fibaro_device.baseType if stype: self._device_class = SENSOR_TYPES[stype][2] self._icon = SENSOR_TYPES[stype][1] else: self._device_class = None self._icon = None # device_config overrides: self._device_class = devconf.get(CONF_DEVICE_CLASS, self._device_class) self._icon = devconf.get(CONF_ICON, self._icon) @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def device_class(self): """Return the device class of the sensor.""" return self._device_class @property def is_on(self): """Return true if sensor is on.""" return self._state def update(self): """Get the latest data and update the state.""" self._state = self.current_binary_state
"""The tests for the automation component.""" import asyncio import logging from unittest.mock import Mock, patch import pytest from homeassistant.components import logbook import homeassistant.components.automation as automation from homeassistant.components.automation import ( ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_RELOADED, EVENT_AUTOMATION_TRIGGERED, SERVICE_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, EVENT_HOMEASSISTANT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, CoreState, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_capture_events, async_mock_service, mock_restore_cache, ) from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_service_data_not_a_dict(hass, calls): """Test service data not dict.""" with assert_setup_component(0, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "data": 100}, } }, ) async def test_service_specify_data(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": { "some": "{{ trigger.platform }} - " "{{ trigger.event.event_type }}" }, }, } }, ) time = dt_util.utcnow() with patch("homeassistant.helpers.script.utcnow", return_value=time): hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "event - test_event" state = hass.states.get("automation.hello") assert state is not None assert state.attributes.get("last_triggered") == time async def test_service_specify_entity_id(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list(hass, calls): """Test service data.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [ {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], "action": {"service": "test.automation"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 
hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() assert len(calls) == 2 async def test_trigger_service_ignoring_condition(hass, caplog, calls): """Test triggers.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, "action": {"service": "test.automation"}, } }, ) caplog.clear() caplog.set_level(logging.WARNING) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test"}, blocking=True ) assert len(calls) == 1 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": True}, blocking=True, ) assert len(calls) == 2 await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.test", "skip_condition": False}, blocking=True, ) assert len(calls) == 2 async def test_two_conditions_with_and(hass, calls): """Test two and conditions.""" entity_id = "test.entity" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", "entity_id": entity_id, "below": 150, }, ], "action": {"service": "test.automation"}, } }, ) hass.states.async_set(entity_id, 100) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 101) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set(entity_id, 151) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_shorthand_conditions_template(hass, calls): """Test shorthand nation form in conditions.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", "action": {"service": "test.automation"}, } }, ) hass.states.async_set("test.entity", "hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.states.async_set("test.entity", "goodbye") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_list_setting(hass, calls): """Event is not a valid condition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": {"service": "test.automation"}, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 async def test_automation_calling_two_actions(hass, calls): """Test if we can call two actions from automation async definition.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"service": 
"test.automation", "data": {"position": 0}}, {"service": "test.automation", "data": {"position": 1}}, ], } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 assert calls[0].data["position"] == 0 assert calls[1].data["position"] == 1 async def test_shared_context(hass, calls): """Test that the shared context is passed down the chain.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"event": "test_event2"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": {"service": "test.automation"}, }, ] }, ) context = Context() first_automation_listener = Mock() event_mock = Mock() hass.bus.async_listen("test_event2", first_automation_listener) hass.bus.async_listen(EVENT_AUTOMATION_TRIGGERED, event_mock) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() # Ensure events was fired assert first_automation_listener.call_count == 1 assert event_mock.call_count == 2 # Verify automation triggered evenet for 'hello' automation args, _ = event_mock.call_args_list[0] first_trigger_context = args[0].context assert first_trigger_context.parent_id == context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure context set correctly for event fired by 'hello' automation args, _ = first_automation_listener.call_args assert args[0].context is first_trigger_context # Ensure the 'hello' automation state has the right context state = hass.states.get("automation.hello") assert state is not None assert state.context is first_trigger_context # Verify automation triggered evenet for 'bye' automation args, _ = event_mock.call_args_list[1] second_trigger_context = args[0].context assert second_trigger_context.parent_id == first_trigger_context.id # Ensure event data has all attributes set assert args[0].data.get(ATTR_NAME) is not None assert args[0].data.get(ATTR_ENTITY_ID) is not None assert args[0].data.get(ATTR_SOURCE) is not None # Ensure the service call from the second automation # shares the same context assert len(calls) == 1 assert calls[0].context is second_trigger_context async def test_services(hass, calls): """Test the automation services for turning entities on/off.""" entity_id = "automation.hello" assert hass.states.get(entity_id) is None assert not automation.is_on(hass, entity_id) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, } }, ) assert hass.states.get(entity_id) is not None assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, { ATTR_ENTITY_ID: entity_id, }, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert not automation.is_on(hass, entity_id) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 await hass.services.async_call( automation.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 3 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.services.async_call( automation.DOMAIN, SERVICE_TRIGGER, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert len(calls) == 4 await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert automation.is_on(hass, entity_id) async def test_reload_config_service(hass, calls, hass_admin_user, hass_read_only_user): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None assert hass.states.get("automation.bye") is None listeners = hass.bus.async_listeners() assert listeners.get("test_event") == 1 assert listeners.get("test_event2") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" test_reload_event = async_capture_events(hass, EVENT_AUTOMATION_RELOADED) with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={ automation.DOMAIN: { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ): with pytest.raises(Unauthorized): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_read_only_user.id), blocking=True, ) await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, context=Context(user_id=hass_admin_user.id), blocking=True, ) # De-flake ?! 
await hass.async_block_till_done() assert len(test_reload_event) == 1 assert hass.states.get("automation.hello") is None assert hass.states.get("automation.bye") is not None listeners = hass.bus.async_listeners() assert listeners.get("test_event") is None assert listeners.get("test_event2") == 1 hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data.get("event") == "test_event2" async def test_reload_config_when_invalid_config(hass, calls): """Test the reload config service handling invalid config.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value={automation.DOMAIN: "not valid"}, ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_reload_config_handles_load_fails(hass, calls): """Test the reload config service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } }, ) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data.get("event") == "test_event" with patch( "homeassistant.config.load_yaml_config_file", side_effect=HomeAssistantError("bla"), ): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) assert hass.states.get("automation.hello") is not None hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 2 @pytest.mark.parametrize("service", ["turn_off_stop", "turn_off_no_stop", "reload"]) async def test_automation_stops(hass, calls, service): """Test that turning off / reloading stops any running actions as appropriate.""" entity_id = "automation.hello" test_entity = "test.entity" config = { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"service": "test.automation"}, ], } } assert await async_setup_component(hass, automation.DOMAIN, config) running = asyncio.Event() @callback def running_cb(event): running.set() hass.bus.async_listen_once("running", running_cb) hass.states.async_set(test_entity, "hello") hass.bus.async_fire("test_event") await running.wait() if service == "turn_off_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True, ) elif service == "turn_off_no_stop": await hass.services.async_call( automation.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id, 
automation.CONF_STOP_ACTIONS: False}, blocking=True, ) else: with patch( "homeassistant.config.load_yaml_config_file", autospec=True, return_value=config, ): await hass.services.async_call( automation.DOMAIN, SERVICE_RELOAD, blocking=True ) hass.states.async_set(test_entity, "goodbye") await hass.async_block_till_done() assert len(calls) == (1 if service == "turn_off_no_stop" else 0) async def test_automation_restore_state(hass): """Ensure states are restored on startup.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, "action": {"service": "test.automation"}, }, ] } assert await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time calls = async_mock_service(hass, "test", "automation") assert automation.is_on(hass, "automation.bye") is False hass.bus.async_fire("test_event_bye") await hass.async_block_till_done() assert len(calls) == 0 assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event_hello") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off(hass): """Test initial value off.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_initial_value_on(hass): """Test initial value on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } }, ) assert automation.is_on(hass, "automation.hello") await hass.async_start() await hass.async_block_till_done() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_initial_value_off_but_restore_on(hass): """Test initial value off and restored state is turned on.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_ON),)) await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") await hass.async_start() hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def 
test_initial_value_on_but_restore_off(hass): """Test initial value on and restored state is turned off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_no_initial_value_and_restore_off(hass): """Test no initial value and restored state off.""" calls = async_mock_service(hass, "test", "automation") mock_restore_cache(hass, (State("automation.hello", STATE_OFF),)) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert not automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 async def test_automation_is_on_if_no_initial_state_or_restore(hass): """Test initial value is on when no initial state or restored state.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 async def test_automation_not_trigger_on_bootstrap(hass): """Test that automation is not triggered on bootstrap.""" hass.state = CoreState.not_running calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert automation.is_on(hass, "automation.hello") hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_automation_bad_trigger(hass, caplog): """Test bad trigger configuration.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "automation"}, "action": [], } }, ) assert "Integration 'automation' does not provide trigger support."
in caplog.text async def test_automation_with_error_in_script(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "Service not found" in caplog.text assert "Traceback" not in caplog.text async def test_automation_with_error_in_script_2(hass, caplog): """Test automation with an error in script.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": None, "entity_id": "hello.world"}, } }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert "string value is None" in caplog.text async def test_automation_restore_last_triggered_with_initial_state(hass): """Ensure last_triggered is restored, even when initial state is set.""" time = dt_util.utcnow() mock_restore_cache( hass, ( State("automation.hello", STATE_ON), State("automation.bye", STATE_ON, {"last_triggered": time}), State("automation.solong", STATE_OFF, {"last_triggered": time}), ), ) config = { automation.DOMAIN: [ { "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ] } await async_setup_component(hass, automation.DOMAIN, config) state = hass.states.get("automation.hello") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] is None state = hass.states.get("automation.bye") assert state assert state.state == STATE_OFF assert state.attributes["last_triggered"] == time state = hass.states.get("automation.solong") assert state assert state.state == STATE_ON assert state.attributes["last_triggered"] == time async def test_extraction_functions(hass): """Test extraction functions.""" assert await async_setup_component( hass, DOMAIN, { DOMAIN: [ { "alias": "test1", "trigger": {"platform": "state", "entity_id": "sensor.trigger_1"}, "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, ], }, { "alias": "test2", "trigger": { "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": "trigger-device-2", }, "condition": { "condition": "device", "device_id": "condition-device", "domain": "light", "type": "is_on", "entity_id": "light.bla", }, "action": [ { "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { "condition": "state", "entity_id": "sensor.condition", "state": "100", }, {"scene": "scene.hello"}, { "domain": "light", "device_id": "device-in-both", "entity_id": "light.bla", "type": "turn_on", }, { "domain": "light", "device_id": "device-in-last", "entity_id": "light.bla", "type": 
"turn_on", }, ], }, ] }, ) assert set(automation.automations_with_entity(hass, "light.in_both")) == { "automation.test1", "automation.test2", } assert set(automation.entities_in_automation(hass, "automation.test1")) == { "sensor.trigger_1", "light.condition_state", "light.in_both", "light.in_first", } assert set(automation.automations_with_device(hass, "device-in-both")) == { "automation.test1", "automation.test2", } assert set(automation.devices_in_automation(hass, "automation.test2")) == { "trigger-device-2", "condition-device", "device-in-both", "device-in-last", } async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) await async_setup_component(hass, "logbook", {}) entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, { ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation", ATTR_SOURCE: "source of trigger", }, ), ], entity_attr_cache, {}, ) ) assert event1["name"] == "Hello Automation" assert event1["domain"] == "automation" assert event1["message"] == "has been triggered" assert event1["entity_id"] == "automation.hello" assert event2["name"] == "Bye Automation" assert event2["domain"] == "automation" assert event2["message"] == "has been triggered by source of trigger" assert event2["entity_id"] == "automation.bye" async def test_automation_variables(hass, caplog): """Test automation variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "variables": { "test_var": "defined_in_config", "event_type": "{{ trigger.event.event_type }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 1 hass.bus.async_fire("test_event_2", {"pass_condition": True}) await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" not in caplog.text hass.bus.async_fire("test_event_3") await hass.async_block_till_done() assert len(calls) == 2 assert "Error rendering variables" in caplog.text hass.bus.async_fire("test_event_3", {"break": 0}) await hass.async_block_till_done() assert len(calls) == 3 async def test_automation_trigger_variables(hass, caplog): """Test automation trigger variables.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { 
automation.DOMAIN: [ { "variables": { "event_type": "{{ trigger.event.event_type }}", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, { "variables": { "event_type": "{{ trigger.event.event_type }}", "test_var": "overridden_in_config", }, "trigger_variables": { "test_var": "defined_in_config", }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", }, }, }, ] }, ) hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["value"] == "defined_in_config" assert calls[0].data["event_type"] == "test_event" hass.bus.async_fire("test_event_2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["value"] == "overridden_in_config" assert calls[1].data["event_type"] == "test_event_2" assert "Error rendering variables" not in caplog.text async def test_automation_bad_trigger_variables(hass, caplog): """Test automation trigger variables accessing hass is rejected.""" calls = async_mock_service(hass, "test", "automation") assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", }, }, ] }, ) hass.bus.async_fire("test_event") assert "Use of 'states' is not supported in limited templates" in caplog.text await hass.async_block_till_done() assert len(calls) == 0 async def test_blueprint_automation(hass, calls): """Test blueprint automation.""" assert await async_setup_component( hass, "automation", { "automation": { "use_blueprint": { "path": "test_event_service.yaml", "input": { "trigger_event": "blueprint_event", "service_to_call": "test.automation", }, } } }, ) hass.bus.async_fire("blueprint_event") await hass.async_block_till_done() assert len(calls) == 1 assert automation.entities_in_automation(hass, "automation.automation_0") == [ "light.kitchen" ] async def test_trigger_service(hass, calls): """Test the automation trigger service.""" assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "service": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } }, ) context = Context() await hass.services.async_call( "automation", "trigger", {"entity_id": "automation.hello"}, blocking=True, context=context, ) assert len(calls) == 1 assert calls[0].data.get("trigger") == {"platform": None} assert calls[0].context.parent_id is context.id
adrienbrault/home-assistant
tests/components/automation/test_init.py
homeassistant/components/fibaro/binary_sensor.py
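The automation tests above all share one idiom: set up an automation with async_setup_component, fire an event on the bus, drain the event loop with async_block_till_done, then assert on the calls captured by a mocked service. A minimal sketch of that idiom, assuming the same hass pytest fixture and test helpers used throughout the file above:

from homeassistant.setup import async_setup_component

from tests.common import async_mock_service  # helper used by these tests


async def test_event_fires_service(hass):
    """Minimal sketch: an event trigger invokes a mocked service exactly once."""
    calls = async_mock_service(hass, "test", "automation")
    assert await async_setup_component(
        hass,
        "automation",
        {
            "automation": {
                "trigger": {"platform": "event", "event_type": "test_event"},
                "action": {"service": "test.automation"},
            }
        },
    )
    hass.bus.async_fire("test_event")
    await hass.async_block_till_done()
    assert len(calls) == 1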
from chainer import backend
from chainer import function_node
from chainer.utils import type_check


class FFT(function_node.FunctionNode):

    """Fast Fourier transform."""

    def __init__(self, method):
        self._method = method

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('real', 'imag'))
        r_type, i_type = in_types
        type_check.expect(
            r_type.dtype.kind == 'f',
            r_type.ndim > 0,
            r_type.shape == i_type.shape,
            r_type.dtype == i_type.dtype,
        )

    def forward(self, inputs):
        xp = backend.get_array_module(*inputs)
        real, imag = inputs
        x = real + imag * 1j
        y = getattr(xp.fft, self._method)(x)
        real_y = y.real.astype(real.dtype, copy=False)
        imag_y = y.imag.astype(imag.dtype, copy=False)
        return real_y, imag_y

    def backward(self, inputs, grads):
        gr, gi = grads
        xp = backend.get_array_module(*grads)
        if gr is None:
            gr = xp.zeros_like(gi.data)
        if gi is None:
            gi = xp.zeros_like(gr.data)
        gxi, gxr = FFT(self._method).apply((gi, gr))
        return gxr, gxi


def fft(x):
    """Fast Fourier transform.

    Args:
        x (tuple): ``(real, imag)`` where ``real`` is a
            :class:`~chainer.Variable` storing the real part and ``imag`` is
            a :class:`~chainer.Variable` storing the imaginary part.

    Returns:
        tuple: Returns ``(ry, ri)`` where ``ry`` is the real part of the
        result and ``ri`` is the imaginary part of the result.

    .. note::
       Currently this function supports a tuple as input. It will support
       complex numbers directly in the future.

    """
    real, imag = x
    return FFT('fft').apply((real, imag))


def ifft(x):
    """Inverse fast Fourier transform.

    Args:
        x (tuple): ``(real, imag)`` where ``real`` is a
            :class:`~chainer.Variable` storing the real part and ``imag`` is
            a :class:`~chainer.Variable` storing the imaginary part.

    Returns:
        tuple: Returns ``(ry, ri)`` where ``ry`` is the real part of the
        result and ``ri`` is the imaginary part of the result.

    .. note::
       Currently this function supports a tuple as input. It will support
       complex numbers directly in the future.

    """
    real, imag = x
    return FFT('ifft').apply((real, imag))
import copy import functools import unittest import chainer import chainer.testing import chainer.testing.attr import numpy import pytest import chainermn import chainermn.functions class TestPointToPointCommunication(unittest.TestCase): def setup(self, gpu): self.gpu = gpu if self.gpu: self.communicator = chainermn.create_communicator('hierarchical') device = self.communicator.intra_rank chainer.cuda.get_device_from_id(device).use() else: self.communicator = chainermn.create_communicator('naive') device = -1 if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size # Activation function. self.f = chainer.functions.sigmoid # Evaluation function. self.evaluation = chainer.functions.mean_squared_error # Input data. self.x = chainer.Variable( numpy.arange(10).reshape(1, 10).astype(numpy.float32) / 10) self.model = chainer.links.Linear( 10, 10, initialW=self._init_w(self.communicator.rank)) self.entire_model = [chainer.links.Linear( 10, 10, initialW=self._init_w(l)) for l in range(self.communicator.size)] self.device = device if device >= 0: self.x.to_gpu() self.model.to_gpu() for model in self.entire_model: model.to_gpu() def _init_w(self, l): return 1.0 * numpy.arange(100).reshape(10, 10).astype(numpy.float32) \ / ((l + 1) * 100) def check_communication(self): if self.communicator.rank == 0: # Input process. y = self.f(self.model(self.x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() grad = self.model.W.grad # Compute the expected gradient. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) err_ = self.evaluation(x_, self.x) err_.backward() grad_expected = self.entire_model[0].W.grad chainer.testing.assert_allclose(grad, grad_expected) elif self.communicator.rank == self.communicator.size - 1: # Output process. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = self.evaluation(y, self.x) err.backward() # Compute the expected output. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) y_expect = x_ chainer.testing.assert_allclose(y.data, y_expect.data) else: # Intermediate processes. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_communication_cpu(self): self.setup(False) self.check_communication() @chainer.testing.attr.gpu def test_communication_gpu(self): self.setup(True) self.check_communication() def check_retain(self): if self.communicator.rank == 0: # Starting process. t = copy.copy(self.x) y = self.f(self.model(self.x)) dlg = chainermn.functions.send( y, self.communicator, self.rank_send) # Unless delegate_variable is used, backprop would stop here. x = chainermn.functions.recv( self.communicator, self.rank_recv, delegate_variable=dlg) err = self.evaluation(x, t) err.backward() # self.x.grad is None if backprop stops in the middle. assert self.x.grad is not None else: # Intermediate processes. 
x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_retain_cpu(self): self.setup(False) self.check_retain() @chainer.testing.attr.gpu def test_retain_gpu(self): self.setup(True) self.check_retain() def check_tuple_communication(self, length): if self.communicator.rank == 0: y = [] for i in range(length): _y = self.f(self.model(self.x)) y.append(_y) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() elif self.communicator.rank == self.communicator.size - 1: y = chainermn.functions.recv( self.communicator, self.rank_recv, force_tuple=True) assert isinstance(y, tuple) z = functools.reduce(lambda x, y: x + y, y) err = self.evaluation(z, self.x) err.backward() else: y = chainermn.functions.recv(self.communicator, self.rank_recv) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_tuple_communication1_cpu(self): self.setup(False) self.check_tuple_communication(1) def test_tuple_communication2_cpu(self): self.setup(False) self.check_tuple_communication(2) @chainer.testing.attr.gpu def test_tuple_communication1_gpu(self): self.setup(True) self.check_tuple_communication(1) @chainer.testing.attr.gpu def test_tuple_communication2_gpu(self): self.setup(True) self.check_tuple_communication(2) class TestNonVariableInput(unittest.TestCase): def setUp(self): self.communicator = chainermn.create_communicator('naive') if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size def test_non_variable_send(self): """Checks if backward will be called even if inputs are not Variable. This test confirms whether deadlock occurs when numpy/cupy array is given as an input of send. In this case, the input will be converted to chainer Variable without ``requires_grad``, thus ``backward`` will not be called without any modification. """ if self.communicator.rank == 0: x = numpy.ones((1, 10)).astype(numpy.float32) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) x = chainermn.functions.pseudo_connect(phi, x) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() elif self.communicator.rank == self.communicator.size - 1: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() else: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) phi.backward()
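A minimal sketch of the delegate-variable pattern that check_retain exercises above. This assumes a launch with at least two MPI processes (the test itself skips otherwise) and uses only calls that already appear in this file:

import numpy
import chainer
import chainermn

comm = chainermn.create_communicator('naive')
rank_send = (comm.rank + 1) % comm.size
rank_recv = (comm.rank - 1) % comm.size

model = chainer.links.Linear(10, 10)
x = chainer.Variable(numpy.ones((1, 10), dtype=numpy.float32))

if comm.rank == 0:
    # send() returns a delegate variable; handing it to recv() ties both
    # halves of the ring into one graph, so backward() reaches the model.
    dlg = chainermn.functions.send(model(x), comm, rank_send)
    y = chainermn.functions.recv(comm, rank_recv, delegate_variable=dlg)
    loss = chainer.functions.sum(y)
    loss.backward()
    assert x.grad is not None
else:
    y = model(chainermn.functions.recv(comm, rank_recv))
    phi = chainermn.functions.send(y, comm, rank_send)
    phi.backward()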
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/functions/math/fft.py
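As the docstrings above note, fft and ifft take and return (real, imag) pairs. A quick round-trip sketch (illustrative only, not from the source) through the public chainer.functions aliases:

import numpy as np
import chainer
import chainer.functions as F

real = chainer.Variable(np.random.rand(4, 8).astype(np.float32))
imag = chainer.Variable(np.random.rand(4, 8).astype(np.float32))

# fft followed by ifft should recover the input up to float32 tolerance.
ry, iy = F.fft((real, imag))
rz, iz = F.ifft((ry, iy))

np.testing.assert_allclose(rz.data, real.data, atol=1e-5)
np.testing.assert_allclose(iz.data, imag.data, atol=1e-5)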
# For backward compatibility
from chainer.training.triggers.interval_trigger import IntervalTrigger  # NOQA
from chainer.training.util import _never_fire_trigger  # NOQA
from chainer.training.util import get_trigger  # NOQA
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/training/trigger.py
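Since the module above only re-exports names for backward compatibility, both import paths resolve to the same objects; a small sketch:

from chainer.training.trigger import IntervalTrigger as old_location
from chainer.training.triggers import IntervalTrigger as new_location

assert old_location is new_location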
from chainer import backend
from chainer.backends import cuda
from chainer.backends import intel64
from chainer import optimizer


_default_hyperparam = optimizer.Hyperparameter()
_default_hyperparam.lr = 0.01
_default_hyperparam.momentum = 0.9


class CorrectedMomentumSGDRule(optimizer.UpdateRule):

    """Update rule for the corrected momentum SGD.

    See :class:`~chainer.optimizers.CorrectedMomentumSGD` for the default
    values of the hyperparameters.

    Args:
        parent_hyperparam (~chainer.optimizer.Hyperparameter): Hyperparameter
            that provides the default values.
        lr (float): Learning rate.
        momentum (float): Exponential decay rate of the first order moment.

    """

    def __init__(self, parent_hyperparam=None, lr=None, momentum=None):
        super(CorrectedMomentumSGDRule, self).__init__(
            parent_hyperparam or _default_hyperparam)
        if lr is not None:
            self.hyperparam.lr = lr
        if momentum is not None:
            self.hyperparam.momentum = momentum

    def init_state(self, param):
        xp = backend.get_array_module(param.data)
        with cuda.get_device_from_array(param.data):
            self.state['v'] = xp.zeros_like(param.data)

        # For iDeep
        if (isinstance(param.data, intel64.mdarray)
                and intel64.inputs_all_ready((self.state['v'],))):
            self.state['v'] = intel64.ideep.array(
                self.state['v'], itype=intel64.ideep.wgt_array)

    def update_core_cpu(self, param):
        grad = param.grad
        if grad is None:
            return
        v = self.state['v']
        if isinstance(v, intel64.mdarray):
            v.inplace_axpby(self.hyperparam.momentum, -1, grad)
            param.data += self.hyperparam.lr * v
        else:
            v *= self.hyperparam.momentum
            v -= grad
            param.data += self.hyperparam.lr * v

    def update_core_gpu(self, param):
        grad = param.grad
        if grad is None:
            return
        cuda.elementwise(
            'T grad, T lr, T momentum',
            'T param, T v',
            '''v = momentum * v - grad;
               param += lr * v;''',
            'momentum_sgd')(
                grad, self.hyperparam.lr, self.hyperparam.momentum,
                param.data, self.state['v'])


class CorrectedMomentumSGD(optimizer.GradientMethod):

    """Momentum SGD optimizer.

    This implements the momentum correction discussed in the third section of
    `Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour \
<https://arxiv.org/abs/1706.02677>`_.

    :class:`~chainer.optimizers.MomentumSGD` implements equation (10) of the
    paper. This optimizer implements equation (9). To clarify the
    relationship between the two methods, we show the equivalence between
    equation (9) and a modification of equation (10) that takes momentum
    correction into account. First, we set :math:`v_{t} = \\eta_{t} u_t`.
    We substitute this relation into equation (10).

    .. math::

        v_{t+1} &= m\\frac{\\eta_{t+1}}{\\eta_{t}}v_t + \\eta_{t+1}g_t \\\\
                &= m\\frac{\\eta_{t+1}}{\\eta_{t}}\\eta_{t}u_t
                   + \\eta_{t+1}g_t \\\\
                &= \\eta_{t+1}(m u_t + g_t)

    From this result, we derive :math:`u_{t+1} = m u_t + g_t`, which is how
    update tensors are calculated by
    :class:`~chainer.optimizers.CorrectedMomentumSGD`. Thus, the equivalence
    is shown.

    Args:
        lr (float): Learning rate.
        momentum (float): Exponential decay rate of the first order moment.

    """

    def __init__(self, lr=_default_hyperparam.lr,
                 momentum=_default_hyperparam.momentum):
        super(CorrectedMomentumSGD, self).__init__()
        self.hyperparam.lr = lr
        self.hyperparam.momentum = momentum

    lr = optimizer.HyperparameterProxy('lr')
    momentum = optimizer.HyperparameterProxy('momentum')

    def create_update_rule(self):
        return CorrectedMomentumSGDRule(self.hyperparam)
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/optimizers/corrected_momentum_sgd.py
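The equivalence derived in the docstring above can be checked numerically; a small sketch with an assumed learning-rate schedule and gradients:

import numpy as np

m = 0.9                    # momentum
eta = [0.1, 0.05, 0.025]   # learning rates eta_t
g = [0.3, -0.2]            # gradients g_t

# Equation (10): v_{t+1} = m * (eta_{t+1} / eta_t) * v_t + eta_{t+1} * g_t
v = 0.0
for t in range(2):
    v = m * (eta[t + 1] / eta[t]) * v + eta[t + 1] * g[t]

# Equation (9): u_{t+1} = m * u_t + g_t, with the substitution v_t = eta_t * u_t
u = 0.0
for t in range(2):
    u = m * u + g[t]

np.testing.assert_allclose(v, eta[2] * u)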
from chainer.training.triggers import interval_trigger


def get_trigger(trigger):
    """Gets a trigger object.

    A trigger object is a callable that accepts a
    :class:`~chainer.training.Trainer` object as an argument and returns a
    boolean value. When it returns True, various kinds of events can occur
    depending on the context in which the trigger is used. For example, if
    the trigger is passed to the :class:`~chainer.training.Trainer` as the
    stop trigger, the training loop breaks when the trigger returns True. If
    the trigger is passed to the :meth:`~chainer.training.Trainer.extend`
    method of a trainer, then the registered extension is invoked only when
    the trigger returns True.

    This function returns a trigger object based on the argument. If
    ``trigger`` is already a callable, it just returns the trigger. If
    ``trigger`` is ``None``, it returns a trigger that never fires.
    Otherwise, it passes the value to
    :class:`~chainer.training.triggers.IntervalTrigger`.

    Args:
        trigger: Trigger object. It can be either an already built trigger
            object (i.e., a callable object that accepts a trainer object and
            returns a bool value), or a tuple. In the latter case, the tuple
            is passed to :class:`~chainer.training.triggers.IntervalTrigger`.

    Returns:
        ``trigger`` if it is a callable, otherwise a
        :class:`~chainer.training.triggers.IntervalTrigger` object made from
        ``trigger``.

    """
    if callable(trigger):
        return trigger
    elif trigger is None:
        return _never_fire_trigger
    else:
        return interval_trigger.IntervalTrigger(*trigger)


def _never_fire_trigger(trainer):
    return False
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/training/util.py
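The three branches of get_trigger described in the docstring above can be demonstrated directly; a sketch:

from chainer.training import util
from chainer.training.triggers import IntervalTrigger

# A tuple is forwarded to IntervalTrigger.
trigger = util.get_trigger((1, 'epoch'))
assert isinstance(trigger, IntervalTrigger)

# A callable is returned unchanged.
def my_trigger(trainer):
    return False

assert util.get_trigger(my_trigger) is my_trigger

# None yields a trigger that never fires (it always returns False).
assert util.get_trigger(None) is not None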
# import classes and functions
from chainer.dataset.convert import concat_examples  # NOQA
from chainer.dataset.convert import ConcatWithAsyncTransfer  # NOQA
from chainer.dataset.convert import to_device  # NOQA
from chainer.dataset.dataset_mixin import DatasetMixin  # NOQA
from chainer.dataset.download import cache_or_load_file  # NOQA
from chainer.dataset.download import cached_download  # NOQA
from chainer.dataset.download import get_dataset_directory  # NOQA
from chainer.dataset.download import get_dataset_root  # NOQA
from chainer.dataset.download import set_dataset_root  # NOQA
from chainer.dataset.iterator import Iterator  # NOQA
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/dataset/__init__.py
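Of the names re-exported above, concat_examples is the usual entry point for batching; a sketch of its default behavior on a list of (x, t) pairs:

import numpy as np
from chainer.dataset import concat_examples

batch = [
    (np.zeros(3, dtype=np.float32), np.int32(0)),
    (np.ones(3, dtype=np.float32), np.int32(1)),
]

# Each position of the example tuples is stacked along a new batch axis.
x, t = concat_examples(batch)
assert x.shape == (2, 3)
assert t.shape == (2,)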
from chainer.functions.connection import convolution_2d


def dilated_convolution_2d(x, W, b=None, stride=1, pad=0, dilate=1,
                           cover_all=False):
    """Two-dimensional dilated convolution function.

    This is an implementation of two-dimensional dilated convolution in
    ConvNets. It takes three variables: the input image ``x``, the filter
    weight ``W``, and the bias vector ``b``.

    .. note::

        You can also perform dilated convolution by passing the ``dilate``
        argument to :class:`chainer.functions.convolution_2d`. The
        functionality is the same.

    Notation: here is a notation for dimensionalities.

    - :math:`n` is the batch size.
    - :math:`c_I` and :math:`c_O` are the number of input and output
      channels, respectively.
    - :math:`h` and :math:`w` are the height and width of the input image,
      respectively.
    - :math:`k_H` and :math:`k_W` are the height and width of the filters,
      respectively.

    Args:
        x (~chainer.Variable): Input variable of shape
            :math:`(n, c_I, h, w)`.
        W (~chainer.Variable): Weight variable of shape
            :math:`(c_O, c_I, k_H, k_W)`.
        b (~chainer.Variable): Bias variable of length :math:`c_O`
            (optional).
        stride (int or pair of ints): Stride of filter applications.
            ``stride=s`` and ``stride=(s, s)`` are equivalent.
        pad (int or pair of ints): Spatial padding width for input arrays.
            ``pad=p`` and ``pad=(p, p)`` are equivalent.
        dilate (int or pair of ints): Dilation factor of filter applications.
            ``dilate=d`` and ``dilate=(d, d)`` are equivalent.
        cover_all (bool): If ``True``, all spatial locations are convoluted
            into some output pixels. It may make the output size larger.

    Returns:
        ~chainer.Variable: Output variable.

    The function computes correlations between filters and patches of size
    :math:`(k_H, k_W)` in ``x``. Note that correlation here is equivalent to
    the inner product between expanded vectors. Patches are extracted at
    intervals of the dilation factor and at positions shifted by multiples of
    ``stride`` from the first position ``-pad`` for each spatial axis. The
    right-most (or bottom-most) patches do not run over the padded spatial
    size.

    Let :math:`(s_Y, s_X)` be the stride of filter application,
    :math:`(p_H, p_W)` the spatial padding size, and :math:`(d_Y, d_X)` the
    dilation factor of filter application. Then, the output size
    :math:`(h_O, w_O)` is determined by the following equations:

    .. math::

       h_O &= (h + 2p_H - k_H - (k_H - 1) * (d_Y - 1)) / s_Y + 1,\\\\
       w_O &= (w + 2p_W - k_W - (k_W - 1) * (d_X - 1)) / s_X + 1.

    If the bias vector is given, then it is added to all spatial locations of
    the output of convolution.

    """
    return convolution_2d.convolution_2d(x, W, b, stride, pad, cover_all,
                                         dilate=dilate)
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/functions/connection/dilated_convolution_2d.py
from chainer.functions.connection import linear
from chainer import initializers
from chainer import link
from chainer import utils
from chainer import variable


class Linear(link.Link):

    """Linear layer (a.k.a.\\ fully-connected layer).

    This is a link that wraps the :func:`~chainer.functions.linear` function,
    and holds a weight matrix ``W`` and optionally a bias vector ``b`` as
    parameters.

    If ``initialW`` is left to the default value of ``None``, the weight
    matrix ``W`` is initialized with i.i.d. Gaussian samples, each of which
    has zero mean and deviation :math:`\\sqrt{1/\\text{in_size}}`. The bias
    vector ``b`` is of size ``out_size``. If ``initial_bias`` is left to the
    default value of ``None``, each element is initialized as zero. If the
    ``nobias`` argument is set to ``True``, then this link does not hold a
    bias vector.

    Args:
        in_size (int or None): Dimension of input vectors. If unspecified or
            ``None``, parameter initialization will be deferred until the
            first forward data pass at which time the size will be
            determined.
        out_size (int): Dimension of output vectors. If only one value is
            passed for ``in_size`` and ``out_size``, that value will be used
            for the ``out_size`` dimension.
        nobias (bool): If ``True``, then this function does not use the bias.
        initialW (:ref:`initializer <initializer>`): Initializer to
            initialize the weight. When it is :class:`numpy.ndarray`,
            its ``ndim`` should be 2. If ``initialW`` is ``None``, then the
            weights are initialized with i.i.d. Gaussian samples, each of
            which has zero mean and deviation
            :math:`\\sqrt{1/\\text{in_size}}`.
        initial_bias (:ref:`initializer <initializer>`): Initializer to
            initialize the bias. If ``None``, the bias will be initialized to
            zero. When it is :class:`numpy.ndarray`, its ``ndim`` should
            be 1.

    .. seealso:: :func:`~chainer.functions.linear`

    Attributes:
        W (~chainer.Variable): Weight parameter.
        b (~chainer.Variable): Bias parameter.

    .. admonition:: Example

        There are several ways to make a Linear link.

        Define an input vector ``x`` as:

        >>> x = np.array([[0, 1, 2, 3, 4]], np.float32)

        1. Give the first two arguments explicitly:

            Those numbers are considered as the input size and the output
            size.

            >>> l = L.Linear(5, 10)
            >>> y = l(x)
            >>> y.shape
            (1, 10)

        2. Omit ``in_size`` (give the output size only as the first
           argument) or fill it with ``None``:

            In this case, the size of the second axis of ``x`` is used as
            the input size. So the below two cases are the same.

            >>> l = L.Linear(10)
            >>> y = l(x)
            >>> y.shape
            (1, 10)

            >>> l = L.Linear(None, 10)
            >>> y = l(x)
            >>> y.shape
            (1, 10)

            When you omit the first argument, you need to specify the other
            subsequent arguments from ``nobias`` as keyword arguments. So
            the below two cases are the same.
            >>> l = L.Linear(None, 10, False, None, 0)
            >>> y = l(x)
            >>> y.shape
            (1, 10)

            >>> l = L.Linear(10, nobias=False, initialW=None, initial_bias=0)
            >>> y = l(x)
            >>> y.shape
            (1, 10)

    """

    def __init__(self, in_size, out_size=None, nobias=False, initialW=None,
                 initial_bias=None):
        super(Linear, self).__init__()

        if out_size is None:
            in_size, out_size = None, in_size
        self.out_size = out_size

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_size is not None:
                self._initialize_params(in_size)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_size)

    def _initialize_params(self, in_size):
        self.W.initialize((self.out_size, in_size))

    def forward(self, x, n_batch_axes=1):
        """Applies the linear layer.

        Args:
            x (~chainer.Variable): Batch of input vectors.
            n_batch_axes (int): The number of batch axes. The default is 1.
                The input variable is reshaped into
                (:math:`{\\rm n\\_batch\\_axes} + 1`)-dimensional tensor.
                This should be greater than 0.

        Returns:
            ~chainer.Variable: Output of the linear layer.

        """
        if self.W.array is None:
            # Infer the input size from the non-batch axes so that deferred
            # initialization also works when n_batch_axes > 1.
            in_size = utils.size_of_shape(x.shape[n_batch_axes:])
            self._initialize_params(in_size)
        return linear.linear(x, self.W, self.b, n_batch_axes=n_batch_axes)
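A short sketch of the ``n_batch_axes`` path (my illustration under the deferred-initialization fix above, not from the original docstring): with two batch axes, the trailing axis is the feature dimension and ``in_size`` is inferred from it.

import numpy as np

import chainer.links as L

l = L.Linear(None, 10)                     # in_size deferred
x = np.zeros((4, 7, 5), dtype=np.float32)  # two batch axes, feature size 5
y = l(x, n_batch_axes=2)
assert y.shape == (4, 7, 10)               # W was initialized as (10, 5)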
import copy import functools import unittest import chainer import chainer.testing import chainer.testing.attr import numpy import pytest import chainermn import chainermn.functions class TestPointToPointCommunication(unittest.TestCase): def setup(self, gpu): self.gpu = gpu if self.gpu: self.communicator = chainermn.create_communicator('hierarchical') device = self.communicator.intra_rank chainer.cuda.get_device_from_id(device).use() else: self.communicator = chainermn.create_communicator('naive') device = -1 if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size # Activation function. self.f = chainer.functions.sigmoid # Evaluation function. self.evaluation = chainer.functions.mean_squared_error # Input data. self.x = chainer.Variable( numpy.arange(10).reshape(1, 10).astype(numpy.float32) / 10) self.model = chainer.links.Linear( 10, 10, initialW=self._init_w(self.communicator.rank)) self.entire_model = [chainer.links.Linear( 10, 10, initialW=self._init_w(l)) for l in range(self.communicator.size)] self.device = device if device >= 0: self.x.to_gpu() self.model.to_gpu() for model in self.entire_model: model.to_gpu() def _init_w(self, l): return 1.0 * numpy.arange(100).reshape(10, 10).astype(numpy.float32) \ / ((l + 1) * 100) def check_communication(self): if self.communicator.rank == 0: # Input process. y = self.f(self.model(self.x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() grad = self.model.W.grad # Compute the expected gradient. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) err_ = self.evaluation(x_, self.x) err_.backward() grad_expected = self.entire_model[0].W.grad chainer.testing.assert_allclose(grad, grad_expected) elif self.communicator.rank == self.communicator.size - 1: # Output process. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = self.evaluation(y, self.x) err.backward() # Compute the expected output. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) y_expect = x_ chainer.testing.assert_allclose(y.data, y_expect.data) else: # Intermediate processes. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_communication_cpu(self): self.setup(False) self.check_communication() @chainer.testing.attr.gpu def test_communication_gpu(self): self.setup(True) self.check_communication() def check_retain(self): if self.communicator.rank == 0: # Starting process. t = copy.copy(self.x) y = self.f(self.model(self.x)) dlg = chainermn.functions.send( y, self.communicator, self.rank_send) # Unless delegate_variable is used, backprop would stop here. x = chainermn.functions.recv( self.communicator, self.rank_recv, delegate_variable=dlg) err = self.evaluation(x, t) err.backward() # self.x.grad is None if backprop stops in the middle. assert self.x.grad is not None else: # Intermediate processes. 
x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_retain_cpu(self): self.setup(False) self.check_retain() @chainer.testing.attr.gpu def test_retain_gpu(self): self.setup(True) self.check_retain() def check_tuple_communication(self, length): if self.communicator.rank == 0: y = [] for i in range(length): _y = self.f(self.model(self.x)) y.append(_y) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() elif self.communicator.rank == self.communicator.size - 1: y = chainermn.functions.recv( self.communicator, self.rank_recv, force_tuple=True) assert isinstance(y, tuple) z = functools.reduce(lambda x, y: x + y, y) err = self.evaluation(z, self.x) err.backward() else: y = chainermn.functions.recv(self.communicator, self.rank_recv) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_tuple_communication1_cpu(self): self.setup(False) self.check_tuple_communication(1) def test_tuple_communication2_cpu(self): self.setup(False) self.check_tuple_communication(2) @chainer.testing.attr.gpu def test_tuple_communication1_gpu(self): self.setup(True) self.check_tuple_communication(1) @chainer.testing.attr.gpu def test_tuple_communication2_gpu(self): self.setup(True) self.check_tuple_communication(2) class TestNonVariableInput(unittest.TestCase): def setUp(self): self.communicator = chainermn.create_communicator('naive') if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size def test_non_variable_send(self): """Checks if backward will be called even if inputs are not Variable. This test confirms whether deadlock occurs when numpy/cupy array is given as an input of send. In this case, the input will be converted to chainer Variable without ``requires_grad``, thus ``backward`` will not be called without any modification. """ if self.communicator.rank == 0: x = numpy.ones((1, 10)).astype(numpy.float32) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) x = chainermn.functions.pseudo_connect(phi, x) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() elif self.communicator.rank == self.communicator.size - 1: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() else: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) phi.backward()
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/links/connection/linear.py
import numpy from chainer.backends import cuda from chainer import function_node from chainer import utils from chainer.utils import type_check class Expm1(function_node.FunctionNode): @property def label(self): return 'expm1' def check_type_forward(self, in_types): type_check._argname(in_types, ('x',)) type_check.expect(in_types[0].dtype.kind == 'f') def forward_cpu(self, x): self.retain_outputs((0,)) return utils.force_array(numpy.expm1(x[0])), def forward_gpu(self, x): self.retain_outputs((0,)) return cuda.cupy.expm1(x[0]), def backward(self, indexes, gy): y = self.get_retained_outputs()[0] return (y + 1.0) * gy[0], def expm1(x): """Elementwise exponential minus one function.""" return Expm1().apply((x,))[0]
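A tiny numeric sketch (illustrative, my assumption): expm1 preserves precision for small inputs where the naive exp(x) - 1 suffers catastrophic cancellation in float32.

import numpy as np

from chainer.functions.math.exponential_m1 import expm1

x = np.array([1e-7], dtype=np.float32)
y = expm1(x)
# Naive float32 exp(x) - 1 rounds to ~1.19e-07; expm1 stays near the
# true value of about 1.0000000e-07.
print(y.array, np.exp(x) - 1)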
import copy import functools import unittest import chainer import chainer.testing import chainer.testing.attr import numpy import pytest import chainermn import chainermn.functions class TestPointToPointCommunication(unittest.TestCase): def setup(self, gpu): self.gpu = gpu if self.gpu: self.communicator = chainermn.create_communicator('hierarchical') device = self.communicator.intra_rank chainer.cuda.get_device_from_id(device).use() else: self.communicator = chainermn.create_communicator('naive') device = -1 if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size # Activation function. self.f = chainer.functions.sigmoid # Evaluation function. self.evaluation = chainer.functions.mean_squared_error # Input data. self.x = chainer.Variable( numpy.arange(10).reshape(1, 10).astype(numpy.float32) / 10) self.model = chainer.links.Linear( 10, 10, initialW=self._init_w(self.communicator.rank)) self.entire_model = [chainer.links.Linear( 10, 10, initialW=self._init_w(l)) for l in range(self.communicator.size)] self.device = device if device >= 0: self.x.to_gpu() self.model.to_gpu() for model in self.entire_model: model.to_gpu() def _init_w(self, l): return 1.0 * numpy.arange(100).reshape(10, 10).astype(numpy.float32) \ / ((l + 1) * 100) def check_communication(self): if self.communicator.rank == 0: # Input process. y = self.f(self.model(self.x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() grad = self.model.W.grad # Compute the expected gradient. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) err_ = self.evaluation(x_, self.x) err_.backward() grad_expected = self.entire_model[0].W.grad chainer.testing.assert_allclose(grad, grad_expected) elif self.communicator.rank == self.communicator.size - 1: # Output process. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = self.evaluation(y, self.x) err.backward() # Compute the expected output. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) y_expect = x_ chainer.testing.assert_allclose(y.data, y_expect.data) else: # Intermediate processes. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_communication_cpu(self): self.setup(False) self.check_communication() @chainer.testing.attr.gpu def test_communication_gpu(self): self.setup(True) self.check_communication() def check_retain(self): if self.communicator.rank == 0: # Starting process. t = copy.copy(self.x) y = self.f(self.model(self.x)) dlg = chainermn.functions.send( y, self.communicator, self.rank_send) # Unless delegate_variable is used, backprop would stop here. x = chainermn.functions.recv( self.communicator, self.rank_recv, delegate_variable=dlg) err = self.evaluation(x, t) err.backward() # self.x.grad is None if backprop stops in the middle. assert self.x.grad is not None else: # Intermediate processes. 
x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_retain_cpu(self): self.setup(False) self.check_retain() @chainer.testing.attr.gpu def test_retain_gpu(self): self.setup(True) self.check_retain() def check_tuple_communication(self, length): if self.communicator.rank == 0: y = [] for i in range(length): _y = self.f(self.model(self.x)) y.append(_y) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() elif self.communicator.rank == self.communicator.size - 1: y = chainermn.functions.recv( self.communicator, self.rank_recv, force_tuple=True) assert isinstance(y, tuple) z = functools.reduce(lambda x, y: x + y, y) err = self.evaluation(z, self.x) err.backward() else: y = chainermn.functions.recv(self.communicator, self.rank_recv) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_tuple_communication1_cpu(self): self.setup(False) self.check_tuple_communication(1) def test_tuple_communication2_cpu(self): self.setup(False) self.check_tuple_communication(2) @chainer.testing.attr.gpu def test_tuple_communication1_gpu(self): self.setup(True) self.check_tuple_communication(1) @chainer.testing.attr.gpu def test_tuple_communication2_gpu(self): self.setup(True) self.check_tuple_communication(2) class TestNonVariableInput(unittest.TestCase): def setUp(self): self.communicator = chainermn.create_communicator('naive') if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size def test_non_variable_send(self): """Checks if backward will be called even if inputs are not Variable. This test confirms whether deadlock occurs when numpy/cupy array is given as an input of send. In this case, the input will be converted to chainer Variable without ``requires_grad``, thus ``backward`` will not be called without any modification. """ if self.communicator.rank == 0: x = numpy.ones((1, 10)).astype(numpy.float32) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) x = chainermn.functions.pseudo_connect(phi, x) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() elif self.communicator.rank == self.communicator.size - 1: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() else: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) phi.backward()
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/functions/math/exponential_m1.py
import numpy import chainer from chainer.functions.activation import maxout from chainer import initializer from chainer import link from chainer.links.connection import linear class Maxout(link.Chain): """Fully-connected maxout layer. Let ``M``, ``P`` and ``N`` be an input dimension, a pool size, and an output dimension, respectively. For an input vector :math:`x` of size ``M``, it computes .. math:: Y_{i} = \\mathrm{max}_{j} (W_{ij\\cdot}x + b_{ij}). Here :math:`W` is a weight tensor of shape ``(M, P, N)``, :math:`b` an optional bias vector of shape ``(M, P)`` and :math:`W_{ij\\cdot}` is a sub-vector extracted from :math:`W` by fixing first and second dimensions to :math:`i` and :math:`j`, respectively. Minibatch dimension is omitted in the above equation. As for the actual implementation, this chain has a Linear link with a ``(M * P, N)`` weight matrix and an optional ``M * P`` dimensional bias vector. Args: in_size (int): Dimension of input vectors. out_size (int): Dimension of output vectors. pool_size (int): Number of channels. initialW (:ref:`initializer <initializer>`): Initializer to initialize the weight. When it is :class:`numpy.ndarray`, its ``ndim`` should be 3. initial_bias (:ref:`initializer <initializer>`): Initializer to initialize the bias. If ``None``, the bias is omitted. When it is :class:`numpy.ndarray`, its ``ndim`` should be 2. Attributes: linear (~chainer.Link): The Linear link that performs affine transformation. .. seealso:: :func:`~chainer.functions.maxout` .. seealso:: Goodfellow, I., Warde-farley, D., Mirza, M., Courville, A., & Bengio, Y. (2013). Maxout Networks. In Proceedings of the 30th International Conference on Machine Learning (ICML-13) (pp. 1319-1327). `URL <http://jmlr.org/proceedings/papers/v28/goodfellow13.html>`_ """ def __init__(self, in_size, out_size, pool_size, initialW=None, initial_bias=0): super(Maxout, self).__init__() linear_out_size = out_size * pool_size if initialW is None or \ numpy.isscalar(initialW) or \ isinstance(initialW, initializer.Initializer): pass elif chainer.is_arrays_compatible([initialW]): if initialW.ndim != 3: raise ValueError('initialW.ndim should be 3') initialW = initialW.reshape(linear_out_size, in_size) elif callable(initialW): initialW_orig = initialW def initialW(array): array.shape = (out_size, pool_size, in_size) initialW_orig(array) array.shape = (linear_out_size, in_size) if initial_bias is None or \ numpy.isscalar(initial_bias) or \ isinstance(initial_bias, initializer.Initializer): pass elif chainer.is_arrays_compatible([initial_bias]): if initial_bias.ndim != 2: raise ValueError('initial_bias.ndim should be 2') initial_bias = initial_bias.reshape(linear_out_size) elif callable(initial_bias): initial_bias_orig = initial_bias def initial_bias(array): array.shape = (out_size, pool_size) initial_bias_orig(array) array.shape = linear_out_size, with self.init_scope(): self.linear = linear.Linear( in_size, linear_out_size, nobias=initial_bias is None, initialW=initialW, initial_bias=initial_bias) self.out_size = out_size self.pool_size = pool_size def forward(self, x): """Applies the maxout layer. Args: x (~chainer.Variable): Batch of input vectors. Returns: ~chainer.Variable: Output of the maxout layer. """ y = self.linear(x) return maxout.maxout(y, self.pool_size)
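A minimal usage sketch (my example, mirroring the docstring's description): Maxout(5, 10, 3) wraps a Linear(5, 30) internally and takes the maximum over each pool of 3 pre-activations.

import numpy as np

import chainer.links as L

m = L.Maxout(5, 10, 3)
x = np.zeros((2, 5), dtype=np.float32)
y = m(x)
assert y.shape == (2, 10)
assert m.linear.W.shape == (30, 5)  # out_size * pool_size rows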
import copy import functools import unittest import chainer import chainer.testing import chainer.testing.attr import numpy import pytest import chainermn import chainermn.functions class TestPointToPointCommunication(unittest.TestCase): def setup(self, gpu): self.gpu = gpu if self.gpu: self.communicator = chainermn.create_communicator('hierarchical') device = self.communicator.intra_rank chainer.cuda.get_device_from_id(device).use() else: self.communicator = chainermn.create_communicator('naive') device = -1 if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size # Activation function. self.f = chainer.functions.sigmoid # Evaluation function. self.evaluation = chainer.functions.mean_squared_error # Input data. self.x = chainer.Variable( numpy.arange(10).reshape(1, 10).astype(numpy.float32) / 10) self.model = chainer.links.Linear( 10, 10, initialW=self._init_w(self.communicator.rank)) self.entire_model = [chainer.links.Linear( 10, 10, initialW=self._init_w(l)) for l in range(self.communicator.size)] self.device = device if device >= 0: self.x.to_gpu() self.model.to_gpu() for model in self.entire_model: model.to_gpu() def _init_w(self, l): return 1.0 * numpy.arange(100).reshape(10, 10).astype(numpy.float32) \ / ((l + 1) * 100) def check_communication(self): if self.communicator.rank == 0: # Input process. y = self.f(self.model(self.x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() grad = self.model.W.grad # Compute the expected gradient. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) err_ = self.evaluation(x_, self.x) err_.backward() grad_expected = self.entire_model[0].W.grad chainer.testing.assert_allclose(grad, grad_expected) elif self.communicator.rank == self.communicator.size - 1: # Output process. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = self.evaluation(y, self.x) err.backward() # Compute the expected output. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) y_expect = x_ chainer.testing.assert_allclose(y.data, y_expect.data) else: # Intermediate processes. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_communication_cpu(self): self.setup(False) self.check_communication() @chainer.testing.attr.gpu def test_communication_gpu(self): self.setup(True) self.check_communication() def check_retain(self): if self.communicator.rank == 0: # Starting process. t = copy.copy(self.x) y = self.f(self.model(self.x)) dlg = chainermn.functions.send( y, self.communicator, self.rank_send) # Unless delegate_variable is used, backprop would stop here. x = chainermn.functions.recv( self.communicator, self.rank_recv, delegate_variable=dlg) err = self.evaluation(x, t) err.backward() # self.x.grad is None if backprop stops in the middle. assert self.x.grad is not None else: # Intermediate processes. 
x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_retain_cpu(self): self.setup(False) self.check_retain() @chainer.testing.attr.gpu def test_retain_gpu(self): self.setup(True) self.check_retain() def check_tuple_communication(self, length): if self.communicator.rank == 0: y = [] for i in range(length): _y = self.f(self.model(self.x)) y.append(_y) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() elif self.communicator.rank == self.communicator.size - 1: y = chainermn.functions.recv( self.communicator, self.rank_recv, force_tuple=True) assert isinstance(y, tuple) z = functools.reduce(lambda x, y: x + y, y) err = self.evaluation(z, self.x) err.backward() else: y = chainermn.functions.recv(self.communicator, self.rank_recv) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_tuple_communication1_cpu(self): self.setup(False) self.check_tuple_communication(1) def test_tuple_communication2_cpu(self): self.setup(False) self.check_tuple_communication(2) @chainer.testing.attr.gpu def test_tuple_communication1_gpu(self): self.setup(True) self.check_tuple_communication(1) @chainer.testing.attr.gpu def test_tuple_communication2_gpu(self): self.setup(True) self.check_tuple_communication(2) class TestNonVariableInput(unittest.TestCase): def setUp(self): self.communicator = chainermn.create_communicator('naive') if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size def test_non_variable_send(self): """Checks if backward will be called even if inputs are not Variable. This test confirms whether deadlock occurs when numpy/cupy array is given as an input of send. In this case, the input will be converted to chainer Variable without ``requires_grad``, thus ``backward`` will not be called without any modification. """ if self.communicator.rank == 0: x = numpy.ones((1, 10)).astype(numpy.float32) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) x = chainermn.functions.pseudo_connect(phi, x) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() elif self.communicator.rank == self.communicator.size - 1: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() else: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) phi.backward()
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/links/activation/maxout.py
from chainer import backend
from chainer import function_node
from chainer import utils
from chainer.utils import type_check


class Cosh(function_node.FunctionNode):

    @property
    def label(self):
        return 'cosh'

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward(self, x):
        self.retain_inputs((0,))
        xp = backend.get_array_module(*x)
        return utils.force_array(xp.cosh(x[0])),

    def backward(self, indexes, gy):
        x = self.get_retained_inputs()
        gx = sinh(x[0])
        gx *= gy[0]
        return gx,


def cosh(x):
    """Elementwise hyperbolic cosine function.

    .. math::
       y_i = \\cosh x_i.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    return Cosh().apply((x,))[0]


class Sinh(function_node.FunctionNode):

    @property
    def label(self):
        return 'sinh'

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward(self, x):
        self.retain_inputs((0,))
        xp = backend.get_array_module(*x)
        return utils.force_array(xp.sinh(x[0])),

    def backward(self, indexes, gy):
        # The first parameter is ``indexes`` (FunctionNode API), matching
        # Cosh.backward above; the inputs are recovered via retention.
        x = self.get_retained_inputs()
        gx = cosh(x[0])
        gx *= gy[0]
        return gx,


def sinh(x):
    """Elementwise hyperbolic sine function.

    .. math::
       y_i = \\sinh x_i.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    return Sinh().apply((x,))[0]
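A quick gradient sketch (illustrative, my addition): d sinh(x)/dx = cosh(x), which the backward methods above realize through the retained inputs.

import numpy as np

from chainer import Variable
from chainer.functions.math.hyperbolic import sinh

x = Variable(np.array([0.5], dtype=np.float32))
y = sinh(x)
y.backward()  # size-1 output, so the initial gradient defaults to 1
assert np.allclose(x.grad, np.cosh(0.5))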
import copy import functools import unittest import chainer import chainer.testing import chainer.testing.attr import numpy import pytest import chainermn import chainermn.functions class TestPointToPointCommunication(unittest.TestCase): def setup(self, gpu): self.gpu = gpu if self.gpu: self.communicator = chainermn.create_communicator('hierarchical') device = self.communicator.intra_rank chainer.cuda.get_device_from_id(device).use() else: self.communicator = chainermn.create_communicator('naive') device = -1 if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size # Activation function. self.f = chainer.functions.sigmoid # Evaluation function. self.evaluation = chainer.functions.mean_squared_error # Input data. self.x = chainer.Variable( numpy.arange(10).reshape(1, 10).astype(numpy.float32) / 10) self.model = chainer.links.Linear( 10, 10, initialW=self._init_w(self.communicator.rank)) self.entire_model = [chainer.links.Linear( 10, 10, initialW=self._init_w(l)) for l in range(self.communicator.size)] self.device = device if device >= 0: self.x.to_gpu() self.model.to_gpu() for model in self.entire_model: model.to_gpu() def _init_w(self, l): return 1.0 * numpy.arange(100).reshape(10, 10).astype(numpy.float32) \ / ((l + 1) * 100) def check_communication(self): if self.communicator.rank == 0: # Input process. y = self.f(self.model(self.x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() grad = self.model.W.grad # Compute the expected gradient. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) err_ = self.evaluation(x_, self.x) err_.backward() grad_expected = self.entire_model[0].W.grad chainer.testing.assert_allclose(grad, grad_expected) elif self.communicator.rank == self.communicator.size - 1: # Output process. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = self.evaluation(y, self.x) err.backward() # Compute the expected output. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) y_expect = x_ chainer.testing.assert_allclose(y.data, y_expect.data) else: # Intermediate processes. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_communication_cpu(self): self.setup(False) self.check_communication() @chainer.testing.attr.gpu def test_communication_gpu(self): self.setup(True) self.check_communication() def check_retain(self): if self.communicator.rank == 0: # Starting process. t = copy.copy(self.x) y = self.f(self.model(self.x)) dlg = chainermn.functions.send( y, self.communicator, self.rank_send) # Unless delegate_variable is used, backprop would stop here. x = chainermn.functions.recv( self.communicator, self.rank_recv, delegate_variable=dlg) err = self.evaluation(x, t) err.backward() # self.x.grad is None if backprop stops in the middle. assert self.x.grad is not None else: # Intermediate processes. 
x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_retain_cpu(self): self.setup(False) self.check_retain() @chainer.testing.attr.gpu def test_retain_gpu(self): self.setup(True) self.check_retain() def check_tuple_communication(self, length): if self.communicator.rank == 0: y = [] for i in range(length): _y = self.f(self.model(self.x)) y.append(_y) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() elif self.communicator.rank == self.communicator.size - 1: y = chainermn.functions.recv( self.communicator, self.rank_recv, force_tuple=True) assert isinstance(y, tuple) z = functools.reduce(lambda x, y: x + y, y) err = self.evaluation(z, self.x) err.backward() else: y = chainermn.functions.recv(self.communicator, self.rank_recv) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_tuple_communication1_cpu(self): self.setup(False) self.check_tuple_communication(1) def test_tuple_communication2_cpu(self): self.setup(False) self.check_tuple_communication(2) @chainer.testing.attr.gpu def test_tuple_communication1_gpu(self): self.setup(True) self.check_tuple_communication(1) @chainer.testing.attr.gpu def test_tuple_communication2_gpu(self): self.setup(True) self.check_tuple_communication(2) class TestNonVariableInput(unittest.TestCase): def setUp(self): self.communicator = chainermn.create_communicator('naive') if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size def test_non_variable_send(self): """Checks if backward will be called even if inputs are not Variable. This test confirms whether deadlock occurs when numpy/cupy array is given as an input of send. In this case, the input will be converted to chainer Variable without ``requires_grad``, thus ``backward`` will not be called without any modification. """ if self.communicator.rank == 0: x = numpy.ones((1, 10)).astype(numpy.float32) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) x = chainermn.functions.pseudo_connect(phi, x) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() elif self.communicator.rank == self.communicator.size - 1: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() else: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) phi.backward()
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/functions/math/hyperbolic.py
import numpy import six import chainer from chainer import backend from chainer.backends import intel64 from chainer import function_node from chainer.utils import collections_abc from chainer.utils import type_check import chainerx _numpy_split_ok = numpy.lib.NumpyVersion(numpy.__version__) >= '1.11.0' def _fix_numpy_split(ys, x, indices_or_sections, axis): """Make the output of np.split compatible with numpy >= 1.11""" if all(y.ndim == x.ndim for y in ys): return ys tmp = [len(t) for t in numpy.split( numpy.empty(x.shape[axis], dtype=numpy.int8), indices_or_sections, 0)] shape = list(x.shape) for i, t in enumerate(tmp): y = ys[i] if y.ndim != x.ndim: assert y.size == 0 shape[axis] = t ys[i] = y.reshape(shape) return ys def _get_indices_or_sections(indices_or_sections): """Checks and convert ``indices_or_sections`` argument Converted value is one of: 1-D numpy.ndarray, list, int, and NumPy int scalar. Returns: A binary tuple in which the 1st element is indices (sequence) and the 2nd element is sections (scalar). Only one of the two is not ``None`` and the other is ``None``. """ ios = indices_or_sections is_seq = False if isinstance(ios, numpy.ndarray): # numpy.ndarray if ios.dtype.kind != 'i' and ios.size > 0: # Note: numpy.array([]) (dtype is float64) should be accepted. raise TypeError('indices_or_sections must be integers') if ios.ndim >= 2: raise TypeError('indices_or_sections must be 1-D sequence') is_seq = ios.ndim != 0 elif isinstance(ios, collections_abc.Sequence): # Any sequence except numpy.ndarray ios = list(ios) is_seq = True elif isinstance(indices_or_sections, six.integer_types): # int pass else: raise TypeError( 'indices_or_sections must be integer or 1-D array.\n' 'Actual: {}'.format(type(indices_or_sections))) if is_seq and chainer.is_debug(): for p, n in six.moves.zip(ios, ios[1:]): if p > n: raise ValueError('indices_or_sections must be sorted') if is_seq: return ios, None else: return None, ios class SplitAxis(function_node.FunctionNode): """Function that splits multiple arrays along the specified axis.""" def __init__(self, indices_or_sections, axis): indices, sections = _get_indices_or_sections(indices_or_sections) assert (indices is None) != (sections is None) self.indices = indices self.sections = sections self.axis = axis def check_type_forward(self, in_types): type_check.expect(in_types.size() == 1) type_check.expect(in_types[0].ndim > self.axis) if self.indices is not None: indices = self.indices if len(indices) > 0: max_index = type_check.make_variable(indices[-1], 'max_index') type_check.expect(in_types[0].shape[self.axis] >= max_index) else: assert self.sections is not None sections = type_check.make_variable(self.sections, 'sections') type_check.expect(in_types[0].shape[self.axis] % sections == 0) @property def indices_or_sections(self): return self.indices if self.indices is not None else self.sections def forward_chainerx(self, inputs): x, = inputs return tuple(chainerx.split(x, self.indices_or_sections, self.axis)) def forward(self, inputs): # Currently iDeep only supports 4 dims if (intel64.should_use_ideep('>=auto') and intel64.inputs_all_ready(inputs, (4,)) and self._ideep_is_supported(inputs)): return self._forward_ideep(inputs) x, = inputs self._xp = backend.get_array_module(x) indices_or_sections = self.indices_or_sections ret = self._xp.split(x, indices_or_sections, self.axis) if self._xp == numpy and not _numpy_split_ok: ret = _fix_numpy_split(ret, x, indices_or_sections, self.axis) self._shapes = [r.shape for r in ret] return tuple(ret) def 
_ideep_is_supported(self, inputs): # Returns True if iDeep supports current configuration of inputs and # arguments. This is workaround for limitation in iDeep internal # implementation. if self.indices is not None: indices = self.indices if len(indices) == 0: return False # Empty sequence if indices[0] == 0: return False # Sequence starting with 0 for i in six.moves.range(1, len(indices)): if indices[i-1] == indices[i]: return False # Sequence with duplicate index else: if self.sections == 1: return False # 1 # Workaround for iDeep segfault issue # See: # https://github.com/chainer/chainer/pull/4281#issuecomment-365830630 # TODO(niboshi): Remove this after iDeep is fixed. # Note: inputs[0].ndim is always 4. if (self.axis == 1 or self.axis == -3) and inputs[0].shape[1] == 8: return False return True def _forward_ideep(self, inputs): x, = inputs offsets = intel64.ideep.intVector() # TODO(iDeep) # bypass python3 issue when transfer array to std::vector<> # https://github.com/SimpleITK/SimpleITK/issues/106 axis = self.axis % x.ndim if self.indices is not None: for i in self.indices: offsets.push_back(int(i)) else: d = x.shape[self.axis] step = d // self.sections for i in six.moves.range(step, d, step): offsets.push_back(i) ret = intel64.ideep.concat.Backward( intel64.ideep.array(x), offsets, axis) self._shapes = [r.shape for r in ret] return ret def backward(self, indexes, grad_outputs): dtype = self.inputs[0].dtype grads = [ self._xp.zeros(shape, dtype=dtype) if gy is None else gy for gy, shape in six.moves.zip(grad_outputs, self._shapes)] return chainer.functions.concat(grads, self.axis), def split_axis(x, indices_or_sections, axis, force_tuple=True): """Splits given variables along an axis. Args: x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \ :class:`cupy.ndarray`): A variable to be split. indices_or_sections (int or 1-D array): If this argument is an integer, N, the array will be divided into N equal arrays along axis. If it is a 1-D array of sorted integers, it indicates the positions where the array is split. axis (int): Axis that the input array is split along. force_tuple (bool): If ``True`` (the default) this method returns a tuple even when the number of outputs is one. Otherwise, if ``False`` a Variable will be returned when the number of outputs is one. Returns: tuple or Variable: Tuple of :class:`~chainer.Variable` objects if the number of outputs is more than 1 or :class:`~chainer.Variable` otherwise. When ``force_tuple`` is ``True``, returned value is always a tuple regardless of the number of outputs. """ res = SplitAxis(indices_or_sections, axis).apply((x,)) if force_tuple or len(res) != 1: return res return res[0]
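A small usage sketch (my illustration): passing an integer splits into equal sections, while a sequence of sorted integers cuts at the given positions.

import numpy as np

from chainer.functions.array.split_axis import split_axis

x = np.arange(12, dtype=np.float32).reshape(3, 4)
a, b = split_axis(x, 2, axis=1)          # two equal sections of width 2
assert a.shape == (3, 2) and b.shape == (3, 2)
c, d, e = split_axis(x, [1, 3], axis=1)  # cut at columns 1 and 3
assert c.shape == (3, 1) and d.shape == (3, 2) and e.shape == (3, 1)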
import copy import functools import unittest import chainer import chainer.testing import chainer.testing.attr import numpy import pytest import chainermn import chainermn.functions class TestPointToPointCommunication(unittest.TestCase): def setup(self, gpu): self.gpu = gpu if self.gpu: self.communicator = chainermn.create_communicator('hierarchical') device = self.communicator.intra_rank chainer.cuda.get_device_from_id(device).use() else: self.communicator = chainermn.create_communicator('naive') device = -1 if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size # Activation function. self.f = chainer.functions.sigmoid # Evaluation function. self.evaluation = chainer.functions.mean_squared_error # Input data. self.x = chainer.Variable( numpy.arange(10).reshape(1, 10).astype(numpy.float32) / 10) self.model = chainer.links.Linear( 10, 10, initialW=self._init_w(self.communicator.rank)) self.entire_model = [chainer.links.Linear( 10, 10, initialW=self._init_w(l)) for l in range(self.communicator.size)] self.device = device if device >= 0: self.x.to_gpu() self.model.to_gpu() for model in self.entire_model: model.to_gpu() def _init_w(self, l): return 1.0 * numpy.arange(100).reshape(10, 10).astype(numpy.float32) \ / ((l + 1) * 100) def check_communication(self): if self.communicator.rank == 0: # Input process. y = self.f(self.model(self.x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() grad = self.model.W.grad # Compute the expected gradient. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) err_ = self.evaluation(x_, self.x) err_.backward() grad_expected = self.entire_model[0].W.grad chainer.testing.assert_allclose(grad, grad_expected) elif self.communicator.rank == self.communicator.size - 1: # Output process. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = self.evaluation(y, self.x) err.backward() # Compute the expected output. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) y_expect = x_ chainer.testing.assert_allclose(y.data, y_expect.data) else: # Intermediate processes. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_communication_cpu(self): self.setup(False) self.check_communication() @chainer.testing.attr.gpu def test_communication_gpu(self): self.setup(True) self.check_communication() def check_retain(self): if self.communicator.rank == 0: # Starting process. t = copy.copy(self.x) y = self.f(self.model(self.x)) dlg = chainermn.functions.send( y, self.communicator, self.rank_send) # Unless delegate_variable is used, backprop would stop here. x = chainermn.functions.recv( self.communicator, self.rank_recv, delegate_variable=dlg) err = self.evaluation(x, t) err.backward() # self.x.grad is None if backprop stops in the middle. assert self.x.grad is not None else: # Intermediate processes. 
x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_retain_cpu(self): self.setup(False) self.check_retain() @chainer.testing.attr.gpu def test_retain_gpu(self): self.setup(True) self.check_retain() def check_tuple_communication(self, length): if self.communicator.rank == 0: y = [] for i in range(length): _y = self.f(self.model(self.x)) y.append(_y) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() elif self.communicator.rank == self.communicator.size - 1: y = chainermn.functions.recv( self.communicator, self.rank_recv, force_tuple=True) assert isinstance(y, tuple) z = functools.reduce(lambda x, y: x + y, y) err = self.evaluation(z, self.x) err.backward() else: y = chainermn.functions.recv(self.communicator, self.rank_recv) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_tuple_communication1_cpu(self): self.setup(False) self.check_tuple_communication(1) def test_tuple_communication2_cpu(self): self.setup(False) self.check_tuple_communication(2) @chainer.testing.attr.gpu def test_tuple_communication1_gpu(self): self.setup(True) self.check_tuple_communication(1) @chainer.testing.attr.gpu def test_tuple_communication2_gpu(self): self.setup(True) self.check_tuple_communication(2) class TestNonVariableInput(unittest.TestCase): def setUp(self): self.communicator = chainermn.create_communicator('naive') if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size def test_non_variable_send(self): """Checks if backward will be called even if inputs are not Variable. This test confirms whether deadlock occurs when numpy/cupy array is given as an input of send. In this case, the input will be converted to chainer Variable without ``requires_grad``, thus ``backward`` will not be called without any modification. """ if self.communicator.rank == 0: x = numpy.ones((1, 10)).astype(numpy.float32) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) x = chainermn.functions.pseudo_connect(phi, x) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() elif self.communicator.rank == self.communicator.size - 1: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() else: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) phi.backward()
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/functions/array/split_axis.py
import numpy import chainer from chainer.backends import cuda from chainer import distribution from chainer.functions.math import exponential from chainer.functions.math import lgamma EULER = 0.57721566490153286060651209008240243104215933593992 class Gumbel(distribution.Distribution): """Gumbel Distribution. The probability density function of the distribution is expressed as .. math:: f(x) = \\frac{1}{\\eta} \ \\exp\\left\\{ - \\frac{x - \\mu}{\\eta} \\right\\} \ \\exp\\left[-\\exp\\left\\{-\\frac{x - \\mu}{\\eta} \ \\right\\}\\right], Args: loc(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of distribution :math:`\\mu`. scale(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of distribution :math:`\\eta`. """ def __init__(self, loc, scale): super(Gumbel, self).__init__() self.__loc = chainer.as_variable(loc) self.__scale = chainer.as_variable(scale) @property def loc(self): return self.__loc @property def scale(self): return self.__scale @property def batch_shape(self): return self.loc.shape @property def entropy(self): return exponential.log(self.scale) + (EULER + 1) @property def event_shape(self): return () @property def _is_gpu(self): return isinstance(self.loc.data, cuda.ndarray) def log_prob(self, x): y = (x - self.loc) / self.scale return - exponential.log(self.scale) - y - exponential.exp(-y) @property def mean(self): return self.loc + EULER * self.scale def sample_n(self, n): xp = cuda.get_array_module(self.loc) if xp is cuda.cupy: eps = xp.random.gumbel( size=(n,)+self.batch_shape, dtype=self.loc.dtype) else: eps = xp.random.gumbel( size=(n,)+self.batch_shape).astype(self.loc.dtype) noise = self.scale * eps + self.loc return noise @property def support(self): return 'real' @property def variance(self): return numpy.pi ** 2 * self.scale ** 2 / 6 @distribution.register_kl(Gumbel, Gumbel) def _kl_gumbel_gumbel(dist1, dist2): scale_1d2 = dist1.scale / dist2.scale return exponential.log(dist2.scale) - exponential.log(dist1.scale) \ + EULER * (scale_1d2 - 1.) \ + exponential.exp((dist2.loc - dist1.loc) / dist2.scale + lgamma.lgamma(scale_1d2 + 1.)) \ - 1 + (dist1.loc - dist2.loc) / dist2.scale
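A brief sampling sketch (illustrative only, my addition): draw from Gumbel(loc=0, scale=1) and evaluate log_prob and the closed-form moments defined above.

import numpy as np

from chainer.distributions.gumbel import Gumbel

d = Gumbel(np.zeros((), np.float32), np.ones((), np.float32))
s = d.sample_n(5)   # shape (5,) samples
lp = d.log_prob(s)
assert s.shape == (5,) and lp.shape == (5,)
# mean = loc + EULER * scale; variance = pi**2 * scale**2 / 6
print(d.mean.array, d.variance.array)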
import copy import functools import unittest import chainer import chainer.testing import chainer.testing.attr import numpy import pytest import chainermn import chainermn.functions class TestPointToPointCommunication(unittest.TestCase): def setup(self, gpu): self.gpu = gpu if self.gpu: self.communicator = chainermn.create_communicator('hierarchical') device = self.communicator.intra_rank chainer.cuda.get_device_from_id(device).use() else: self.communicator = chainermn.create_communicator('naive') device = -1 if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size # Activation function. self.f = chainer.functions.sigmoid # Evaluation function. self.evaluation = chainer.functions.mean_squared_error # Input data. self.x = chainer.Variable( numpy.arange(10).reshape(1, 10).astype(numpy.float32) / 10) self.model = chainer.links.Linear( 10, 10, initialW=self._init_w(self.communicator.rank)) self.entire_model = [chainer.links.Linear( 10, 10, initialW=self._init_w(l)) for l in range(self.communicator.size)] self.device = device if device >= 0: self.x.to_gpu() self.model.to_gpu() for model in self.entire_model: model.to_gpu() def _init_w(self, l): return 1.0 * numpy.arange(100).reshape(10, 10).astype(numpy.float32) \ / ((l + 1) * 100) def check_communication(self): if self.communicator.rank == 0: # Input process. y = self.f(self.model(self.x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() grad = self.model.W.grad # Compute the expected gradient. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) err_ = self.evaluation(x_, self.x) err_.backward() grad_expected = self.entire_model[0].W.grad chainer.testing.assert_allclose(grad, grad_expected) elif self.communicator.rank == self.communicator.size - 1: # Output process. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = self.evaluation(y, self.x) err.backward() # Compute the expected output. x_ = self.x for l in range(self.communicator.size): x_ = self.f(self.entire_model[l](x_)) y_expect = x_ chainer.testing.assert_allclose(y.data, y_expect.data) else: # Intermediate processes. x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_communication_cpu(self): self.setup(False) self.check_communication() @chainer.testing.attr.gpu def test_communication_gpu(self): self.setup(True) self.check_communication() def check_retain(self): if self.communicator.rank == 0: # Starting process. t = copy.copy(self.x) y = self.f(self.model(self.x)) dlg = chainermn.functions.send( y, self.communicator, self.rank_send) # Unless delegate_variable is used, backprop would stop here. x = chainermn.functions.recv( self.communicator, self.rank_recv, delegate_variable=dlg) err = self.evaluation(x, t) err.backward() # self.x.grad is None if backprop stops in the middle. assert self.x.grad is not None else: # Intermediate processes. 
x = chainermn.functions.recv(self.communicator, self.rank_recv) y = self.f(self.model(x)) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_retain_cpu(self): self.setup(False) self.check_retain() @chainer.testing.attr.gpu def test_retain_gpu(self): self.setup(True) self.check_retain() def check_tuple_communication(self, length): if self.communicator.rank == 0: y = [] for i in range(length): _y = self.f(self.model(self.x)) y.append(_y) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() elif self.communicator.rank == self.communicator.size - 1: y = chainermn.functions.recv( self.communicator, self.rank_recv, force_tuple=True) assert isinstance(y, tuple) z = functools.reduce(lambda x, y: x + y, y) err = self.evaluation(z, self.x) err.backward() else: y = chainermn.functions.recv(self.communicator, self.rank_recv) err = chainermn.functions.send( y, self.communicator, self.rank_send) err.backward() def test_tuple_communication1_cpu(self): self.setup(False) self.check_tuple_communication(1) def test_tuple_communication2_cpu(self): self.setup(False) self.check_tuple_communication(2) @chainer.testing.attr.gpu def test_tuple_communication1_gpu(self): self.setup(True) self.check_tuple_communication(1) @chainer.testing.attr.gpu def test_tuple_communication2_gpu(self): self.setup(True) self.check_tuple_communication(2) class TestNonVariableInput(unittest.TestCase): def setUp(self): self.communicator = chainermn.create_communicator('naive') if self.communicator.size < 2: pytest.skip("This test is for multinode") self.rank_send = (self.communicator.rank + 1) % self.communicator.size self.rank_recv = (self.communicator.rank - 1) % self.communicator.size def test_non_variable_send(self): """Checks if backward will be called even if inputs are not Variable. This test confirms whether deadlock occurs when numpy/cupy array is given as an input of send. In this case, the input will be converted to chainer Variable without ``requires_grad``, thus ``backward`` will not be called without any modification. """ if self.communicator.rank == 0: x = numpy.ones((1, 10)).astype(numpy.float32) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) x = chainermn.functions.pseudo_connect(phi, x) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() elif self.communicator.rank == self.communicator.size - 1: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) y = chainer.functions.sum(x) t = numpy.array(0).astype(numpy.float32) z = chainer.functions.mean_squared_error(y, t) z.backward() else: x = chainermn.functions.recv( self.communicator, rank=self.rank_recv) phi = chainermn.functions.send( x, self.communicator, rank=self.rank_send) phi.backward()
ktnyt/chainer
tests/chainermn_tests/functions_tests/test_point_to_point_communication.py
chainer/distributions/gumbel.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""This module contains the AWS DynamoDB hook"""
from typing import Iterable, List, Optional

from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook


class AwsDynamoDBHook(AwsBaseHook):
    """
    Interact with AWS DynamoDB.

    Additional arguments (such as ``aws_conn_id``) may be specified and
    are passed down to the underlying AwsBaseHook.

    .. seealso::
        :class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`

    :param table_keys: partition key and sort key
    :type table_keys: list
    :param table_name: target DynamoDB table
    :type table_name: str
    """

    def __init__(
        self, *args, table_keys: Optional[List] = None, table_name: Optional[str] = None, **kwargs
    ) -> None:
        self.table_keys = table_keys
        self.table_name = table_name
        kwargs["resource_type"] = "dynamodb"
        super().__init__(*args, **kwargs)

    def write_batch_data(self, items: Iterable) -> bool:
        """Write batch items to DynamoDB table with provisioned throughput capacity."""
        try:
            table = self.get_conn().Table(self.table_name)

            with table.batch_writer(overwrite_by_pkeys=self.table_keys) as batch:
                for item in items:
                    batch.put_item(Item=item)
            return True
        except Exception as general_error:
            raise AirflowException(f"Failed to insert items in dynamodb, error: {str(general_error)}")
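A short usage sketch for the hook above. The connection id, table name, and key names are illustrative placeholders, not values from the source:

# Hypothetical usage of AwsDynamoDBHook.write_batch_data; "aws_default",
# the table name, and the key names are illustrative placeholders.
from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook

hook = AwsDynamoDBHook(
    aws_conn_id="aws_default",
    table_name="example_table",   # placeholder table
    table_keys=["item_id"],       # partition key only, no sort key
)
hook.write_batch_data([
    {"item_id": "1", "payload": "a"},
    {"item_id": "2", "payload": "b"},
])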
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import io
import json
import os
import unittest
from contextlib import redirect_stdout

import pytest

from airflow import models, settings
from airflow.cli import cli_parser
from airflow.cli.commands import pool_command
from airflow.models import Pool
from airflow.settings import Session
from airflow.utils.db import add_default_pool_if_not_exists


class TestCliPools(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.dagbag = models.DagBag(include_examples=True)
        cls.parser = cli_parser.get_parser()

    def setUp(self):
        super().setUp()
        settings.configure_orm()
        self.session = Session
        self._cleanup()

    def tearDown(self):
        self._cleanup()

    @staticmethod
    def _cleanup(session=None):
        if session is None:
            session = Session()

        session.query(Pool).filter(Pool.pool != Pool.DEFAULT_POOL_NAME).delete()
        session.commit()
        add_default_pool_if_not_exists()
        session.close()

    def test_pool_list(self):
        pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test']))
        with redirect_stdout(io.StringIO()) as stdout:
            pool_command.pool_list(self.parser.parse_args(['pools', 'list']))

        assert 'foo' in stdout.getvalue()

    def test_pool_list_with_args(self):
        pool_command.pool_list(self.parser.parse_args(['pools', 'list', '--output', 'json']))

    def test_pool_create(self):
        pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test']))
        assert self.session.query(Pool).count() == 2

    def test_pool_get(self):
        pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test']))
        pool_command.pool_get(self.parser.parse_args(['pools', 'get', 'foo']))

    def test_pool_delete(self):
        pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test']))
        pool_command.pool_delete(self.parser.parse_args(['pools', 'delete', 'foo']))
        assert self.session.query(Pool).count() == 1

    def test_pool_import_nonexistent(self):
        with pytest.raises(SystemExit):
            pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'nonexistent.json']))

    def test_pool_import_invalid_json(self):
        with open('pools_import_invalid.json', mode='w') as file:
            file.write("not valid json")

        with pytest.raises(SystemExit):
            pool_command.pool_import(
                self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])
            )

    def test_pool_import_invalid_pools(self):
        pool_config_input = {"foo": {"description": "foo_test"}}
        with open('pools_import_invalid.json', mode='w') as file:
            json.dump(pool_config_input, file)

        with pytest.raises(SystemExit):
            pool_command.pool_import(
                self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])
            )

    def test_pool_import_export(self):
        # Create two pools first
        pool_config_input = {
            "foo": {"description": "foo_test", "slots": 1},
            'default_pool': {'description': 'Default pool', 'slots': 128},
            "baz": {"description": "baz_test", "slots": 2},
        }
        with open('pools_import.json', mode='w') as file:
            json.dump(pool_config_input, file)

        # Import json
        pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import.json']))

        # Export json
        pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json']))

        with open('pools_export.json') as file:
            pool_config_output = json.load(file)
            assert pool_config_input == pool_config_output, "Input and output pool files are not same"
        os.remove('pools_import.json')
        os.remove('pools_export.json')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/providers/amazon/aws/hooks/dynamodb.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""This module is deprecated. Please use :mod:`airflow.providers.snowflake.operators.snowflake`."""

import warnings

from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator  # noqa

warnings.warn(
    "This module is deprecated. Please use `airflow.providers.snowflake.operators.snowflake`.",
    DeprecationWarning,
    stacklevel=2,
)
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/contrib/operators/snowflake_operator.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import warnings

from airflow.providers.tableau.hooks.tableau import TableauHook, TableauJobFinishCode  # noqa

warnings.warn(
    "This module is deprecated. Please use `airflow.providers.tableau.hooks.tableau`.",
    DeprecationWarning,
    stacklevel=2,
)
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/providers/salesforce/hooks/tableau.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Add schedule interval to dag

Revision ID: dd4ecb8fbee3
Revises: c8ffec048a3b
Create Date: 2018-12-27 18:39:25.748032

"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = 'dd4ecb8fbee3'
down_revision = 'c8ffec048a3b'
branch_labels = None
depends_on = None


def upgrade():
    op.add_column('dag', sa.Column('schedule_interval', sa.Text(), nullable=True))


def downgrade():
    op.drop_column('dag', 'schedule_interval')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import os

from airflow import models
from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator
from airflow.providers.google.suite.operators.sheets import GoogleSheetsCreateSpreadsheetOperator
from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator
from airflow.utils.dates import days_ago

GCS_BUCKET = os.environ.get("SHEETS_GCS_BUCKET", "test28397ye")
SPREADSHEET_ID = os.environ.get("SPREADSHEET_ID", "1234567890qwerty")
NEW_SPREADSHEET_ID = os.environ.get("NEW_SPREADSHEET_ID", "1234567890qwerty")

SPREADSHEET = {
    "properties": {"title": "Test1"},
    "sheets": [{"properties": {"title": "Sheet1"}}],
}

with models.DAG(
    "example_sheets_gcs",
    schedule_interval=None,  # Override to match your needs
    start_date=days_ago(1),
    tags=["example"],
) as dag:
    # [START upload_sheet_to_gcs]
    upload_sheet_to_gcs = GoogleSheetsToGCSOperator(
        task_id="upload_sheet_to_gcs",
        destination_bucket=GCS_BUCKET,
        spreadsheet_id=SPREADSHEET_ID,
    )
    # [END upload_sheet_to_gcs]

    # [START create_spreadsheet]
    create_spreadsheet = GoogleSheetsCreateSpreadsheetOperator(
        task_id="create_spreadsheet", spreadsheet=SPREADSHEET
    )
    # [END create_spreadsheet]

    # [START print_spreadsheet_url]
    print_spreadsheet_url = BashOperator(
        task_id="print_spreadsheet_url",
        bash_command="echo {{ task_instance.xcom_pull('create_spreadsheet', key='spreadsheet_url') }}",
    )
    # [END print_spreadsheet_url]

    # [START upload_gcs_to_sheet]
    upload_gcs_to_sheet = GCSToGoogleSheetsOperator(
        task_id="upload_gcs_to_sheet",
        bucket_name=GCS_BUCKET,
        object_name="{{ task_instance.xcom_pull('upload_sheet_to_gcs')[0] }}",
        spreadsheet_id=NEW_SPREADSHEET_ID,
    )
    # [END upload_gcs_to_sheet]

    create_spreadsheet >> print_spreadsheet_url
    upload_sheet_to_gcs >> upload_gcs_to_sheet
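The object_name template in the DAG above pulls the upstream task's XCom return value (a list of uploaded object names) and takes the first element. A hedged sketch of the same pull done in a Python callable; the pick_object task is illustrative and not part of the example DAG:

# Equivalent of the Jinja pull used above, done in a PythonOperator callable.
# 'upload_sheet_to_gcs' returns the uploaded object names via XCom.
from airflow.operators.python import PythonOperator


def first_uploaded_object(ti):
    # Same as {{ task_instance.xcom_pull('upload_sheet_to_gcs')[0] }}
    return ti.xcom_pull(task_ids='upload_sheet_to_gcs')[0]


pick_object = PythonOperator(
    task_id="pick_object",  # illustrative task, not in the DAG above
    python_callable=first_uploaded_object,
)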
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/providers/google/suite/example_dags/example_sheets.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""
Example Airflow DAG that shows the complex DAG structure.
"""

from airflow import models
from airflow.models.baseoperator import chain
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago

with models.DAG(
    dag_id="example_complex",
    schedule_interval=None,
    start_date=days_ago(1),
    tags=['example', 'example2', 'example3'],
) as dag:
    # Create
    create_entry_group = BashOperator(task_id="create_entry_group", bash_command="echo create_entry_group")

    create_entry_group_result = BashOperator(
        task_id="create_entry_group_result", bash_command="echo create_entry_group_result"
    )

    create_entry_group_result2 = BashOperator(
        task_id="create_entry_group_result2", bash_command="echo create_entry_group_result2"
    )

    create_entry_gcs = BashOperator(task_id="create_entry_gcs", bash_command="echo create_entry_gcs")

    create_entry_gcs_result = BashOperator(
        task_id="create_entry_gcs_result", bash_command="echo create_entry_gcs_result"
    )

    create_entry_gcs_result2 = BashOperator(
        task_id="create_entry_gcs_result2", bash_command="echo create_entry_gcs_result2"
    )

    create_tag = BashOperator(task_id="create_tag", bash_command="echo create_tag")

    create_tag_result = BashOperator(task_id="create_tag_result", bash_command="echo create_tag_result")

    create_tag_result2 = BashOperator(task_id="create_tag_result2", bash_command="echo create_tag_result2")

    create_tag_template = BashOperator(task_id="create_tag_template", bash_command="echo create_tag_template")

    create_tag_template_result = BashOperator(
        task_id="create_tag_template_result", bash_command="echo create_tag_template_result"
    )

    create_tag_template_result2 = BashOperator(
        task_id="create_tag_template_result2", bash_command="echo create_tag_template_result2"
    )

    create_tag_template_field = BashOperator(
        task_id="create_tag_template_field", bash_command="echo create_tag_template_field"
    )

    create_tag_template_field_result = BashOperator(
        task_id="create_tag_template_field_result", bash_command="echo create_tag_template_field_result"
    )

    create_tag_template_field_result2 = BashOperator(
        task_id="create_tag_template_field_result2", bash_command="echo create_tag_template_field_result"
    )

    # Delete
    delete_entry = BashOperator(task_id="delete_entry", bash_command="echo delete_entry")
    create_entry_gcs >> delete_entry

    delete_entry_group = BashOperator(task_id="delete_entry_group", bash_command="echo delete_entry_group")
    create_entry_group >> delete_entry_group

    delete_tag = BashOperator(task_id="delete_tag", bash_command="echo delete_tag")
    create_tag >> delete_tag

    delete_tag_template_field = BashOperator(
        task_id="delete_tag_template_field", bash_command="echo delete_tag_template_field"
    )

    delete_tag_template = BashOperator(task_id="delete_tag_template", bash_command="echo delete_tag_template")

    # Get
    get_entry_group = BashOperator(task_id="get_entry_group", bash_command="echo get_entry_group")

    get_entry_group_result = BashOperator(
        task_id="get_entry_group_result", bash_command="echo get_entry_group_result"
    )

    get_entry = BashOperator(task_id="get_entry", bash_command="echo get_entry")

    get_entry_result = BashOperator(task_id="get_entry_result", bash_command="echo get_entry_result")

    get_tag_template = BashOperator(task_id="get_tag_template", bash_command="echo get_tag_template")

    get_tag_template_result = BashOperator(
        task_id="get_tag_template_result", bash_command="echo get_tag_template_result"
    )

    # List
    list_tags = BashOperator(task_id="list_tags", bash_command="echo list_tags")

    list_tags_result = BashOperator(task_id="list_tags_result", bash_command="echo list_tags_result")

    # Lookup
    lookup_entry = BashOperator(task_id="lookup_entry", bash_command="echo lookup_entry")

    lookup_entry_result = BashOperator(task_id="lookup_entry_result", bash_command="echo lookup_entry_result")

    # Rename
    rename_tag_template_field = BashOperator(
        task_id="rename_tag_template_field", bash_command="echo rename_tag_template_field"
    )

    # Search
    search_catalog = PythonOperator(task_id="search_catalog", python_callable=lambda: print("search_catalog"))

    search_catalog_result = BashOperator(
        task_id="search_catalog_result", bash_command="echo search_catalog_result"
    )

    # Update
    update_entry = BashOperator(task_id="update_entry", bash_command="echo update_entry")

    update_tag = BashOperator(task_id="update_tag", bash_command="echo update_tag")

    update_tag_template = BashOperator(task_id="update_tag_template", bash_command="echo update_tag_template")

    update_tag_template_field = BashOperator(
        task_id="update_tag_template_field", bash_command="echo update_tag_template_field"
    )

    # Create
    create_tasks = [
        create_entry_group,
        create_entry_gcs,
        create_tag_template,
        create_tag_template_field,
        create_tag,
    ]
    chain(*create_tasks)

    create_entry_group >> delete_entry_group
    create_entry_group >> create_entry_group_result
    create_entry_group >> create_entry_group_result2

    create_entry_gcs >> delete_entry
    create_entry_gcs >> create_entry_gcs_result
    create_entry_gcs >> create_entry_gcs_result2

    create_tag_template >> delete_tag_template_field
    create_tag_template >> create_tag_template_result
    create_tag_template >> create_tag_template_result2

    create_tag_template_field >> delete_tag_template_field
    create_tag_template_field >> create_tag_template_field_result
    create_tag_template_field >> create_tag_template_field_result2

    create_tag >> delete_tag
    create_tag >> create_tag_result
    create_tag >> create_tag_result2

    # Delete
    delete_tasks = [
        delete_tag,
        delete_tag_template_field,
        delete_tag_template,
        delete_entry_group,
        delete_entry,
    ]
    chain(*delete_tasks)

    # Get
    create_tag_template >> get_tag_template >> delete_tag_template
    get_tag_template >> get_tag_template_result

    create_entry_gcs >> get_entry >> delete_entry
    get_entry >> get_entry_result

    create_entry_group >> get_entry_group >> delete_entry_group
    get_entry_group >> get_entry_group_result

    # List
    create_tag >> list_tags >> delete_tag
    list_tags >> list_tags_result

    # Lookup
    create_entry_gcs >> lookup_entry >> delete_entry
    lookup_entry >> lookup_entry_result

    # Rename
    create_tag_template_field >> rename_tag_template_field >> delete_tag_template_field

    # Search
    chain(create_tasks, search_catalog, delete_tasks)
    search_catalog >> search_catalog_result

    # Update
    create_entry_gcs >> update_entry >> delete_entry
    create_tag >> update_tag >> delete_tag
    create_tag_template >> update_tag_template >> delete_tag_template
    create_tag_template_field >> update_tag_template_field >> rename_tag_template_field
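The chain helper used twice in this DAG is shorthand for pairwise >> wiring; when a single task sits next to a list, it fans out or in. A minimal sketch with illustrative task ids, not from the DAG above:

# chain(a, b, c) is equivalent to a >> b >> c; a single task adjacent to a
# list fans out/in. Task ids here are illustrative.
from airflow import models
from airflow.models.baseoperator import chain
from airflow.operators.bash import BashOperator
from airflow.utils.dates import days_ago

with models.DAG(dag_id="chain_sketch", schedule_interval=None, start_date=days_ago(1)) as dag:
    a = BashOperator(task_id="a", bash_command="echo a")
    b = BashOperator(task_id="b", bash_command="echo b")
    c = BashOperator(task_id="c", bash_command="echo c")
    d = BashOperator(task_id="d", bash_command="echo d")

    chain(a, [b, c], d)  # same as: a >> b >> d and a >> c >> d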
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/example_dags/example_complex.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""This module is deprecated. Please use :mod:`airflow.providers.google.cloud.operators.bigquery`."""

import warnings

from airflow.providers.google.cloud.operators.bigquery import BigQueryDeleteTableOperator

warnings.warn(
    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery`.",
    DeprecationWarning,
    stacklevel=2,
)


class BigQueryTableDeleteOperator(BigQueryDeleteTableOperator):
    """
    This class is deprecated.
    Please use `airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator`.
    """

    def __init__(self, *args, **kwargs):
        warnings.warn(
            """This class is deprecated.
            Please use `airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator`.""",
            DeprecationWarning,
            stacklevel=2,
        )
        super().__init__(*args, **kwargs)
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/contrib/operators/bigquery_table_delete_operator.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Add external executor ID to TI

Revision ID: e1a11ece99cc
Revises: b247b1e3d1ed
Create Date: 2020-09-12 08:23:45.698865

"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = 'e1a11ece99cc'
down_revision = 'b247b1e3d1ed'
branch_labels = None
depends_on = None


def upgrade():
    """Apply Add external executor ID to TI"""
    with op.batch_alter_table('task_instance', schema=None) as batch_op:
        batch_op.add_column(sa.Column('external_executor_id', sa.String(length=250), nullable=True))


def downgrade():
    """Unapply Add external executor ID to TI"""
    with op.batch_alter_table('task_instance', schema=None) as batch_op:
        batch_op.drop_column('external_executor_id')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/migrations/versions/e1a11ece99cc_add_external_executor_id_to_ti.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import datetime
import hashlib
import os
import time
from datetime import timedelta
from typing import Any, Callable, Dict, Iterable

from airflow.configuration import conf
from airflow.exceptions import (
    AirflowException,
    AirflowRescheduleException,
    AirflowSensorTimeout,
    AirflowSkipException,
)
from airflow.models import BaseOperator, SensorInstance
from airflow.models.skipmixin import SkipMixin
from airflow.models.taskreschedule import TaskReschedule
from airflow.ti_deps.deps.ready_to_reschedule import ReadyToRescheduleDep
from airflow.utils import timezone

# We need to keep the import here because GCSToLocalFilesystemOperator released in
# Google Provider before 3.0.0 imported apply_defaults from here.
# See https://github.com/apache/airflow/issues/16035
from airflow.utils.decorators import apply_defaults


class BaseSensorOperator(BaseOperator, SkipMixin):
    """
    Sensor operators are derived from this class and inherit these attributes.

    Sensor operators keep executing at a time interval and succeed when
    a criterion is met and fail if and when they time out.

    :param soft_fail: Set to true to mark the task as SKIPPED on failure
    :type soft_fail: bool
    :param poke_interval: Time in seconds that the job should wait in
        between each try
    :type poke_interval: float
    :param timeout: Time, in seconds, before the task times out and fails.
    :type timeout: float
    :param mode: How the sensor operates.
        Options are: ``{ poke | reschedule }``, default is ``poke``.
        When set to ``poke`` the sensor takes up a worker slot for its
        whole execution time and sleeps between pokes. Use this mode if the
        expected runtime of the sensor is short or if a short poke interval
        is required. Note that the sensor will hold onto a worker slot and
        a pool slot for the duration of the sensor's runtime in this mode.
        When set to ``reschedule`` the sensor task frees the worker slot when
        the criteria is not yet met and is rescheduled at a later time. Use
        this mode if the time before the criteria is met is expected to be
        quite long. The poke interval should be more than one minute to
        prevent too much load on the scheduler.
    :type mode: str
    :param exponential_backoff: allow progressively longer waits between
        pokes by using an exponential backoff algorithm
    :type exponential_backoff: bool
    """

    ui_color = '#e6f1f2'  # type: str
    valid_modes = ['poke', 'reschedule']  # type: Iterable[str]

    # As the poke context in smart sensor defines the poking job signature only,
    # execution_fields defines the other execution details for these tasks, such
    # as the user-defined timeout and the email/alert setup. The smart sensor
    # serializes these attributes into a separate DB column so that the smart
    # sensor service can handle the corresponding execution details without
    # breaking the deduplicated sensor poking logic.
    execution_fields = (
        'poke_interval',
        'retries',
        'execution_timeout',
        'timeout',
        'email',
        'email_on_retry',
        'email_on_failure',
    )

    def __init__(
        self,
        *,
        poke_interval: float = 60,
        timeout: float = 60 * 60 * 24 * 7,
        soft_fail: bool = False,
        mode: str = 'poke',
        exponential_backoff: bool = False,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.poke_interval = poke_interval
        self.soft_fail = soft_fail
        self.timeout = timeout
        self.mode = mode
        self.exponential_backoff = exponential_backoff
        self._validate_input_values()
        self.sensor_service_enabled = conf.getboolean('smart_sensor', 'use_smart_sensor')
        self.sensors_support_sensor_service = set(
            map(lambda l: l.strip(), conf.get('smart_sensor', 'sensors_enabled').split(','))
        )

    def _validate_input_values(self) -> None:
        if not isinstance(self.poke_interval, (int, float)) or self.poke_interval < 0:
            raise AirflowException("The poke_interval must be a non-negative number")
        if not isinstance(self.timeout, (int, float)) or self.timeout < 0:
            raise AirflowException("The timeout must be a non-negative number")
        if self.mode not in self.valid_modes:
            raise AirflowException(
                "The mode must be one of {valid_modes} for task "
                "'{d}.{t}'; received '{m}'.".format(
                    valid_modes=self.valid_modes,
                    d=self.dag.dag_id if self.dag else "",
                    t=self.task_id,
                    m=self.mode,
                )
            )

    def poke(self, context: Dict) -> bool:
        """Function that the sensors defined while deriving this class should override."""
        raise AirflowException('Override me.')

    def is_smart_sensor_compatible(self):
        check_list = [
            not self.sensor_service_enabled,
            self.on_success_callback,
            self.on_retry_callback,
            self.on_failure_callback,
        ]
        for status in check_list:
            if status:
                return False

        operator = self.__class__.__name__
        return operator in self.sensors_support_sensor_service

    def register_in_sensor_service(self, ti, context):
        """
        Register ti in the smart sensor service.

        :param ti: Task instance object.
        :param context: TaskInstance template context from the ti.
        :return: boolean
        """
        poke_context = self.get_poke_context(context)
        execution_context = self.get_execution_context(context)

        return SensorInstance.register(ti, poke_context, execution_context)

    def get_poke_context(self, context):
        """
        Return a dictionary with all attributes in poke_context_fields. The
        poke_context together with the operator class can be used to identify
        a unique sensor job.

        :param context: TaskInstance template context.
        :return: A dictionary with keys from poke_context_fields.
        """
        if not context:
            self.log.info("Function get_poke_context doesn't have a context input.")

        poke_context_fields = getattr(self.__class__, "poke_context_fields", None)
        result = {key: getattr(self, key, None) for key in poke_context_fields}
        return result

    def get_execution_context(self, context):
        """
        Return a dictionary with all attributes in execution_fields. The
        execution_context includes execution requirements for each sensor
        task, such as the timeout and email_alert setup.

        :param context: TaskInstance template context.
        :return: A dictionary with keys from execution_fields.
        """
        if not context:
            self.log.info("Function get_execution_context doesn't have a context input.")
        execution_fields = self.__class__.execution_fields

        result = {key: getattr(self, key, None) for key in execution_fields}
        if result['execution_timeout'] and isinstance(result['execution_timeout'], datetime.timedelta):
            result['execution_timeout'] = result['execution_timeout'].total_seconds()
        return result

    def execute(self, context: Dict) -> Any:
        started_at = None

        if self.reschedule:
            # If reschedule, use the start date of the first try (first try can be either the very
            # first execution of the task, or the first execution after the task was cleared.)
            first_try_number = context['ti'].max_tries - self.retries + 1
            task_reschedules = TaskReschedule.find_for_task_instance(
                context['ti'], try_number=first_try_number
            )
            if task_reschedules:
                started_at = task_reschedules[0].start_date
            else:
                started_at = timezone.utcnow()

            def run_duration() -> float:
                # If we are in reschedule mode, then we have to compute diff
                # based on the time in a DB, so can't use time.monotonic
                nonlocal started_at
                return (timezone.utcnow() - started_at).total_seconds()

        else:
            started_at = time.monotonic()

            def run_duration() -> float:
                nonlocal started_at
                return time.monotonic() - started_at

        try_number = 1
        log_dag_id = self.dag.dag_id if self.has_dag() else ""

        while not self.poke(context):
            if run_duration() > self.timeout:
                # If sensor is in soft fail mode but times out raise AirflowSkipException.
                if self.soft_fail:
                    raise AirflowSkipException(f"Snap. Time is OUT. DAG id: {log_dag_id}")
                else:
                    raise AirflowSensorTimeout(f"Snap. Time is OUT. DAG id: {log_dag_id}")
            if self.reschedule:
                reschedule_date = timezone.utcnow() + timedelta(
                    seconds=self._get_next_poke_interval(started_at, run_duration, try_number)
                )
                raise AirflowRescheduleException(reschedule_date)
            else:
                time.sleep(self._get_next_poke_interval(started_at, run_duration, try_number))
                try_number += 1
        self.log.info("Success criteria met. Exiting.")

    def _get_next_poke_interval(self, started_at: Any, run_duration: Callable[[], float], try_number):
        """Uses logic similar to the exponential backoff retry delay used for operators."""
        if self.exponential_backoff:
            min_backoff = int(self.poke_interval * (2 ** (try_number - 2)))

            run_hash = int(
                hashlib.sha1(
                    f"{self.dag_id}#{self.task_id}#{started_at}#{try_number}".encode("utf-8")
                ).hexdigest(),
                16,
            )
            modded_hash = min_backoff + run_hash % min_backoff

            delay_backoff_in_seconds = min(modded_hash, timedelta.max.total_seconds() - 1)
            new_interval = min(self.timeout - int(run_duration()), delay_backoff_in_seconds)
            self.log.info("new %s interval is %s", self.mode, new_interval)
            return new_interval
        else:
            return self.poke_interval

    def prepare_for_execution(self) -> BaseOperator:
        task = super().prepare_for_execution()
        # Sensors in `poke` mode can block execution of DAGs when running
        # with the single-process executor, so we change the mode to
        # `reschedule` to allow parallel tasks to be scheduled and executed.
        if conf.get('core', 'executor') == "DebugExecutor":
            self.log.warning("DebugExecutor changes sensor mode to 'reschedule'.")
            task.mode = 'reschedule'
        return task

    @property
    def reschedule(self):
        """Whether the sensor is in ``reschedule`` mode."""
        return self.mode == 'reschedule'

    @property
    def deps(self):
        """
        Adds one additional dependency for all sensor operators that checks
        if a sensor task instance can be rescheduled.
        """
        if self.reschedule:
            return super().deps | {ReadyToRescheduleDep()}
        return super().deps


def poke_mode_only(cls):
    """
    Class decorator for child classes of BaseSensorOperator to indicate that
    instances of this class are only safe to use in poke mode.

    Will decorate all methods in the class to assert they did not change
    the mode from 'poke'.

    :param cls: BaseSensor class to enforce methods only use 'poke' mode.
    :type cls: type
    """

    def decorate(cls_type):
        def mode_getter(_):
            return 'poke'

        def mode_setter(_, value):
            if value != 'poke':
                raise ValueError(f"Cannot set mode to '{value}'; poke_mode_only sensors must use 'poke'.")

        if not issubclass(cls_type, BaseSensorOperator):
            raise ValueError(
                f"poke_mode_only decorator should only be "
                f"applied to subclasses of BaseSensorOperator,"
                f" got:{cls_type}."
            )

        cls_type.mode = property(mode_getter, mode_setter)

        return cls_type

    return decorate(cls)


if 'BUILDING_AIRFLOW_DOCS' in os.environ:
    # flake8: noqa: F811
    # Monkey patch hook to get good function headers while building docs
    apply_defaults = lambda x: x
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import io import json import os import unittest from contextlib import redirect_stdout import pytest from airflow import models, settings from airflow.cli import cli_parser from airflow.cli.commands import pool_command from airflow.models import Pool from airflow.settings import Session from airflow.utils.db import add_default_pool_if_not_exists class TestCliPools(unittest.TestCase): @classmethod def setUpClass(cls): cls.dagbag = models.DagBag(include_examples=True) cls.parser = cli_parser.get_parser() def setUp(self): super().setUp() settings.configure_orm() self.session = Session self._cleanup() def tearDown(self): self._cleanup() @staticmethod def _cleanup(session=None): if session is None: session = Session() session.query(Pool).filter(Pool.pool != Pool.DEFAULT_POOL_NAME).delete() session.commit() add_default_pool_if_not_exists() session.close() def test_pool_list(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) with redirect_stdout(io.StringIO()) as stdout: pool_command.pool_list(self.parser.parse_args(['pools', 'list'])) assert 'foo' in stdout.getvalue() def test_pool_list_with_args(self): pool_command.pool_list(self.parser.parse_args(['pools', 'list', '--output', 'json'])) def test_pool_create(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) assert self.session.query(Pool).count() == 2 def test_pool_get(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_get(self.parser.parse_args(['pools', 'get', 'foo'])) def test_pool_delete(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_delete(self.parser.parse_args(['pools', 'delete', 'foo'])) assert self.session.query(Pool).count() == 1 def test_pool_import_nonexistent(self): with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'nonexistent.json'])) def test_pool_import_invalid_json(self): with open('pools_import_invalid.json', mode='w') as file: file.write("not valid json") with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_invalid_pools(self): pool_config_input = {"foo": {"description": "foo_test"}} with open('pools_import_invalid.json', mode='w') as file: json.dump(pool_config_input, file) with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_export(self): # Create two pools first pool_config_input = { "foo": {"description": "foo_test", "slots": 1}, 'default_pool': {'description': 'Default pool', 'slots': 128}, "baz": {"description": 
"baz_test", "slots": 2}, } with open('pools_import.json', mode='w') as file: json.dump(pool_config_input, file) # Import json pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import.json'])) # Export json pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json'])) with open('pools_export.json') as file: pool_config_output = json.load(file) assert pool_config_input == pool_config_output, "Input and output pool files are not same" os.remove('pools_import.json') os.remove('pools_export.json')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/sensors/base.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use :mod:`airflow.providers.google.cloud.hooks.compute`."""

import warnings

from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook

warnings.warn(
    "This module is deprecated. Please use `airflow.providers.google.cloud.hooks.compute`.",
    DeprecationWarning,
    stacklevel=2,
)


class GceHook(ComputeEngineHook):
    """
    This class is deprecated.
    Please use :class:`airflow.providers.google.cloud.hooks.compute.ComputeEngineHook`.
    """

    def __init__(self, *args, **kwargs):
        warnings.warn(
            "This class is deprecated. "
            "Please use `airflow.providers.google.cloud.hooks.compute.ComputeEngineHook`.",
            DeprecationWarning,
            stacklevel=2,
        )
        super().__init__(*args, **kwargs)
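For reference, a hedged sketch of the migration this shim exists for; old DAG code keeps working through the alias while emitting the warning above.

# Legacy import path: still resolves via the shim, but emits a DeprecationWarning.
from airflow.contrib.hooks.gcp_compute_hook import GceHook

# Current import path that DAG code should use instead.
from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook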
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import io import json import os import unittest from contextlib import redirect_stdout import pytest from airflow import models, settings from airflow.cli import cli_parser from airflow.cli.commands import pool_command from airflow.models import Pool from airflow.settings import Session from airflow.utils.db import add_default_pool_if_not_exists class TestCliPools(unittest.TestCase): @classmethod def setUpClass(cls): cls.dagbag = models.DagBag(include_examples=True) cls.parser = cli_parser.get_parser() def setUp(self): super().setUp() settings.configure_orm() self.session = Session self._cleanup() def tearDown(self): self._cleanup() @staticmethod def _cleanup(session=None): if session is None: session = Session() session.query(Pool).filter(Pool.pool != Pool.DEFAULT_POOL_NAME).delete() session.commit() add_default_pool_if_not_exists() session.close() def test_pool_list(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) with redirect_stdout(io.StringIO()) as stdout: pool_command.pool_list(self.parser.parse_args(['pools', 'list'])) assert 'foo' in stdout.getvalue() def test_pool_list_with_args(self): pool_command.pool_list(self.parser.parse_args(['pools', 'list', '--output', 'json'])) def test_pool_create(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) assert self.session.query(Pool).count() == 2 def test_pool_get(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_get(self.parser.parse_args(['pools', 'get', 'foo'])) def test_pool_delete(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_delete(self.parser.parse_args(['pools', 'delete', 'foo'])) assert self.session.query(Pool).count() == 1 def test_pool_import_nonexistent(self): with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'nonexistent.json'])) def test_pool_import_invalid_json(self): with open('pools_import_invalid.json', mode='w') as file: file.write("not valid json") with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_invalid_pools(self): pool_config_input = {"foo": {"description": "foo_test"}} with open('pools_import_invalid.json', mode='w') as file: json.dump(pool_config_input, file) with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_export(self): # Create two pools first pool_config_input = { "foo": {"description": "foo_test", "slots": 1}, 'default_pool': {'description': 'Default pool', 'slots': 128}, "baz": {"description": 
"baz_test", "slots": 2}, } with open('pools_import.json', mode='w') as file: json.dump(pool_config_input, file) # Import json pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import.json'])) # Export json pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json'])) with open('pools_export.json') as file: pool_config_output = json.load(file) assert pool_config_input == pool_config_output, "Input and output pool files are not same" os.remove('pools_import.json') os.remove('pools_export.json')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/contrib/hooks/gcp_compute_hook.py
import functools
import logging

from ..decorators.decorator import RequestResponseDecorator
from ..decorators.security import (get_apikeyinfo_func, get_basicinfo_func,
                                   get_bearerinfo_func,
                                   get_scope_validate_func, get_tokeninfo_func,
                                   security_deny, security_passthrough,
                                   verify_apikey, verify_basic, verify_bearer,
                                   verify_none, verify_oauth, verify_security)

logger = logging.getLogger("connexion.operations.secure")

DEFAULT_MIMETYPE = 'application/json'


class SecureOperation(object):

    def __init__(self, api, security, security_schemes):
        """
        :param api: api object that the operation belongs to
        :param security: list of security rules the application uses by default
        :type security: list
        :param security_schemes: `Security Definitions Object
            <https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#security-definitions-object>`_
        :type security_schemes: dict
        """
        self._api = api
        self._security = security
        self._security_schemes = security_schemes

    @property
    def api(self):
        return self._api

    @property
    def security(self):
        return self._security

    @property
    def security_schemes(self):
        return self._security_schemes

    @property
    def security_decorator(self):
        """
        Gets the security decorator for the operation

        From the Swagger Specification:

        **Security Definitions Object**

        A declaration of the security schemes available to be used in the specification.

        This does not enforce the security schemes on the operations and only serves to
        provide the relevant details for each scheme.

        **Operation Object -> security**

        A declaration of which security schemes are applied for this operation. The list of
        values describes alternative security schemes that can be used (that is, there is a
        logical OR between the security requirements). This definition overrides any declared
        top-level security. To remove a top-level security declaration, an empty array can be used.

        **Security Requirement Object**

        Lists the required security schemes to execute this operation. The object can have
        multiple security schemes declared in it which are all required (that is, there is
        a logical AND between the schemes).

        The name used for each property **MUST** correspond to a security scheme declared in
        the Security Definitions.

        :rtype: types.FunctionType
        """
        logger.debug('... Security: %s', self.security, extra=vars(self))
        if not self.security:
            return security_passthrough

        auth_funcs = []
        required_scopes = None
        for security_req in self.security:
            if not security_req:
                auth_funcs.append(verify_none())
                continue
            elif len(security_req) > 1:
                logger.warning("... More than one security scheme in security requirement defined. "
                               "**DENYING ALL REQUESTS**", extra=vars(self))
                return security_deny

            scheme_name, scopes = next(iter(security_req.items()))
            security_scheme = self.security_schemes[scheme_name]

            if security_scheme['type'] == 'oauth2':
                required_scopes = scopes
                token_info_func = get_tokeninfo_func(security_scheme)
                scope_validate_func = get_scope_validate_func(security_scheme)
                if not token_info_func:
                    logger.warning("... x-tokenInfoFunc missing", extra=vars(self))
                    continue

                auth_funcs.append(verify_oauth(token_info_func, scope_validate_func))

            # Swagger 2.0
            elif security_scheme['type'] == 'basic':
                basic_info_func = get_basicinfo_func(security_scheme)
                if not basic_info_func:
                    logger.warning("... x-basicInfoFunc missing", extra=vars(self))
                    continue

                auth_funcs.append(verify_basic(basic_info_func))

            # OpenAPI 3.0.0
            elif security_scheme['type'] == 'http':
                scheme = security_scheme['scheme'].lower()
                if scheme == 'basic':
                    basic_info_func = get_basicinfo_func(security_scheme)
                    if not basic_info_func:
                        logger.warning("... x-basicInfoFunc missing", extra=vars(self))
                        continue

                    auth_funcs.append(verify_basic(basic_info_func))
                elif scheme == 'bearer':
                    bearer_info_func = get_bearerinfo_func(security_scheme)
                    if not bearer_info_func:
                        logger.warning("... x-bearerInfoFunc missing", extra=vars(self))
                        continue
                    auth_funcs.append(verify_bearer(bearer_info_func))
                else:
                    logger.warning("... Unsupported http authorization scheme %s", scheme,
                                   extra=vars(self))

            elif security_scheme['type'] == 'apiKey':
                scheme = security_scheme.get('x-authentication-scheme', '').lower()
                if scheme == 'bearer':
                    bearer_info_func = get_bearerinfo_func(security_scheme)
                    if not bearer_info_func:
                        logger.warning("... x-bearerInfoFunc missing", extra=vars(self))
                        continue
                    auth_funcs.append(verify_bearer(bearer_info_func))
                else:
                    apikey_info_func = get_apikeyinfo_func(security_scheme)
                    if not apikey_info_func:
                        logger.warning("... x-apikeyInfoFunc missing", extra=vars(self))
                        continue

                    auth_funcs.append(verify_apikey(apikey_info_func, security_scheme['in'],
                                                    security_scheme['name']))

            else:
                logger.warning("... Unsupported security scheme type %s",
                               security_scheme['type'], extra=vars(self))

        return functools.partial(verify_security, auth_funcs, required_scopes)

    def get_mimetype(self):
        return DEFAULT_MIMETYPE

    @property
    def _request_response_decorator(self):
        """
        Guarantees that instead of the internal representation of the operation handler
        response (connexion.lifecycle.ConnexionRequest) a framework-specific object is
        returned.

        :rtype: types.FunctionType
        """
        return RequestResponseDecorator(self.api, self.get_mimetype())
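A minimal sketch of the handler side of this machinery, assuming a hypothetical myapp.auth module: for an apiKey scheme, get_apikeyinfo_func resolves x-apikeyInfoFunc to a callable that returns a token-info dict on success or None to reject the request.

# myapp/auth.py (hypothetical), referenced from the spec as:
#   securitySchemes:
#     api_key:
#       type: apiKey
#       in: header
#       name: X-API-Key
#       x-apikeyInfoFunc: myapp.auth.apikey_info

VALID_KEYS = {'secret-key-1': {'sub': 'service-a'}}  # hypothetical key store


def apikey_info(apikey, required_scopes=None):
    # Returning None causes verify_apikey/verify_security to reject the
    # request as unauthenticated; a dict becomes the request's token info.
    return VALID_KEYS.get(apikey)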
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import io import json import os import unittest from contextlib import redirect_stdout import pytest from airflow import models, settings from airflow.cli import cli_parser from airflow.cli.commands import pool_command from airflow.models import Pool from airflow.settings import Session from airflow.utils.db import add_default_pool_if_not_exists class TestCliPools(unittest.TestCase): @classmethod def setUpClass(cls): cls.dagbag = models.DagBag(include_examples=True) cls.parser = cli_parser.get_parser() def setUp(self): super().setUp() settings.configure_orm() self.session = Session self._cleanup() def tearDown(self): self._cleanup() @staticmethod def _cleanup(session=None): if session is None: session = Session() session.query(Pool).filter(Pool.pool != Pool.DEFAULT_POOL_NAME).delete() session.commit() add_default_pool_if_not_exists() session.close() def test_pool_list(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) with redirect_stdout(io.StringIO()) as stdout: pool_command.pool_list(self.parser.parse_args(['pools', 'list'])) assert 'foo' in stdout.getvalue() def test_pool_list_with_args(self): pool_command.pool_list(self.parser.parse_args(['pools', 'list', '--output', 'json'])) def test_pool_create(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) assert self.session.query(Pool).count() == 2 def test_pool_get(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_get(self.parser.parse_args(['pools', 'get', 'foo'])) def test_pool_delete(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_delete(self.parser.parse_args(['pools', 'delete', 'foo'])) assert self.session.query(Pool).count() == 1 def test_pool_import_nonexistent(self): with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'nonexistent.json'])) def test_pool_import_invalid_json(self): with open('pools_import_invalid.json', mode='w') as file: file.write("not valid json") with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_invalid_pools(self): pool_config_input = {"foo": {"description": "foo_test"}} with open('pools_import_invalid.json', mode='w') as file: json.dump(pool_config_input, file) with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_export(self): # Create two pools first pool_config_input = { "foo": {"description": "foo_test", "slots": 1}, 'default_pool': {'description': 'Default pool', 'slots': 128}, "baz": {"description": 
"baz_test", "slots": 2}, } with open('pools_import.json', mode='w') as file: json.dump(pool_config_input, file) # Import json pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import.json'])) # Export json pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json'])) with open('pools_export.json') as file: pool_config_output = json.load(file) assert pool_config_input == pool_config_output, "Input and output pool files are not same" os.remove('pools_import.json') os.remove('pools_export.json')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/_vendor/connexion/operations/secure.py
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """This module is deprecated. Please use :mod:`airflow.providers.http.operators.http`.""" import warnings from airflow.providers.http.operators.http import SimpleHttpOperator # noqa warnings.warn( "This module is deprecated. Please use `airflow.providers.http.operators.http`.", DeprecationWarning, stacklevel=2, )
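A small sketch of how this import-time warning surfaces; it assumes a fresh interpreter with Airflow installed, since Python's module cache means the shim only executes on first import.

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    # First import of the legacy path runs the shim above and emits the warning.
    from airflow.operators.http_operator import SimpleHttpOperator  # noqa: F401

assert any(issubclass(w.category, DeprecationWarning) for w in caught)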
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import io import json import os import unittest from contextlib import redirect_stdout import pytest from airflow import models, settings from airflow.cli import cli_parser from airflow.cli.commands import pool_command from airflow.models import Pool from airflow.settings import Session from airflow.utils.db import add_default_pool_if_not_exists class TestCliPools(unittest.TestCase): @classmethod def setUpClass(cls): cls.dagbag = models.DagBag(include_examples=True) cls.parser = cli_parser.get_parser() def setUp(self): super().setUp() settings.configure_orm() self.session = Session self._cleanup() def tearDown(self): self._cleanup() @staticmethod def _cleanup(session=None): if session is None: session = Session() session.query(Pool).filter(Pool.pool != Pool.DEFAULT_POOL_NAME).delete() session.commit() add_default_pool_if_not_exists() session.close() def test_pool_list(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) with redirect_stdout(io.StringIO()) as stdout: pool_command.pool_list(self.parser.parse_args(['pools', 'list'])) assert 'foo' in stdout.getvalue() def test_pool_list_with_args(self): pool_command.pool_list(self.parser.parse_args(['pools', 'list', '--output', 'json'])) def test_pool_create(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) assert self.session.query(Pool).count() == 2 def test_pool_get(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_get(self.parser.parse_args(['pools', 'get', 'foo'])) def test_pool_delete(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_delete(self.parser.parse_args(['pools', 'delete', 'foo'])) assert self.session.query(Pool).count() == 1 def test_pool_import_nonexistent(self): with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'nonexistent.json'])) def test_pool_import_invalid_json(self): with open('pools_import_invalid.json', mode='w') as file: file.write("not valid json") with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_invalid_pools(self): pool_config_input = {"foo": {"description": "foo_test"}} with open('pools_import_invalid.json', mode='w') as file: json.dump(pool_config_input, file) with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_export(self): # Create two pools first pool_config_input = { "foo": {"description": "foo_test", "slots": 1}, 'default_pool': {'description': 'Default pool', 'slots': 128}, "baz": {"description": 
"baz_test", "slots": 2}, } with open('pools_import.json', mode='w') as file: json.dump(pool_config_input, file) # Import json pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import.json'])) # Export json pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json'])) with open('pools_export.json') as file: pool_config_output = json.load(file) assert pool_config_input == pool_config_output, "Input and output pool files are not same" os.remove('pools_import.json') os.remove('pools_export.json')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/operators/http_operator.py
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """This module contains a Dataproc Job sensor.""" from google.cloud.dataproc_v1beta2.types import JobStatus from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.dataproc import DataprocHook from airflow.sensors.base import BaseSensorOperator class DataprocJobSensor(BaseSensorOperator): """ Check for the state of a previously submitted Dataproc job. :param project_id: The ID of the google cloud project in which to create the cluster. (templated) :type project_id: str :param dataproc_job_id: The Dataproc job ID to poll. (templated) :type dataproc_job_id: str :param location: Required. The Cloud Dataproc region in which to handle the request. (templated) :type location: str :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. :type gcp_conn_id: str """ template_fields = ('project_id', 'location', 'dataproc_job_id') ui_color = '#f0eee4' def __init__( self, *, project_id: str, dataproc_job_id: str, location: str, gcp_conn_id: str = 'google_cloud_default', **kwargs, ) -> None: super().__init__(**kwargs) self.project_id = project_id self.gcp_conn_id = gcp_conn_id self.dataproc_job_id = dataproc_job_id self.location = location def poke(self, context: dict) -> bool: hook = DataprocHook(gcp_conn_id=self.gcp_conn_id) job = hook.get_job(job_id=self.dataproc_job_id, location=self.location, project_id=self.project_id) state = job.status.state if state == JobStatus.State.ERROR: raise AirflowException(f'Job failed:\n{job}') elif state in { JobStatus.State.CANCELLED, JobStatus.State.CANCEL_PENDING, JobStatus.State.CANCEL_STARTED, }: raise AirflowException(f'Job was cancelled:\n{job}') elif JobStatus.State.DONE == state: self.log.debug("Job %s completed successfully.", self.dataproc_job_id) return True elif JobStatus.State.ATTEMPT_FAILURE == state: self.log.debug("Job %s attempt has failed.", self.dataproc_job_id) self.log.info("Waiting for job %s to complete.", self.dataproc_job_id) return False
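A hedged usage sketch of the sensor: waiting on a job whose ID an upstream task pushed to XCom (the project, region, and task IDs below are hypothetical).

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.sensors.dataproc import DataprocJobSensor

with DAG(dag_id='dataproc_wait_example', start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    wait_for_job = DataprocJobSensor(
        task_id='wait_for_dataproc_job',
        project_id='my-gcp-project',  # hypothetical project
        location='europe-west1',  # Dataproc region handling the job
        # dataproc_job_id is templated, so it can be pulled from XCom at runtime.
        dataproc_job_id="{{ ti.xcom_pull(task_ids='submit_job') }}",
        poke_interval=60,
    )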
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import io import json import os import unittest from contextlib import redirect_stdout import pytest from airflow import models, settings from airflow.cli import cli_parser from airflow.cli.commands import pool_command from airflow.models import Pool from airflow.settings import Session from airflow.utils.db import add_default_pool_if_not_exists class TestCliPools(unittest.TestCase): @classmethod def setUpClass(cls): cls.dagbag = models.DagBag(include_examples=True) cls.parser = cli_parser.get_parser() def setUp(self): super().setUp() settings.configure_orm() self.session = Session self._cleanup() def tearDown(self): self._cleanup() @staticmethod def _cleanup(session=None): if session is None: session = Session() session.query(Pool).filter(Pool.pool != Pool.DEFAULT_POOL_NAME).delete() session.commit() add_default_pool_if_not_exists() session.close() def test_pool_list(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) with redirect_stdout(io.StringIO()) as stdout: pool_command.pool_list(self.parser.parse_args(['pools', 'list'])) assert 'foo' in stdout.getvalue() def test_pool_list_with_args(self): pool_command.pool_list(self.parser.parse_args(['pools', 'list', '--output', 'json'])) def test_pool_create(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) assert self.session.query(Pool).count() == 2 def test_pool_get(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_get(self.parser.parse_args(['pools', 'get', 'foo'])) def test_pool_delete(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_delete(self.parser.parse_args(['pools', 'delete', 'foo'])) assert self.session.query(Pool).count() == 1 def test_pool_import_nonexistent(self): with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'nonexistent.json'])) def test_pool_import_invalid_json(self): with open('pools_import_invalid.json', mode='w') as file: file.write("not valid json") with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_invalid_pools(self): pool_config_input = {"foo": {"description": "foo_test"}} with open('pools_import_invalid.json', mode='w') as file: json.dump(pool_config_input, file) with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_export(self): # Create two pools first pool_config_input = { "foo": {"description": "foo_test", "slots": 1}, 'default_pool': {'description': 'Default pool', 'slots': 128}, "baz": {"description": 
"baz_test", "slots": 2}, } with open('pools_import.json', mode='w') as file: json.dump(pool_config_input, file) # Import json pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import.json'])) # Export json pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json'])) with open('pools_export.json') as file: pool_config_output = json.load(file) assert pool_config_input == pool_config_output, "Input and output pool files are not same" os.remove('pools_import.json') os.remove('pools_export.json')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/providers/google/cloud/sensors/dataproc.py
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """This module contains a Google Cloud Storage to BigQuery operator.""" import json from typing import Optional, Sequence, Union from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook from airflow.providers.google.cloud.hooks.gcs import GCSHook class GCSToBigQueryOperator(BaseOperator): """ Loads files from Google Cloud Storage into BigQuery. The schema to be used for the BigQuery table may be specified in one of two ways. You may either directly pass the schema fields in, or you may point the operator to a Google Cloud Storage object name. The object in Google Cloud Storage must be a JSON file with the schema fields in it. .. seealso:: For more information on how to use this operator, take a look at the guide: :ref:`howto/operator:GCSToBigQueryOperator` :param bucket: The bucket to load from. (templated) :type bucket: str :param source_objects: String or List of Google Cloud Storage URIs to load from. (templated) If source_format is 'DATASTORE_BACKUP', the list must only contain a single URI. :type source_objects: str, list[str] :param destination_project_dataset_table: The dotted ``(<project>.|<project>:)<dataset>.<table>`` BigQuery table to load data into. If ``<project>`` is not included, project will be the project defined in the connection json. (templated) :type destination_project_dataset_table: str :param schema_fields: If set, the schema field list as defined here: https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load Should not be set when source_format is 'DATASTORE_BACKUP'. Parameter must be defined if 'schema_object' is null and autodetect is False. :type schema_fields: list :param schema_object: If set, a GCS object path pointing to a .json file that contains the schema for the table. (templated) Parameter must be defined if 'schema_fields' is null and autodetect is False. :type schema_object: str :param source_format: File format to export. :type source_format: str :param compression: [Optional] The compression type of the data source. Possible values include GZIP and NONE. The default value is NONE. This setting is ignored for Google Cloud Bigtable, Google Cloud Datastore backups and Avro formats. :type compression: str :param create_disposition: The create disposition if the table doesn't exist. :type create_disposition: str :param skip_leading_rows: Number of rows to skip when loading from a CSV. :type skip_leading_rows: int :param write_disposition: The write disposition if the table already exists. :type write_disposition: str :param field_delimiter: The delimiter to use when loading from a CSV. 
:type field_delimiter: str :param max_bad_records: The maximum number of bad records that BigQuery can ignore when running the job. :type max_bad_records: int :param quote_character: The value that is used to quote data sections in a CSV file. :type quote_character: str :param ignore_unknown_values: [Optional] Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. :type ignore_unknown_values: bool :param allow_quoted_newlines: Whether to allow quoted newlines (true) or not (false). :type allow_quoted_newlines: bool :param allow_jagged_rows: Accept rows that are missing trailing optional columns. The missing values are treated as nulls. If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. Only applicable to CSV, ignored for other formats. :type allow_jagged_rows: bool :param encoding: The character encoding of the data. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).csvOptions.encoding https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.csvOptions.encoding :param max_id_key: If set, the name of a column in the BigQuery table that's to be loaded. This will be used to select the MAX value from BigQuery after the load occurs. The results will be returned by the execute() command, which in turn gets stored in XCom for future operators to use. This can be helpful with incremental loads--during future executions, you can pick up from the max ID. :type max_id_key: str :param bigquery_conn_id: (Optional) The connection ID used to connect to Google Cloud and interact with the BigQuery service. :type bigquery_conn_id: str :param google_cloud_storage_conn_id: (Optional) The connection ID used to connect to Google Cloud and interact with the Google Cloud Storage service. :type google_cloud_storage_conn_id: str :param delegate_to: The account to impersonate using domain-wide delegation of authority, if any. For this to work, the service account making the request must have domain-wide delegation enabled. :type delegate_to: str :param schema_update_options: Allows the schema of the destination table to be updated as a side effect of the load job. :type schema_update_options: list :param src_fmt_configs: configure optional fields specific to the source format :type src_fmt_configs: dict :param external_table: Flag to specify if the destination table should be a BigQuery external table. Default Value is False. :type external_table: bool :param time_partitioning: configure optional time partitioning fields i.e. partition by field, type and expiration as per API specifications. Note that 'field' is not available in concurrency with dataset.table$partition. :type time_partitioning: dict :param cluster_fields: Request that the result of this load be stored sorted by one or more columns. BigQuery supports clustering for both partitioned and non-partitioned tables. The order of columns given determines the sort order. Not applicable for external tables. :type cluster_fields: list[str] :param autodetect: [Optional] Indicates if we should automatically infer the options and schema for CSV and JSON sources. (Default: ``True``). 
        Parameter must be set to True if 'schema_fields' and 'schema_object' are undefined.
        It is suggested to set it to True if the table is created outside of Airflow.
    :type autodetect: bool
    :param encryption_configuration: [Optional] Custom encryption configuration (e.g., Cloud KMS keys).
        **Example**: ::

            encryption_configuration = {
                "kmsKeyName": "projects/testp/locations/us/keyRings/test-kr/cryptoKeys/test-key"
            }
    :type encryption_configuration: dict
    :param location: [Optional] The geographic location of the job. Required except for
        US and EU. See details at
        https://cloud.google.com/bigquery/docs/locations#specifying_your_location
    :type location: str
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    :param labels: [Optional] Labels for the BigQuery table.
    :type labels: dict
    :param description: [Optional] Description for the BigQuery table.
    :type description: str
    """

    template_fields = (
        'bucket',
        'source_objects',
        'schema_object',
        'destination_project_dataset_table',
        'impersonation_chain',
    )
    template_ext = ('.sql',)
    ui_color = '#f0eee4'

    def __init__(
        self,
        *,
        bucket,
        source_objects,
        destination_project_dataset_table,
        schema_fields=None,
        schema_object=None,
        source_format='CSV',
        compression='NONE',
        create_disposition='CREATE_IF_NEEDED',
        skip_leading_rows=0,
        write_disposition='WRITE_EMPTY',
        field_delimiter=',',
        max_bad_records=0,
        quote_character=None,
        ignore_unknown_values=False,
        allow_quoted_newlines=False,
        allow_jagged_rows=False,
        encoding="UTF-8",
        max_id_key=None,
        bigquery_conn_id='google_cloud_default',
        google_cloud_storage_conn_id='google_cloud_default',
        delegate_to=None,
        schema_update_options=(),
        src_fmt_configs=None,
        external_table=False,
        time_partitioning=None,
        cluster_fields=None,
        autodetect=True,
        encryption_configuration=None,
        location=None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        labels=None,
        description=None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        # GCS config
        if src_fmt_configs is None:
            src_fmt_configs = {}
        if time_partitioning is None:
            time_partitioning = {}
        self.bucket = bucket
        self.source_objects = source_objects if isinstance(source_objects, list) else [source_objects]
        self.schema_object = schema_object

        # BQ config
        self.destination_project_dataset_table = destination_project_dataset_table
        self.schema_fields = schema_fields
        self.source_format = source_format
        self.compression = compression
        self.create_disposition = create_disposition
        self.skip_leading_rows = skip_leading_rows
        self.write_disposition = write_disposition
        self.field_delimiter = field_delimiter
        self.max_bad_records = max_bad_records
        self.quote_character = quote_character
        self.ignore_unknown_values = ignore_unknown_values
        self.allow_quoted_newlines = allow_quoted_newlines
        self.allow_jagged_rows = allow_jagged_rows
        self.external_table = external_table
        self.encoding = encoding
        self.max_id_key = max_id_key
        self.bigquery_conn_id = bigquery_conn_id
        self.google_cloud_storage_conn_id = google_cloud_storage_conn_id
        self.delegate_to = delegate_to
        self.schema_update_options = schema_update_options
        self.src_fmt_configs = src_fmt_configs
        self.time_partitioning = time_partitioning
        self.cluster_fields = cluster_fields
        self.autodetect = autodetect
        self.encryption_configuration = encryption_configuration
        self.location = location
        self.impersonation_chain = impersonation_chain
        self.labels = labels
        self.description = description

    def execute(self, context):
        bq_hook = BigQueryHook(
            bigquery_conn_id=self.bigquery_conn_id,
            delegate_to=self.delegate_to,
            location=self.location,
            impersonation_chain=self.impersonation_chain,
        )

        if not self.schema_fields:
            if self.schema_object and self.source_format != 'DATASTORE_BACKUP':
                gcs_hook = GCSHook(
                    gcp_conn_id=self.google_cloud_storage_conn_id,
                    delegate_to=self.delegate_to,
                    impersonation_chain=self.impersonation_chain,
                )
                blob = gcs_hook.download(
                    bucket_name=self.bucket,
                    object_name=self.schema_object,
                )
                schema_fields = json.loads(blob.decode("utf-8"))
            elif self.schema_object is None and self.autodetect is False:
                raise AirflowException(
                    'At least one of `schema_fields`, `schema_object`, or `autodetect` must be passed.'
                )
            else:
                schema_fields = None
        else:
            schema_fields = self.schema_fields

        source_uris = [f'gs://{self.bucket}/{source_object}' for source_object in self.source_objects]
        conn = bq_hook.get_conn()
        cursor = conn.cursor()

        if self.external_table:
            cursor.create_external_table(
                external_project_dataset_table=self.destination_project_dataset_table,
                schema_fields=schema_fields,
                source_uris=source_uris,
                source_format=self.source_format,
                compression=self.compression,
                skip_leading_rows=self.skip_leading_rows,
                field_delimiter=self.field_delimiter,
                max_bad_records=self.max_bad_records,
                quote_character=self.quote_character,
                ignore_unknown_values=self.ignore_unknown_values,
                allow_quoted_newlines=self.allow_quoted_newlines,
                allow_jagged_rows=self.allow_jagged_rows,
                encoding=self.encoding,
                src_fmt_configs=self.src_fmt_configs,
                encryption_configuration=self.encryption_configuration,
                labels=self.labels,
                description=self.description,
            )
        else:
            cursor.run_load(
                destination_project_dataset_table=self.destination_project_dataset_table,
                schema_fields=schema_fields,
                source_uris=source_uris,
                source_format=self.source_format,
                autodetect=self.autodetect,
                create_disposition=self.create_disposition,
                skip_leading_rows=self.skip_leading_rows,
                write_disposition=self.write_disposition,
                field_delimiter=self.field_delimiter,
                max_bad_records=self.max_bad_records,
                quote_character=self.quote_character,
                ignore_unknown_values=self.ignore_unknown_values,
                allow_quoted_newlines=self.allow_quoted_newlines,
                allow_jagged_rows=self.allow_jagged_rows,
                encoding=self.encoding,
                schema_update_options=self.schema_update_options,
                src_fmt_configs=self.src_fmt_configs,
                time_partitioning=self.time_partitioning,
                cluster_fields=self.cluster_fields,
                encryption_configuration=self.encryption_configuration,
                labels=self.labels,
                description=self.description,
            )

        if cursor.use_legacy_sql:
            escaped_table_name = f'[{self.destination_project_dataset_table}]'
        else:
            escaped_table_name = f'`{self.destination_project_dataset_table}`'

        if self.max_id_key:
            cursor.execute(f'SELECT MAX({self.max_id_key}) FROM {escaped_table_name}')
            row = cursor.fetchone()
            max_id = row[0] if row[0] else 0
            self.log.info(
                'Loaded BQ data with max %s.%s=%s',
                self.destination_project_dataset_table,
                self.max_id_key,
                max_id,
            )
            # The docstring promises this value via XCom, so return it here.
            return max_id
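A hedged usage sketch of the operator with an explicit schema; the bucket, dataset, and column names below are hypothetical.

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator

with DAG(dag_id='gcs_to_bq_example', start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    load_csv = GCSToBigQueryOperator(
        task_id='gcs_to_bq',
        bucket='my-bucket',  # hypothetical bucket
        source_objects=['data/sales-*.csv'],
        destination_project_dataset_table='my_dataset.sales',
        schema_fields=[
            {'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED'},
            {'name': 'amount', 'type': 'FLOAT', 'mode': 'NULLABLE'},
        ],
        skip_leading_rows=1,
        write_disposition='WRITE_TRUNCATE',
        max_id_key='id',  # MAX(id) is returned by execute() and lands in XCom
    )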
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import io import json import os import unittest from contextlib import redirect_stdout import pytest from airflow import models, settings from airflow.cli import cli_parser from airflow.cli.commands import pool_command from airflow.models import Pool from airflow.settings import Session from airflow.utils.db import add_default_pool_if_not_exists class TestCliPools(unittest.TestCase): @classmethod def setUpClass(cls): cls.dagbag = models.DagBag(include_examples=True) cls.parser = cli_parser.get_parser() def setUp(self): super().setUp() settings.configure_orm() self.session = Session self._cleanup() def tearDown(self): self._cleanup() @staticmethod def _cleanup(session=None): if session is None: session = Session() session.query(Pool).filter(Pool.pool != Pool.DEFAULT_POOL_NAME).delete() session.commit() add_default_pool_if_not_exists() session.close() def test_pool_list(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) with redirect_stdout(io.StringIO()) as stdout: pool_command.pool_list(self.parser.parse_args(['pools', 'list'])) assert 'foo' in stdout.getvalue() def test_pool_list_with_args(self): pool_command.pool_list(self.parser.parse_args(['pools', 'list', '--output', 'json'])) def test_pool_create(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) assert self.session.query(Pool).count() == 2 def test_pool_get(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_get(self.parser.parse_args(['pools', 'get', 'foo'])) def test_pool_delete(self): pool_command.pool_set(self.parser.parse_args(['pools', 'set', 'foo', '1', 'test'])) pool_command.pool_delete(self.parser.parse_args(['pools', 'delete', 'foo'])) assert self.session.query(Pool).count() == 1 def test_pool_import_nonexistent(self): with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'nonexistent.json'])) def test_pool_import_invalid_json(self): with open('pools_import_invalid.json', mode='w') as file: file.write("not valid json") with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_invalid_pools(self): pool_config_input = {"foo": {"description": "foo_test"}} with open('pools_import_invalid.json', mode='w') as file: json.dump(pool_config_input, file) with pytest.raises(SystemExit): pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import_invalid.json'])) def test_pool_import_export(self): # Create two pools first pool_config_input = { "foo": {"description": "foo_test", "slots": 1}, 'default_pool': {'description': 'Default pool', 'slots': 128}, "baz": {"description": 
"baz_test", "slots": 2}, } with open('pools_import.json', mode='w') as file: json.dump(pool_config_input, file) # Import json pool_command.pool_import(self.parser.parse_args(['pools', 'import', 'pools_import.json'])) # Export json pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json'])) with open('pools_export.json') as file: pool_config_output = json.load(file) assert pool_config_input == pool_config_output, "Input and output pool files are not same" os.remove('pools_import.json') os.remove('pools_export.json')
apache/airflow
tests/cli/commands/test_pool_command.py
airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
import itertools

import numpy as np
import pytest

import pandas as pd
from pandas.core.internals import ExtensionBlock

from .base import BaseExtensionTests


class BaseReshapingTests(BaseExtensionTests):
    """Tests for reshaping and concatenation."""

    @pytest.mark.parametrize('in_frame', [True, False])
    def test_concat(self, data, in_frame):
        wrapped = pd.Series(data)
        if in_frame:
            wrapped = pd.DataFrame(wrapped)
        result = pd.concat([wrapped, wrapped], ignore_index=True)

        assert len(result) == len(data) * 2

        if in_frame:
            dtype = result.dtypes[0]
        else:
            dtype = result.dtype

        assert dtype == data.dtype
        assert isinstance(result._data.blocks[0], ExtensionBlock)

    @pytest.mark.parametrize('in_frame', [True, False])
    def test_concat_all_na_block(self, data_missing, in_frame):
        valid_block = pd.Series(data_missing.take([1, 1]), index=[0, 1])
        na_block = pd.Series(data_missing.take([0, 0]), index=[2, 3])
        if in_frame:
            valid_block = pd.DataFrame({"a": valid_block})
            na_block = pd.DataFrame({"a": na_block})
        result = pd.concat([valid_block, na_block])
        if in_frame:
            expected = pd.DataFrame({"a": data_missing.take([1, 1, 0, 0])})
            self.assert_frame_equal(result, expected)
        else:
            expected = pd.Series(data_missing.take([1, 1, 0, 0]))
            self.assert_series_equal(result, expected)

    def test_concat_mixed_dtypes(self, data):
        # https://github.com/pandas-dev/pandas/issues/20762
        df1 = pd.DataFrame({'A': data[:3]})
        df2 = pd.DataFrame({"A": [1, 2, 3]})
        df3 = pd.DataFrame({"A": ['a', 'b', 'c']}).astype('category')
        dfs = [df1, df2, df3]

        # dataframes
        result = pd.concat(dfs)
        expected = pd.concat([x.astype(object) for x in dfs])
        self.assert_frame_equal(result, expected)

        # series
        result = pd.concat([x['A'] for x in dfs])
        expected = pd.concat([x['A'].astype(object) for x in dfs])
        self.assert_series_equal(result, expected)

        # simple test for just EA and one other
        result = pd.concat([df1, df2])
        expected = pd.concat([df1.astype('object'), df2.astype('object')])
        self.assert_frame_equal(result, expected)

        result = pd.concat([df1['A'], df2['A']])
        expected = pd.concat([df1['A'].astype('object'),
                              df2['A'].astype('object')])
        self.assert_series_equal(result, expected)

    def test_concat_columns(self, data, na_value):
        df1 = pd.DataFrame({'A': data[:3]})
        df2 = pd.DataFrame({'B': [1, 2, 3]})

        expected = pd.DataFrame({'A': data[:3], 'B': [1, 2, 3]})
        result = pd.concat([df1, df2], axis=1)
        self.assert_frame_equal(result, expected)
        result = pd.concat([df1['A'], df2['B']], axis=1)
        self.assert_frame_equal(result, expected)

        # non-aligned
        df2 = pd.DataFrame({'B': [1, 2, 3]}, index=[1, 2, 3])
        expected = pd.DataFrame({
            'A': data._from_sequence(list(data[:3]) + [na_value],
                                     dtype=data.dtype),
            'B': [np.nan, 1, 2, 3]})

        result = pd.concat([df1, df2], axis=1)
        self.assert_frame_equal(result, expected)
        result = pd.concat([df1['A'], df2['B']], axis=1)
        self.assert_frame_equal(result, expected)

    def test_align(self, data, na_value):
        a = data[:3]
        b = data[2:5]
        r1, r2 = pd.Series(a).align(pd.Series(b, index=[1, 2, 3]))

        # Assumes that the ctor can take a list of scalars of the type
        e1 = pd.Series(data._from_sequence(list(a) + [na_value],
                                           dtype=data.dtype))
        e2 = pd.Series(data._from_sequence([na_value] + list(b),
                                           dtype=data.dtype))
        self.assert_series_equal(r1, e1)
        self.assert_series_equal(r2, e2)

    def test_align_frame(self, data, na_value):
        a = data[:3]
        b = data[2:5]
        r1, r2 = pd.DataFrame({'A': a}).align(
            pd.DataFrame({'A': b}, index=[1, 2, 3])
        )

        # Assumes that the ctor can take a list of scalars of the type
        e1 = pd.DataFrame({'A': data._from_sequence(list(a) + [na_value],
                                                    dtype=data.dtype)})
        e2 = pd.DataFrame({'A': data._from_sequence([na_value] + list(b),
                                                    dtype=data.dtype)})
        self.assert_frame_equal(r1, e1)
        self.assert_frame_equal(r2, e2)

    def test_align_series_frame(self, data, na_value):
        # https://github.com/pandas-dev/pandas/issues/20576
        ser = pd.Series(data, name='a')
        df = pd.DataFrame({"col": np.arange(len(ser) + 1)})
        r1, r2 = ser.align(df)

        e1 = pd.Series(data._from_sequence(list(data) + [na_value],
                                           dtype=data.dtype),
                       name=ser.name)

        self.assert_series_equal(r1, e1)
        self.assert_frame_equal(r2, df)

    def test_set_frame_expand_regular_with_extension(self, data):
        df = pd.DataFrame({"A": [1] * len(data)})
        df['B'] = data
        expected = pd.DataFrame({"A": [1] * len(data), "B": data})
        self.assert_frame_equal(df, expected)

    def test_set_frame_expand_extension_with_regular(self, data):
        df = pd.DataFrame({'A': data})
        df['B'] = [1] * len(data)
        expected = pd.DataFrame({"A": data, "B": [1] * len(data)})
        self.assert_frame_equal(df, expected)

    def test_set_frame_overwrite_object(self, data):
        # https://github.com/pandas-dev/pandas/issues/20555
        df = pd.DataFrame({"A": [1] * len(data)}, dtype=object)
        df['A'] = data
        assert df.dtypes['A'] == data.dtype

    def test_merge(self, data, na_value):
        # GH-20743
        df1 = pd.DataFrame({'ext': data[:3], 'int1': [1, 2, 3],
                            'key': [0, 1, 2]})
        df2 = pd.DataFrame({'int2': [1, 2, 3, 4], 'key': [0, 0, 1, 3]})

        res = pd.merge(df1, df2)
        exp = pd.DataFrame(
            {'int1': [1, 1, 2], 'int2': [1, 2, 3], 'key': [0, 0, 1],
             'ext': data._from_sequence([data[0], data[0], data[1]],
                                        dtype=data.dtype)})
        self.assert_frame_equal(res, exp[['ext', 'int1', 'key', 'int2']])

        res = pd.merge(df1, df2, how='outer')
        exp = pd.DataFrame(
            {'int1': [1, 1, 2, 3, np.nan], 'int2': [1, 2, 3, np.nan, 4],
             'key': [0, 0, 1, 2, 3],
             'ext': data._from_sequence(
                 [data[0], data[0], data[1], data[2], na_value],
                 dtype=data.dtype)})
        self.assert_frame_equal(res, exp[['ext', 'int1', 'key', 'int2']])

    def test_merge_on_extension_array(self, data):
        # GH 23020
        a, b = data[:2]
        key = type(data)._from_sequence([a, b], dtype=data.dtype)

        df = pd.DataFrame({"key": key, "val": [1, 2]})
        result = pd.merge(df, df, on='key')
        expected = pd.DataFrame({"key": key,
                                 "val_x": [1, 2],
                                 "val_y": [1, 2]})
        self.assert_frame_equal(result, expected)

        # order
        result = pd.merge(df.iloc[[1, 0]], df, on='key')
        expected = expected.iloc[[1, 0]].reset_index(drop=True)
        self.assert_frame_equal(result, expected)

    def test_merge_on_extension_array_duplicates(self, data):
        # GH 23020
        a, b = data[:2]
        key = type(data)._from_sequence([a, b, a], dtype=data.dtype)
        df1 = pd.DataFrame({"key": key, "val": [1, 2, 3]})
        df2 = pd.DataFrame({"key": key, "val": [1, 2, 3]})

        result = pd.merge(df1, df2, on='key')
        expected = pd.DataFrame({
            "key": key.take([0, 0, 0, 0, 1]),
            "val_x": [1, 1, 3, 3, 2],
            "val_y": [1, 3, 1, 3, 2],
        })
        self.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("columns", [
        ["A", "B"],
        pd.MultiIndex.from_tuples([('A', 'a'), ('A', 'b')],
                                  names=['outer', 'inner']),
    ])
    def test_stack(self, data, columns):
        df = pd.DataFrame({"A": data[:5], "B": data[:5]})
        df.columns = columns

        result = df.stack()
        expected = df.astype(object).stack()
        # we need a second astype(object), in case the constructor inferred
        # object -> specialized, as is done for period.
        expected = expected.astype(object)

        if isinstance(expected, pd.Series):
            assert result.dtype == df.iloc[:, 0].dtype
        else:
            assert all(result.dtypes == df.iloc[:, 0].dtype)

        result = result.astype(object)
        self.assert_equal(result, expected)

    @pytest.mark.parametrize("index", [
        # Two levels, uniform.
        pd.MultiIndex.from_product(([['A', 'B'], ['a', 'b']]),
                                   names=['a', 'b']),
        # non-uniform
        pd.MultiIndex.from_tuples([('A', 'a'), ('A', 'b'), ('B', 'b')]),
        # three levels, non-uniform
        pd.MultiIndex.from_product([('A', 'B'), ('a', 'b', 'c'), (0, 1, 2)]),
        pd.MultiIndex.from_tuples([
            ('A', 'a', 1),
            ('A', 'b', 0),
            ('A', 'a', 0),
            ('B', 'a', 0),
            ('B', 'c', 1),
        ]),
    ])
    @pytest.mark.parametrize("obj", ["series", "frame"])
    def test_unstack(self, data, index, obj):
        data = data[:len(index)]
        if obj == "series":
            ser = pd.Series(data, index=index)
        else:
            ser = pd.DataFrame({"A": data, "B": data}, index=index)

        n = index.nlevels
        levels = list(range(n))
        # [0, 1, 2]
        # [(0,), (1,), (2,), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
        combinations = itertools.chain.from_iterable(
            itertools.permutations(levels, i) for i in range(1, n)
        )

        for level in combinations:
            result = ser.unstack(level=level)
            assert all(isinstance(result[col].array, type(data))
                       for col in result.columns)
            expected = ser.astype(object).unstack(level=level)
            result = result.astype(object)

            self.assert_frame_equal(result, expected)
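For context, BaseReshapingTests is a mixin: a concrete extension array opts into the shared suite by subclassing it and supplying pytest fixtures. A minimal sketch follows, with the fixture names (data, data_missing, na_value) taken from the tests above; the nullable "Int64" dtype, the np.nan na_value, and the pandas.tests.extension.base import path are assumptions based on pandas' own test layout, not part of this file.

import numpy as np
import pandas as pd
import pytest

# Assumption: the base package re-exports BaseReshapingTests, as pandas'
# extension test suite conventionally does in base/__init__.py.
from pandas.tests.extension.base import BaseReshapingTests


@pytest.fixture
def data():
    # 100 valid values; the shared tests slice out what they need.
    return pd.array(np.arange(100), dtype="Int64")


@pytest.fixture
def data_missing():
    # Convention used above: position 0 is missing, position 1 is valid.
    return pd.array([None, 1], dtype="Int64")


@pytest.fixture
def na_value():
    # Scalar expected in introduced-missing slots (assumed np.nan here;
    # newer pandas uses pd.NA for masked arrays).
    return np.nan


class TestInt64Reshaping(BaseReshapingTests):
    """Run the shared reshaping/concat tests against the Int64 array."""

pytest then collects every inherited test method against these fixtures, which is how one file of shared tests covers many extension dtypes.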
# -*- coding: utf-8 -*-

import numpy as np
import pytest

from pandas import Timestamp
from pandas.util.testing import assert_numpy_array_equal


def test_assert_numpy_array_equal_shape_mismatch():
    msg = """numpy array are different

numpy array shapes are different
\\[left\\]:  \\(2L*,\\)
\\[right\\]: \\(3L*,\\)"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([1, 2]), np.array([3, 4, 5]))


def test_assert_numpy_array_equal_bad_type():
    expected = "Expected type"

    with pytest.raises(AssertionError, match=expected):
        assert_numpy_array_equal(1, 2)


@pytest.mark.parametrize("a,b,klass1,klass2", [
    (np.array([1]), 1, "ndarray", "int"),
    (1, np.array([1]), "int", "ndarray"),
])
def test_assert_numpy_array_equal_class_mismatch(a, b, klass1, klass2):
    msg = """numpy array are different

numpy array classes are different
\\[left\\]:  {klass1}
\\[right\\]: {klass2}""".format(klass1=klass1, klass2=klass2)

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(a, b)


def test_assert_numpy_array_equal_value_mismatch1():
    msg = """numpy array are different

numpy array values are different \\(66\\.66667 %\\)
\\[left\\]:  \\[nan, 2\\.0, 3\\.0\\]
\\[right\\]: \\[1\\.0, nan, 3\\.0\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([np.nan, 2, 3]),
                                 np.array([1, np.nan, 3]))


def test_assert_numpy_array_equal_value_mismatch2():
    msg = """numpy array are different

numpy array values are different \\(50\\.0 %\\)
\\[left\\]:  \\[1, 2\\]
\\[right\\]: \\[1, 3\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([1, 2]), np.array([1, 3]))


def test_assert_numpy_array_equal_value_mismatch3():
    msg = """numpy array are different

numpy array values are different \\(16\\.66667 %\\)
\\[left\\]:  \\[\\[1, 2\\], \\[3, 4\\], \\[5, 6\\]\\]
\\[right\\]: \\[\\[1, 3\\], \\[3, 4\\], \\[5, 6\\]\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([[1, 2], [3, 4], [5, 6]]),
                                 np.array([[1, 3], [3, 4], [5, 6]]))


def test_assert_numpy_array_equal_value_mismatch4():
    msg = """numpy array are different

numpy array values are different \\(50\\.0 %\\)
\\[left\\]:  \\[1\\.1, 2\\.000001\\]
\\[right\\]: \\[1\\.1, 2.0\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([1.1, 2.000001]),
                                 np.array([1.1, 2.0]))


def test_assert_numpy_array_equal_value_mismatch5():
    msg = """numpy array are different

numpy array values are different \\(16\\.66667 %\\)
\\[left\\]:  \\[\\[1, 2\\], \\[3, 4\\], \\[5, 6\\]\\]
\\[right\\]: \\[\\[1, 3\\], \\[3, 4\\], \\[5, 6\\]\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([[1, 2], [3, 4], [5, 6]]),
                                 np.array([[1, 3], [3, 4], [5, 6]]))


def test_assert_numpy_array_equal_value_mismatch6():
    msg = """numpy array are different

numpy array values are different \\(25\\.0 %\\)
\\[left\\]:  \\[\\[1, 2\\], \\[3, 4\\]\\]
\\[right\\]: \\[\\[1, 3\\], \\[3, 4\\]\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([[1, 2], [3, 4]]),
                                 np.array([[1, 3], [3, 4]]))


def test_assert_numpy_array_equal_shape_mismatch_override():
    msg = """Index are different

Index shapes are different
\\[left\\]:  \\(2L*,\\)
\\[right\\]: \\(3L*,\\)"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([1, 2]),
                                 np.array([3, 4, 5]),
                                 obj="Index")


def test_numpy_array_equal_unicode():
    # see gh-20503
    #
    # Test ensures that `assert_numpy_array_equal` raises the right
    # exception when comparing np.arrays containing differing unicode objects.
    msg = """numpy array are different

numpy array values are different \\(33\\.33333 %\\)
\\[left\\]:  \\[á, à, ä\\]
\\[right\\]: \\[á, à, å\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([u"á", u"à", u"ä"]),
                                 np.array([u"á", u"à", u"å"]))


def test_numpy_array_equal_object():
    a = np.array([Timestamp("2011-01-01"), Timestamp("2011-01-01")])
    b = np.array([Timestamp("2011-01-01"), Timestamp("2011-01-02")])

    msg = """numpy array are different

numpy array values are different \\(50\\.0 %\\)
\\[left\\]:  \\[2011-01-01 00:00:00, 2011-01-01 00:00:00\\]
\\[right\\]: \\[2011-01-01 00:00:00, 2011-01-02 00:00:00\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(a, b)


@pytest.mark.parametrize("other_type", ["same", "copy"])
@pytest.mark.parametrize("check_same", ["same", "copy"])
def test_numpy_array_equal_copy_flag(other_type, check_same):
    a = np.array([1, 2, 3])
    msg = None

    if other_type == "same":
        other = a.view()
    else:
        other = a.copy()

    if check_same != other_type:
        msg = (r"array\(\[1, 2, 3\]\) is not array\(\[1, 2, 3\]\)"
               if check_same == "same"
               else r"array\(\[1, 2, 3\]\) is array\(\[1, 2, 3\]\)")

    if msg is not None:
        with pytest.raises(AssertionError, match=msg):
            assert_numpy_array_equal(a, other, check_same=check_same)
    else:
        assert_numpy_array_equal(a, other, check_same=check_same)
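As a quick illustration of the helper these tests exercise: assert_numpy_array_equal compares shape, dtype, and values, and returns silently on success. A minimal sketch (pandas.util.testing is the import path of this pandas vintage; newer releases expose the helper via pandas._testing):

import numpy as np
from pandas.util.testing import assert_numpy_array_equal

left = np.array([1, 2, 3])
assert_numpy_array_equal(left, left.copy())  # equal values: no error raised

try:
    assert_numpy_array_equal(left, np.array([1, 2, 4]))
except AssertionError as exc:
    # One of three elements differs, hence "(33.33333 %)" in the message,
    # following the same "numpy array are different" template the regexes
    # in the tests above match against.
    print(exc)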
MJuddBooth/pandas
pandas/tests/util/test_assert_numpy_array_equal.py
pandas/tests/extension/base/reshaping.py
# flake8: noqa

from .common import (
    is_array_like,
    is_bool,
    is_bool_dtype,
    is_categorical,
    is_categorical_dtype,
    is_complex,
    is_complex_dtype,
    is_datetime64_any_dtype,
    is_datetime64_dtype,
    is_datetime64_ns_dtype,
    is_datetime64tz_dtype,
    is_datetimetz,
    is_dict_like,
    is_dtype_equal,
    is_extension_array_dtype,
    is_extension_type,
    is_file_like,
    is_float,
    is_float_dtype,
    is_hashable,
    is_int64_dtype,
    is_integer,
    is_integer_dtype,
    is_interval,
    is_interval_dtype,
    is_iterator,
    is_list_like,
    is_named_tuple,
    is_number,
    is_numeric_dtype,
    is_object_dtype,
    is_period,
    is_period_dtype,
    is_re,
    is_re_compilable,
    is_scalar,
    is_signed_integer_dtype,
    is_sparse,
    is_string_dtype,
    is_timedelta64_dtype,
    is_timedelta64_ns_dtype,
    is_unsigned_integer_dtype,
    pandas_dtype)
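These re-exports back the public pandas.api.types namespace. A small usage sketch of the introspection helpers (standard pandas semantics, shown here for orientation):

import numpy as np
import pandas as pd
from pandas.api.types import (
    is_integer_dtype, is_list_like, is_scalar, pandas_dtype)

s = pd.Series([1, 2, 3])
assert is_integer_dtype(s)       # dtype checks accept arrays, Series, dtypes
assert is_list_like([1, 2])
assert not is_list_like("abc")   # strings are deliberately not list-like
assert is_scalar(3.14)
assert pandas_dtype("int64") == np.dtype("int64")  # string -> dtype resolution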
# -*- coding: utf-8 -*-

import numpy as np
import pytest

from pandas import Timestamp
from pandas.util.testing import assert_numpy_array_equal


def test_assert_numpy_array_equal_shape_mismatch():
    msg = """numpy array are different

numpy array shapes are different
\\[left\\]:  \\(2L*,\\)
\\[right\\]: \\(3L*,\\)"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([1, 2]), np.array([3, 4, 5]))


def test_assert_numpy_array_equal_bad_type():
    expected = "Expected type"

    with pytest.raises(AssertionError, match=expected):
        assert_numpy_array_equal(1, 2)


@pytest.mark.parametrize("a,b,klass1,klass2", [
    (np.array([1]), 1, "ndarray", "int"),
    (1, np.array([1]), "int", "ndarray"),
])
def test_assert_numpy_array_equal_class_mismatch(a, b, klass1, klass2):
    msg = """numpy array are different

numpy array classes are different
\\[left\\]:  {klass1}
\\[right\\]: {klass2}""".format(klass1=klass1, klass2=klass2)

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(a, b)


def test_assert_numpy_array_equal_value_mismatch1():
    msg = """numpy array are different

numpy array values are different \\(66\\.66667 %\\)
\\[left\\]:  \\[nan, 2\\.0, 3\\.0\\]
\\[right\\]: \\[1\\.0, nan, 3\\.0\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([np.nan, 2, 3]),
                                 np.array([1, np.nan, 3]))


def test_assert_numpy_array_equal_value_mismatch2():
    msg = """numpy array are different

numpy array values are different \\(50\\.0 %\\)
\\[left\\]:  \\[1, 2\\]
\\[right\\]: \\[1, 3\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([1, 2]), np.array([1, 3]))


def test_assert_numpy_array_equal_value_mismatch3():
    msg = """numpy array are different

numpy array values are different \\(16\\.66667 %\\)
\\[left\\]:  \\[\\[1, 2\\], \\[3, 4\\], \\[5, 6\\]\\]
\\[right\\]: \\[\\[1, 3\\], \\[3, 4\\], \\[5, 6\\]\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([[1, 2], [3, 4], [5, 6]]),
                                 np.array([[1, 3], [3, 4], [5, 6]]))


def test_assert_numpy_array_equal_value_mismatch4():
    msg = """numpy array are different

numpy array values are different \\(50\\.0 %\\)
\\[left\\]:  \\[1\\.1, 2\\.000001\\]
\\[right\\]: \\[1\\.1, 2.0\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([1.1, 2.000001]),
                                 np.array([1.1, 2.0]))


def test_assert_numpy_array_equal_value_mismatch5():
    msg = """numpy array are different

numpy array values are different \\(16\\.66667 %\\)
\\[left\\]:  \\[\\[1, 2\\], \\[3, 4\\], \\[5, 6\\]\\]
\\[right\\]: \\[\\[1, 3\\], \\[3, 4\\], \\[5, 6\\]\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([[1, 2], [3, 4], [5, 6]]),
                                 np.array([[1, 3], [3, 4], [5, 6]]))


def test_assert_numpy_array_equal_value_mismatch6():
    msg = """numpy array are different

numpy array values are different \\(25\\.0 %\\)
\\[left\\]:  \\[\\[1, 2\\], \\[3, 4\\]\\]
\\[right\\]: \\[\\[1, 3\\], \\[3, 4\\]\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([[1, 2], [3, 4]]),
                                 np.array([[1, 3], [3, 4]]))


def test_assert_numpy_array_equal_shape_mismatch_override():
    msg = """Index are different

Index shapes are different
\\[left\\]:  \\(2L*,\\)
\\[right\\]: \\(3L*,\\)"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([1, 2]),
                                 np.array([3, 4, 5]),
                                 obj="Index")


def test_numpy_array_equal_unicode():
    # see gh-20503
    #
    # Test ensures that `assert_numpy_array_equal` raises the right
    # exception when comparing np.arrays containing differing unicode objects.
    msg = """numpy array are different

numpy array values are different \\(33\\.33333 %\\)
\\[left\\]:  \\[á, à, ä\\]
\\[right\\]: \\[á, à, å\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(np.array([u"á", u"à", u"ä"]),
                                 np.array([u"á", u"à", u"å"]))


def test_numpy_array_equal_object():
    a = np.array([Timestamp("2011-01-01"), Timestamp("2011-01-01")])
    b = np.array([Timestamp("2011-01-01"), Timestamp("2011-01-02")])

    msg = """numpy array are different

numpy array values are different \\(50\\.0 %\\)
\\[left\\]:  \\[2011-01-01 00:00:00, 2011-01-01 00:00:00\\]
\\[right\\]: \\[2011-01-01 00:00:00, 2011-01-02 00:00:00\\]"""

    with pytest.raises(AssertionError, match=msg):
        assert_numpy_array_equal(a, b)


@pytest.mark.parametrize("other_type", ["same", "copy"])
@pytest.mark.parametrize("check_same", ["same", "copy"])
def test_numpy_array_equal_copy_flag(other_type, check_same):
    a = np.array([1, 2, 3])
    msg = None

    if other_type == "same":
        other = a.view()
    else:
        other = a.copy()

    if check_same != other_type:
        msg = (r"array\(\[1, 2, 3\]\) is not array\(\[1, 2, 3\]\)"
               if check_same == "same"
               else r"array\(\[1, 2, 3\]\) is array\(\[1, 2, 3\]\)")

    if msg is not None:
        with pytest.raises(AssertionError, match=msg):
            assert_numpy_array_equal(a, other, check_same=check_same)
    else:
        assert_numpy_array_equal(a, other, check_same=check_same)
MJuddBooth/pandas
pandas/tests/util/test_assert_numpy_array_equal.py
pandas/core/dtypes/api.py
# -*- coding: utf-8 -*- from collections import defaultdict from functools import partial import itertools import operator import re import numpy as np from pandas._libs import internals as libinternals, lib from pandas.compat import map, range, zip from pandas.util._validators import validate_bool_kwarg from pandas.core.dtypes.cast import ( find_common_type, infer_dtype_from_scalar, maybe_convert_objects, maybe_promote) from pandas.core.dtypes.common import ( _NS_DTYPE, is_datetimelike_v_numeric, is_extension_array_dtype, is_extension_type, is_list_like, is_numeric_v_string_like, is_scalar) import pandas.core.dtypes.concat as _concat from pandas.core.dtypes.generic import ABCExtensionArray, ABCSeries from pandas.core.dtypes.missing import isna import pandas.core.algorithms as algos from pandas.core.arrays.sparse import _maybe_to_sparse from pandas.core.base import PandasObject from pandas.core.index import Index, MultiIndex, ensure_index from pandas.core.indexing import maybe_convert_indices from pandas.io.formats.printing import pprint_thing from .blocks import ( Block, CategoricalBlock, DatetimeTZBlock, ExtensionBlock, ObjectValuesExtensionBlock, _extend_blocks, _merge_blocks, _safe_reshape, get_block_type, make_block) from .concat import ( # all for concatenate_block_managers combine_concat_plans, concatenate_join_units, get_mgr_concatenation_plan, is_uniform_join_units) # TODO: flexible with index=None and/or items=None class BlockManager(PandasObject): """ Core internal data structure to implement DataFrame, Series, Panel, etc. Manage a bunch of labeled 2D mixed-type ndarrays. Essentially it's a lightweight blocked set of labeled data to be manipulated by the DataFrame public API class Attributes ---------- shape ndim axes values items Methods ------- set_axis(axis, new_labels) copy(deep=True) get_dtype_counts get_ftype_counts get_dtypes get_ftypes apply(func, axes, block_filter_fn) get_bool_data get_numeric_data get_slice(slice_like, axis) get(label) iget(loc) take(indexer, axis) reindex_axis(new_labels, axis) reindex_indexer(new_labels, indexer, axis) delete(label) insert(loc, label, value) set(label, value) Parameters ---------- Notes ----- This is *not* a public API class """ __slots__ = ['axes', 'blocks', '_ndim', '_shape', '_known_consolidated', '_is_consolidated', '_blknos', '_blklocs'] def __init__(self, blocks, axes, do_integrity_check=True): self.axes = [ensure_index(ax) for ax in axes] self.blocks = tuple(blocks) for block in blocks: if block.is_sparse: if len(block.mgr_locs) != 1: raise AssertionError("Sparse block refers to multiple " "items") else: if self.ndim != block.ndim: raise AssertionError( 'Number of Block dimensions ({block}) must equal ' 'number of axes ({self})'.format(block=block.ndim, self=self.ndim)) if do_integrity_check: self._verify_integrity() self._consolidate_check() self._rebuild_blknos_and_blklocs() def make_empty(self, axes=None): """ return an empty BlockManager with the items axis of len 0 """ if axes is None: axes = [ensure_index([])] + [ensure_index(a) for a in self.axes[1:]] # preserve dtype if possible if self.ndim == 1: blocks = np.array([], dtype=self.array_dtype) else: blocks = [] return self.__class__(blocks, axes) def __nonzero__(self): return True # Python3 compat __bool__ = __nonzero__ @property def shape(self): return tuple(len(ax) for ax in self.axes) @property def ndim(self): return len(self.axes) def set_axis(self, axis, new_labels): new_labels = ensure_index(new_labels) old_len = len(self.axes[axis]) new_len = len(new_labels) if 
new_len != old_len: raise ValueError( 'Length mismatch: Expected axis has {old} elements, new ' 'values have {new} elements'.format(old=old_len, new=new_len)) self.axes[axis] = new_labels def rename_axis(self, mapper, axis, copy=True, level=None): """ Rename one of axes. Parameters ---------- mapper : unary callable axis : int copy : boolean, default True level : int, default None """ obj = self.copy(deep=copy) obj.set_axis(axis, _transform_index(self.axes[axis], mapper, level)) return obj @property def _is_single_block(self): if self.ndim == 1: return True if len(self.blocks) != 1: return False blk = self.blocks[0] return (blk.mgr_locs.is_slice_like and blk.mgr_locs.as_slice == slice(0, len(self), 1)) def _rebuild_blknos_and_blklocs(self): """ Update mgr._blknos / mgr._blklocs. """ new_blknos = np.empty(self.shape[0], dtype=np.int64) new_blklocs = np.empty(self.shape[0], dtype=np.int64) new_blknos.fill(-1) new_blklocs.fill(-1) for blkno, blk in enumerate(self.blocks): rl = blk.mgr_locs new_blknos[rl.indexer] = blkno new_blklocs[rl.indexer] = np.arange(len(rl)) if (new_blknos == -1).any(): raise AssertionError("Gaps in blk ref_locs") self._blknos = new_blknos self._blklocs = new_blklocs @property def items(self): return self.axes[0] def _get_counts(self, f): """ return a dict of the counts of the function in BlockManager """ self._consolidate_inplace() counts = dict() for b in self.blocks: v = f(b) counts[v] = counts.get(v, 0) + b.shape[0] return counts def get_dtype_counts(self): return self._get_counts(lambda b: b.dtype.name) def get_ftype_counts(self): return self._get_counts(lambda b: b.ftype) def get_dtypes(self): dtypes = np.array([blk.dtype for blk in self.blocks]) return algos.take_1d(dtypes, self._blknos, allow_fill=False) def get_ftypes(self): ftypes = np.array([blk.ftype for blk in self.blocks]) return algos.take_1d(ftypes, self._blknos, allow_fill=False) def __getstate__(self): block_values = [b.values for b in self.blocks] block_items = [self.items[b.mgr_locs.indexer] for b in self.blocks] axes_array = [ax for ax in self.axes] extra_state = { '0.14.1': { 'axes': axes_array, 'blocks': [dict(values=b.values, mgr_locs=b.mgr_locs.indexer) for b in self.blocks] } } # First three elements of the state are to maintain forward # compatibility with 0.13.1. return axes_array, block_values, block_items, extra_state def __setstate__(self, state): def unpickle_block(values, mgr_locs): return make_block(values, placement=mgr_locs) if (isinstance(state, tuple) and len(state) >= 4 and '0.14.1' in state[3]): state = state[3]['0.14.1'] self.axes = [ensure_index(ax) for ax in state['axes']] self.blocks = tuple(unpickle_block(b['values'], b['mgr_locs']) for b in state['blocks']) else: # discard anything after 3rd, support beta pickling format for a # little while longer ax_arrays, bvalues, bitems = state[:3] self.axes = [ensure_index(ax) for ax in ax_arrays] if len(bitems) == 1 and self.axes[0].equals(bitems[0]): # This is a workaround for pre-0.14.1 pickles that didn't # support unpickling multi-block frames/panels with non-unique # columns/items, because given a manager with items ["a", "b", # "a"] there's no way of knowing which block's "a" is where. # # Single-block case can be supported under the assumption that # block items corresponded to manager items 1-to-1. 
all_mgr_locs = [slice(0, len(bitems[0]))] else: all_mgr_locs = [self.axes[0].get_indexer(blk_items) for blk_items in bitems] self.blocks = tuple( unpickle_block(values, mgr_locs) for values, mgr_locs in zip(bvalues, all_mgr_locs)) self._post_setstate() def _post_setstate(self): self._is_consolidated = False self._known_consolidated = False self._rebuild_blknos_and_blklocs() def __len__(self): return len(self.items) def __unicode__(self): output = pprint_thing(self.__class__.__name__) for i, ax in enumerate(self.axes): if i == 0: output += u'\nItems: {ax}'.format(ax=ax) else: output += u'\nAxis {i}: {ax}'.format(i=i, ax=ax) for block in self.blocks: output += u'\n{block}'.format(block=pprint_thing(block)) return output def _verify_integrity(self): mgr_shape = self.shape tot_items = sum(len(x.mgr_locs) for x in self.blocks) for block in self.blocks: if block._verify_integrity and block.shape[1:] != mgr_shape[1:]: construction_error(tot_items, block.shape[1:], self.axes) if len(self.items) != tot_items: raise AssertionError('Number of manager items must equal union of ' 'block items\n# manager items: {0}, # ' 'tot_items: {1}'.format( len(self.items), tot_items)) def apply(self, f, axes=None, filter=None, do_integrity_check=False, consolidate=True, **kwargs): """ iterate over the blocks, collect and create a new block manager Parameters ---------- f : the callable or function name to operate on at the block level axes : optional (if not supplied, use self.axes) filter : list, if supplied, only call the block if the filter is in the block do_integrity_check : boolean, default False. Do the block manager integrity check consolidate: boolean, default True. Join together blocks having same dtype Returns ------- Block Manager (new object) """ result_blocks = [] # filter kwarg is used in replace-* family of methods if filter is not None: filter_locs = set(self.items.get_indexer_for(filter)) if len(filter_locs) == len(self.items): # All items are included, as if there were no filtering filter = None else: kwargs['filter'] = filter_locs if consolidate: self._consolidate_inplace() if f == 'where': align_copy = True if kwargs.get('align', True): align_keys = ['other', 'cond'] else: align_keys = ['cond'] elif f == 'putmask': align_copy = False if kwargs.get('align', True): align_keys = ['new', 'mask'] else: align_keys = ['mask'] elif f == 'fillna': # fillna internally does putmask, maybe it's better to do this # at mgr, not block level? align_copy = False align_keys = ['value'] else: align_keys = [] # TODO(EA): may interfere with ExtensionBlock.setitem for blocks # with a .values attribute. aligned_args = {k: kwargs[k] for k in align_keys if hasattr(kwargs[k], 'values') and not isinstance(kwargs[k], ABCExtensionArray)} for b in self.blocks: if filter is not None: if not b.mgr_locs.isin(filter_locs).any(): result_blocks.append(b) continue if aligned_args: b_items = self.items[b.mgr_locs.indexer] for k, obj in aligned_args.items(): axis = getattr(obj, '_info_axis_number', 0) kwargs[k] = obj.reindex(b_items, axis=axis, copy=align_copy) applied = getattr(b, f)(**kwargs) result_blocks = _extend_blocks(applied, result_blocks) if len(result_blocks) == 0: return self.make_empty(axes or self.axes) bm = self.__class__(result_blocks, axes or self.axes, do_integrity_check=do_integrity_check) bm._consolidate_inplace() return bm def quantile(self, axis=0, consolidate=True, transposed=False, interpolation='linear', qs=None, numeric_only=None): """ Iterate over blocks applying quantile reduction. 
This routine is intended for reduction type operations and will do inference on the generated blocks. Parameters ---------- axis: reduction axis, default 0 consolidate: boolean, default True. Join together blocks having same dtype transposed: boolean, default False we are holding transposed data interpolation : type of interpolation, default 'linear' qs : a scalar or list of the quantiles to be computed numeric_only : ignored Returns ------- Block Manager (new object) """ # Series dispatches to DataFrame for quantile, which allows us to # simplify some of the code here and in the blocks assert self.ndim >= 2 if consolidate: self._consolidate_inplace() def get_axe(block, qs, axes): from pandas import Float64Index if is_list_like(qs): ax = Float64Index(qs) elif block.ndim == 1: ax = Float64Index([qs]) else: ax = axes[0] return ax axes, blocks = [], [] for b in self.blocks: block = b.quantile(axis=axis, qs=qs, interpolation=interpolation) axe = get_axe(b, qs, axes=self.axes) axes.append(axe) blocks.append(block) # note that some DatetimeTZ, Categorical are always ndim==1 ndim = {b.ndim for b in blocks} assert 0 not in ndim, ndim if 2 in ndim: new_axes = list(self.axes) # multiple blocks that are reduced if len(blocks) > 1: new_axes[1] = axes[0] # reset the placement to the original for b, sb in zip(blocks, self.blocks): b.mgr_locs = sb.mgr_locs else: new_axes[axis] = Index(np.concatenate( [ax.values for ax in axes])) if transposed: new_axes = new_axes[::-1] blocks = [b.make_block(b.values.T, placement=np.arange(b.shape[1]) ) for b in blocks] return self.__class__(blocks, new_axes) # single block, i.e. ndim == {1} values = _concat._concat_compat([b.values for b in blocks]) # compute the orderings of our original data if len(self.blocks) > 1: indexer = np.empty(len(self.axes[0]), dtype=np.intp) i = 0 for b in self.blocks: for j in b.mgr_locs: indexer[j] = i i = i + 1 values = values.take(indexer) return SingleBlockManager( [make_block(values, ndim=1, placement=np.arange(len(values)))], axes[0]) def isna(self, func, **kwargs): return self.apply('apply', func=func, **kwargs) def where(self, **kwargs): return self.apply('where', **kwargs) def setitem(self, **kwargs): return self.apply('setitem', **kwargs) def putmask(self, **kwargs): return self.apply('putmask', **kwargs) def diff(self, **kwargs): return self.apply('diff', **kwargs) def interpolate(self, **kwargs): return self.apply('interpolate', **kwargs) def shift(self, **kwargs): return self.apply('shift', **kwargs) def fillna(self, **kwargs): return self.apply('fillna', **kwargs) def downcast(self, **kwargs): return self.apply('downcast', **kwargs) def astype(self, dtype, **kwargs): return self.apply('astype', dtype=dtype, **kwargs) def convert(self, **kwargs): return self.apply('convert', **kwargs) def replace(self, **kwargs): return self.apply('replace', **kwargs) def replace_list(self, src_list, dest_list, inplace=False, regex=False): """ do a list replace """ inplace = validate_bool_kwarg(inplace, 'inplace') # figure out our mask a-priori to avoid repeated replacements values = self.as_array() def comp(s, regex=False): """ Generate a bool array by perform an equality check, or perform an element-wise regular expression matching """ if isna(s): return isna(values) if hasattr(s, 'asm8'): return _compare_or_regex_search(maybe_convert_objects(values), getattr(s, 'asm8'), regex) return _compare_or_regex_search(values, s, regex) masks = [comp(s, regex) for i, s in enumerate(src_list)] result_blocks = [] src_len = len(src_list) - 1 for blk in 
self.blocks: # its possible to get multiple result blocks here # replace ALWAYS will return a list rb = [blk if inplace else blk.copy()] for i, (s, d) in enumerate(zip(src_list, dest_list)): new_rb = [] for b in rb: m = masks[i][b.mgr_locs.indexer] convert = i == src_len result = b._replace_coerce(mask=m, to_replace=s, value=d, inplace=inplace, convert=convert, regex=regex) if m.any(): new_rb = _extend_blocks(result, new_rb) else: new_rb.append(b) rb = new_rb result_blocks.extend(rb) bm = self.__class__(result_blocks, self.axes) bm._consolidate_inplace() return bm def is_consolidated(self): """ Return True if more than one block with the same dtype """ if not self._known_consolidated: self._consolidate_check() return self._is_consolidated def _consolidate_check(self): ftypes = [blk.ftype for blk in self.blocks] self._is_consolidated = len(ftypes) == len(set(ftypes)) self._known_consolidated = True @property def is_mixed_type(self): # Warning, consolidation needs to get checked upstairs self._consolidate_inplace() return len(self.blocks) > 1 @property def is_numeric_mixed_type(self): # Warning, consolidation needs to get checked upstairs self._consolidate_inplace() return all(block.is_numeric for block in self.blocks) @property def is_datelike_mixed_type(self): # Warning, consolidation needs to get checked upstairs self._consolidate_inplace() return any(block.is_datelike for block in self.blocks) @property def any_extension_types(self): """Whether any of the blocks in this manager are extension blocks""" return any(block.is_extension for block in self.blocks) @property def is_view(self): """ return a boolean if we are a single block and are a view """ if len(self.blocks) == 1: return self.blocks[0].is_view # It is technically possible to figure out which blocks are views # e.g. [ b.values.base is not None for b in self.blocks ] # but then we have the case of possibly some blocks being a view # and some blocks not. setting in theory is possible on the non-view # blocks w/o causing a SettingWithCopy raise/warn. 
But this is a bit # complicated return False def get_bool_data(self, copy=False): """ Parameters ---------- copy : boolean, default False Whether to copy the blocks """ self._consolidate_inplace() return self.combine([b for b in self.blocks if b.is_bool], copy) def get_numeric_data(self, copy=False): """ Parameters ---------- copy : boolean, default False Whether to copy the blocks """ self._consolidate_inplace() return self.combine([b for b in self.blocks if b.is_numeric], copy) def combine(self, blocks, copy=True): """ return a new manager with the blocks """ if len(blocks) == 0: return self.make_empty() # FIXME: optimization potential indexer = np.sort(np.concatenate([b.mgr_locs.as_array for b in blocks])) inv_indexer = lib.get_reverse_indexer(indexer, self.shape[0]) new_blocks = [] for b in blocks: b = b.copy(deep=copy) b.mgr_locs = algos.take_1d(inv_indexer, b.mgr_locs.as_array, axis=0, allow_fill=False) new_blocks.append(b) axes = list(self.axes) axes[0] = self.items.take(indexer) return self.__class__(new_blocks, axes, do_integrity_check=False) def get_slice(self, slobj, axis=0): if axis >= self.ndim: raise IndexError("Requested axis not found in manager") if axis == 0: new_blocks = self._slice_take_blocks_ax0(slobj) else: slicer = [slice(None)] * (axis + 1) slicer[axis] = slobj slicer = tuple(slicer) new_blocks = [blk.getitem_block(slicer) for blk in self.blocks] new_axes = list(self.axes) new_axes[axis] = new_axes[axis][slobj] bm = self.__class__(new_blocks, new_axes, do_integrity_check=False) bm._consolidate_inplace() return bm def __contains__(self, item): return item in self.items @property def nblocks(self): return len(self.blocks) def copy(self, deep=True): """ Make deep or shallow copy of BlockManager Parameters ---------- deep : boolean o rstring, default True If False, return shallow copy (do not copy data) If 'all', copy data and a deep copy of the index Returns ------- copy : BlockManager """ # this preserves the notion of view copying of axes if deep: if deep == 'all': copy = lambda ax: ax.copy(deep=True) else: copy = lambda ax: ax.view() new_axes = [copy(ax) for ax in self.axes] else: new_axes = list(self.axes) return self.apply('copy', axes=new_axes, deep=deep, do_integrity_check=False) def as_array(self, transpose=False, items=None): """Convert the blockmanager data into an numpy array. Parameters ---------- transpose : boolean, default False If True, transpose the return array items : list of strings or None Names of block items that will be included in the returned array. ``None`` means that all block items will be used Returns ------- arr : ndarray """ if len(self.blocks) == 0: arr = np.empty(self.shape, dtype=float) return arr.transpose() if transpose else arr if items is not None: mgr = self.reindex_axis(items, axis=0) else: mgr = self if self._is_single_block and mgr.blocks[0].is_datetimetz: # TODO(Block.get_values): Make DatetimeTZBlock.get_values # always be object dtype. Some callers seem to want the # DatetimeArray (previously DTI) arr = mgr.blocks[0].get_values(dtype=object) elif self._is_single_block or not self.is_mixed_type: arr = np.asarray(mgr.blocks[0].get_values()) else: arr = mgr._interleave() return arr.transpose() if transpose else arr def _interleave(self): """ Return ndarray from blocks with specified item order Items must be contained in the blocks """ from pandas.core.dtypes.common import is_sparse dtype = _interleaved_dtype(self.blocks) # TODO: https://github.com/pandas-dev/pandas/issues/22791 # Give EAs some input on what happens here. 
Sparse needs this. if is_sparse(dtype): dtype = dtype.subtype elif is_extension_array_dtype(dtype): dtype = 'object' result = np.empty(self.shape, dtype=dtype) itemmask = np.zeros(self.shape[0]) for blk in self.blocks: rl = blk.mgr_locs result[rl.indexer] = blk.get_values(dtype) itemmask[rl.indexer] = 1 if not itemmask.all(): raise AssertionError('Some items were not contained in blocks') return result def to_dict(self, copy=True): """ Return a dict of str(dtype) -> BlockManager Parameters ---------- copy : boolean, default True Returns ------- values : a dict of dtype -> BlockManager Notes ----- This consolidates based on str(dtype) """ self._consolidate_inplace() bd = {} for b in self.blocks: bd.setdefault(str(b.dtype), []).append(b) return {dtype: self.combine(blocks, copy=copy) for dtype, blocks in bd.items()} def xs(self, key, axis=1, copy=True, takeable=False): if axis < 1: raise AssertionError( 'Can only take xs across axis >= 1, got {ax}'.format(ax=axis)) # take by position if takeable: loc = key else: loc = self.axes[axis].get_loc(key) slicer = [slice(None, None) for _ in range(self.ndim)] slicer[axis] = loc slicer = tuple(slicer) new_axes = list(self.axes) # could be an array indexer! if isinstance(loc, (slice, np.ndarray)): new_axes[axis] = new_axes[axis][loc] else: new_axes.pop(axis) new_blocks = [] if len(self.blocks) > 1: # we must copy here as we are mixed type for blk in self.blocks: newb = make_block(values=blk.values[slicer], klass=blk.__class__, placement=blk.mgr_locs) new_blocks.append(newb) elif len(self.blocks) == 1: block = self.blocks[0] vals = block.values[slicer] if copy: vals = vals.copy() new_blocks = [make_block(values=vals, placement=block.mgr_locs, klass=block.__class__)] return self.__class__(new_blocks, new_axes) def fast_xs(self, loc): """ get a cross sectional for a given location in the items ; handle dups return the result, is *could* be a view in the case of a single block """ if len(self.blocks) == 1: return self.blocks[0].iget((slice(None), loc)) items = self.items # non-unique (GH4726) if not items.is_unique: result = self._interleave() if self.ndim == 2: result = result.T return result[loc] # unique dtype = _interleaved_dtype(self.blocks) n = len(items) if is_extension_array_dtype(dtype): # we'll eventually construct an ExtensionArray. result = np.empty(n, dtype=object) else: result = np.empty(n, dtype=dtype) for blk in self.blocks: # Such assignment may incorrectly coerce NaT to None # result[blk.mgr_locs] = blk._slice((slice(None), loc)) for i, rl in enumerate(blk.mgr_locs): result[rl] = blk._try_coerce_result(blk.iget((i, loc))) if is_extension_array_dtype(dtype): result = dtype.construct_array_type()._from_sequence( result, dtype=dtype ) return result def consolidate(self): """ Join together blocks having same dtype Returns ------- y : BlockManager """ if self.is_consolidated(): return self bm = self.__class__(self.blocks, self.axes) bm._is_consolidated = False bm._consolidate_inplace() return bm def _consolidate_inplace(self): if not self.is_consolidated(): self.blocks = tuple(_consolidate(self.blocks)) self._is_consolidated = True self._known_consolidated = True self._rebuild_blknos_and_blklocs() def get(self, item, fastpath=True): """ Return values for selected item (ndarray or BlockManager). 
""" if self.items.is_unique: if not isna(item): loc = self.items.get_loc(item) else: indexer = np.arange(len(self.items))[isna(self.items)] # allow a single nan location indexer if not is_scalar(indexer): if len(indexer) == 1: loc = indexer.item() else: raise ValueError("cannot label index with a null key") return self.iget(loc, fastpath=fastpath) else: if isna(item): raise TypeError("cannot label index with a null key") indexer = self.items.get_indexer_for([item]) return self.reindex_indexer(new_axis=self.items[indexer], indexer=indexer, axis=0, allow_dups=True) def iget(self, i, fastpath=True): """ Return the data as a SingleBlockManager if fastpath=True and possible Otherwise return as a ndarray """ block = self.blocks[self._blknos[i]] values = block.iget(self._blklocs[i]) if not fastpath or not block._box_to_block_values or values.ndim != 1: return values # fastpath shortcut for select a single-dim from a 2-dim BM return SingleBlockManager( [block.make_block_same_class(values, placement=slice(0, len(values)), ndim=1)], self.axes[1]) def delete(self, item): """ Delete selected item (items if non-unique) in-place. """ indexer = self.items.get_loc(item) is_deleted = np.zeros(self.shape[0], dtype=np.bool_) is_deleted[indexer] = True ref_loc_offset = -is_deleted.cumsum() is_blk_deleted = [False] * len(self.blocks) if isinstance(indexer, int): affected_start = indexer else: affected_start = is_deleted.nonzero()[0][0] for blkno, _ in _fast_count_smallints(self._blknos[affected_start:]): blk = self.blocks[blkno] bml = blk.mgr_locs blk_del = is_deleted[bml.indexer].nonzero()[0] if len(blk_del) == len(bml): is_blk_deleted[blkno] = True continue elif len(blk_del) != 0: blk.delete(blk_del) bml = blk.mgr_locs blk.mgr_locs = bml.add(ref_loc_offset[bml.indexer]) # FIXME: use Index.delete as soon as it uses fastpath=True self.axes[0] = self.items[~is_deleted] self.blocks = tuple(b for blkno, b in enumerate(self.blocks) if not is_blk_deleted[blkno]) self._shape = None self._rebuild_blknos_and_blklocs() def set(self, item, value): """ Set new item in-place. Does not consolidate. 
Adds new Block if not contained in the current set of items """ # FIXME: refactor, clearly separate broadcasting & zip-like assignment # can prob also fix the various if tests for sparse/categorical # TODO(EA): Remove an is_extension_ when all extension types satisfy # the interface value_is_extension_type = (is_extension_type(value) or is_extension_array_dtype(value)) # categorical/spares/datetimetz if value_is_extension_type: def value_getitem(placement): return value else: if value.ndim == self.ndim - 1: value = _safe_reshape(value, (1,) + value.shape) def value_getitem(placement): return value else: def value_getitem(placement): return value[placement.indexer] if value.shape[1:] != self.shape[1:]: raise AssertionError('Shape of new values must be compatible ' 'with manager shape') try: loc = self.items.get_loc(item) except KeyError: # This item wasn't present, just insert at end self.insert(len(self.items), item, value) return if isinstance(loc, int): loc = [loc] blknos = self._blknos[loc] blklocs = self._blklocs[loc].copy() unfit_mgr_locs = [] unfit_val_locs = [] removed_blknos = [] for blkno, val_locs in libinternals.get_blkno_placements(blknos, self.nblocks, group=True): blk = self.blocks[blkno] blk_locs = blklocs[val_locs.indexer] if blk.should_store(value): blk.set(blk_locs, value_getitem(val_locs)) else: unfit_mgr_locs.append(blk.mgr_locs.as_array[blk_locs]) unfit_val_locs.append(val_locs) # If all block items are unfit, schedule the block for removal. if len(val_locs) == len(blk.mgr_locs): removed_blknos.append(blkno) else: self._blklocs[blk.mgr_locs.indexer] = -1 blk.delete(blk_locs) self._blklocs[blk.mgr_locs.indexer] = np.arange(len(blk)) if len(removed_blknos): # Remove blocks & update blknos accordingly is_deleted = np.zeros(self.nblocks, dtype=np.bool_) is_deleted[removed_blknos] = True new_blknos = np.empty(self.nblocks, dtype=np.int64) new_blknos.fill(-1) new_blknos[~is_deleted] = np.arange(self.nblocks - len(removed_blknos)) self._blknos = algos.take_1d(new_blknos, self._blknos, axis=0, allow_fill=False) self.blocks = tuple(blk for i, blk in enumerate(self.blocks) if i not in set(removed_blknos)) if unfit_val_locs: unfit_mgr_locs = np.concatenate(unfit_mgr_locs) unfit_count = len(unfit_mgr_locs) new_blocks = [] if value_is_extension_type: # This code (ab-)uses the fact that sparse blocks contain only # one item. new_blocks.extend( make_block(values=value.copy(), ndim=self.ndim, placement=slice(mgr_loc, mgr_loc + 1)) for mgr_loc in unfit_mgr_locs) self._blknos[unfit_mgr_locs] = (np.arange(unfit_count) + len(self.blocks)) self._blklocs[unfit_mgr_locs] = 0 else: # unfit_val_locs contains BlockPlacement objects unfit_val_items = unfit_val_locs[0].append(unfit_val_locs[1:]) new_blocks.append( make_block(values=value_getitem(unfit_val_items), ndim=self.ndim, placement=unfit_mgr_locs)) self._blknos[unfit_mgr_locs] = len(self.blocks) self._blklocs[unfit_mgr_locs] = np.arange(unfit_count) self.blocks += tuple(new_blocks) # Newly created block's dtype may already be present. self._known_consolidated = False def insert(self, loc, item, value, allow_duplicates=False): """ Insert item at selected position. Parameters ---------- loc : int item : hashable value : array_like allow_duplicates: bool If False, trying to insert non-unique item will raise """ if not allow_duplicates and item in self.items: # Should this be a different kind of error?? 
raise ValueError('cannot insert {}, already exists'.format(item)) if not isinstance(loc, int): raise TypeError("loc must be int") # insert to the axis; this could possibly raise a TypeError new_axis = self.items.insert(loc, item) block = make_block(values=value, ndim=self.ndim, placement=slice(loc, loc + 1)) for blkno, count in _fast_count_smallints(self._blknos[loc:]): blk = self.blocks[blkno] if count == len(blk.mgr_locs): blk.mgr_locs = blk.mgr_locs.add(1) else: new_mgr_locs = blk.mgr_locs.as_array.copy() new_mgr_locs[new_mgr_locs >= loc] += 1 blk.mgr_locs = new_mgr_locs if loc == self._blklocs.shape[0]: # np.append is a lot faster, let's use it if we can. self._blklocs = np.append(self._blklocs, 0) self._blknos = np.append(self._blknos, len(self.blocks)) else: self._blklocs = np.insert(self._blklocs, loc, 0) self._blknos = np.insert(self._blknos, loc, len(self.blocks)) self.axes[0] = new_axis self.blocks += (block,) self._shape = None self._known_consolidated = False if len(self.blocks) > 100: self._consolidate_inplace() def reindex_axis(self, new_index, axis, method=None, limit=None, fill_value=None, copy=True): """ Conform block manager to new index. """ new_index = ensure_index(new_index) new_index, indexer = self.axes[axis].reindex(new_index, method=method, limit=limit) return self.reindex_indexer(new_index, indexer, axis=axis, fill_value=fill_value, copy=copy) def reindex_indexer(self, new_axis, indexer, axis, fill_value=None, allow_dups=False, copy=True): """ Parameters ---------- new_axis : Index indexer : ndarray of int64 or None axis : int fill_value : object allow_dups : bool pandas-indexer with -1's only. """ if indexer is None: if new_axis is self.axes[axis] and not copy: return self result = self.copy(deep=copy) result.axes = list(self.axes) result.axes[axis] = new_axis return result self._consolidate_inplace() # some axes don't allow reindexing with dups if not allow_dups: self.axes[axis]._can_reindex(indexer) if axis >= self.ndim: raise IndexError("Requested axis not found in manager") if axis == 0: new_blocks = self._slice_take_blocks_ax0(indexer, fill_tuple=(fill_value,)) else: new_blocks = [blk.take_nd(indexer, axis=axis, fill_tuple=( fill_value if fill_value is not None else blk.fill_value,)) for blk in self.blocks] new_axes = list(self.axes) new_axes[axis] = new_axis return self.__class__(new_blocks, new_axes) def _slice_take_blocks_ax0(self, slice_or_indexer, fill_tuple=None): """ Slice/take blocks along axis=0. Overloaded for SingleBlock Returns ------- new_blocks : list of Block """ allow_fill = fill_tuple is not None sl_type, slobj, sllen = _preprocess_slice_or_indexer( slice_or_indexer, self.shape[0], allow_fill=allow_fill) if self._is_single_block: blk = self.blocks[0] if sl_type in ('slice', 'mask'): return [blk.getitem_block(slobj, new_mgr_locs=slice(0, sllen))] elif not allow_fill or self.ndim == 1: if allow_fill and fill_tuple[0] is None: _, fill_value = maybe_promote(blk.dtype) fill_tuple = (fill_value, ) return [blk.take_nd(slobj, axis=0, new_mgr_locs=slice(0, sllen), fill_tuple=fill_tuple)] if sl_type in ('slice', 'mask'): blknos = self._blknos[slobj] blklocs = self._blklocs[slobj] else: blknos = algos.take_1d(self._blknos, slobj, fill_value=-1, allow_fill=allow_fill) blklocs = algos.take_1d(self._blklocs, slobj, fill_value=-1, allow_fill=allow_fill) # When filling blknos, make sure blknos is updated before appending to # blocks list, that way new blkno is exactly len(blocks). 
# # FIXME: mgr_groupby_blknos must return mgr_locs in ascending order, # pytables serialization will break otherwise. blocks = [] for blkno, mgr_locs in libinternals.get_blkno_placements(blknos, self.nblocks, group=True): if blkno == -1: # If we've got here, fill_tuple was not None. fill_value = fill_tuple[0] blocks.append(self._make_na_block(placement=mgr_locs, fill_value=fill_value)) else: blk = self.blocks[blkno] # Otherwise, slicing along items axis is necessary. if not blk._can_consolidate: # A non-consolidatable block, it's easy, because there's # only one item and each mgr loc is a copy of that single # item. for mgr_loc in mgr_locs: newblk = blk.copy(deep=True) newblk.mgr_locs = slice(mgr_loc, mgr_loc + 1) blocks.append(newblk) else: blocks.append(blk.take_nd(blklocs[mgr_locs.indexer], axis=0, new_mgr_locs=mgr_locs, fill_tuple=None)) return blocks def _make_na_block(self, placement, fill_value=None): # TODO: infer dtypes other than float64 from fill_value if fill_value is None: fill_value = np.nan block_shape = list(self.shape) block_shape[0] = len(placement) dtype, fill_value = infer_dtype_from_scalar(fill_value) block_values = np.empty(block_shape, dtype=dtype) block_values.fill(fill_value) return make_block(block_values, placement=placement) def take(self, indexer, axis=1, verify=True, convert=True): """ Take items along any axis. """ self._consolidate_inplace() indexer = (np.arange(indexer.start, indexer.stop, indexer.step, dtype='int64') if isinstance(indexer, slice) else np.asanyarray(indexer, dtype='int64')) n = self.shape[axis] if convert: indexer = maybe_convert_indices(indexer, n) if verify: if ((indexer == -1) | (indexer >= n)).any(): raise Exception('Indices must be nonzero and less than ' 'the axis length') new_labels = self.axes[axis].take(indexer) return self.reindex_indexer(new_axis=new_labels, indexer=indexer, axis=axis, allow_dups=True) def merge(self, other, lsuffix='', rsuffix=''): # We assume at this point that the axes of self and other match. # This is only called from Panel.join, which reindexes prior # to calling to ensure this assumption holds. l, r = items_overlap_with_suffix(left=self.items, lsuffix=lsuffix, right=other.items, rsuffix=rsuffix) new_items = _concat_indexes([l, r]) new_blocks = [blk.copy(deep=False) for blk in self.blocks] offset = self.shape[0] for blk in other.blocks: blk = blk.copy(deep=False) blk.mgr_locs = blk.mgr_locs.add(offset) new_blocks.append(blk) new_axes = list(self.axes) new_axes[0] = new_items return self.__class__(_consolidate(new_blocks), new_axes) def equals(self, other): self_axes, other_axes = self.axes, other.axes if len(self_axes) != len(other_axes): return False if not all(ax1.equals(ax2) for ax1, ax2 in zip(self_axes, other_axes)): return False self._consolidate_inplace() other._consolidate_inplace() if len(self.blocks) != len(other.blocks): return False # canonicalize block order, using a tuple combining the type # name and then mgr_locs because there might be unconsolidated # blocks (say, Categorical) which can only be distinguished by # the iteration order def canonicalize(block): return (block.dtype.name, block.mgr_locs.as_array.tolist()) self_blocks = sorted(self.blocks, key=canonicalize) other_blocks = sorted(other.blocks, key=canonicalize) return all(block.equals(oblock) for block, oblock in zip(self_blocks, other_blocks)) def unstack(self, unstacker_func, fill_value): """Return a blockmanager with all blocks unstacked. 
Parameters ---------- unstacker_func : callable A (partially-applied) ``pd.core.reshape._Unstacker`` class. fill_value : Any fill_value for newly introduced missing values. Returns ------- unstacked : BlockManager """ n_rows = self.shape[-1] dummy = unstacker_func(np.empty((0, 0)), value_columns=self.items) new_columns = dummy.get_new_columns() new_index = dummy.get_new_index() new_blocks = [] columns_mask = [] for blk in self.blocks: blocks, mask = blk._unstack( partial(unstacker_func, value_columns=self.items[blk.mgr_locs.indexer]), new_columns, n_rows, fill_value ) new_blocks.extend(blocks) columns_mask.extend(mask) new_columns = new_columns[columns_mask] bm = BlockManager(new_blocks, [new_columns, new_index]) return bm class SingleBlockManager(BlockManager): """ manage a single block with """ ndim = 1 _is_consolidated = True _known_consolidated = True __slots__ = () def __init__(self, block, axis, do_integrity_check=False, fastpath=False): if isinstance(axis, list): if len(axis) != 1: raise ValueError("cannot create SingleBlockManager with more " "than 1 axis") axis = axis[0] # passed from constructor, single block, single axis if fastpath: self.axes = [axis] if isinstance(block, list): # empty block if len(block) == 0: block = [np.array([])] elif len(block) != 1: raise ValueError('Cannot create SingleBlockManager with ' 'more than 1 block') block = block[0] else: self.axes = [ensure_index(axis)] # create the block here if isinstance(block, list): # provide consolidation to the interleaved_dtype if len(block) > 1: dtype = _interleaved_dtype(block) block = [b.astype(dtype) for b in block] block = _consolidate(block) if len(block) != 1: raise ValueError('Cannot create SingleBlockManager with ' 'more than 1 block') block = block[0] if not isinstance(block, Block): block = make_block(block, placement=slice(0, len(axis)), ndim=1) self.blocks = [block] def _post_setstate(self): pass @property def _block(self): return self.blocks[0] @property def _values(self): return self._block.values @property def _blknos(self): """ compat with BlockManager """ return None @property def _blklocs(self): """ compat with BlockManager """ return None def get_slice(self, slobj, axis=0): if axis >= self.ndim: raise IndexError("Requested axis not found in manager") return self.__class__(self._block._slice(slobj), self.index[slobj], fastpath=True) @property def index(self): return self.axes[0] def convert(self, **kwargs): """ convert the whole block as one """ kwargs['by_item'] = False return self.apply('convert', **kwargs) @property def dtype(self): return self._block.dtype @property def array_dtype(self): return self._block.array_dtype @property def ftype(self): return self._block.ftype def get_dtype_counts(self): return {self.dtype.name: 1} def get_ftype_counts(self): return {self.ftype: 1} def get_dtypes(self): return np.array([self._block.dtype]) def get_ftypes(self): return np.array([self._block.ftype]) def external_values(self): return self._block.external_values() def internal_values(self): return self._block.internal_values() def formatting_values(self): """Return the internal values used by the DataFrame/SeriesFormatter""" return self._block.formatting_values() def get_values(self): """ return a dense type view """ return np.array(self._block.to_dense(), copy=False) @property def asobject(self): """ return a object dtype array. datetime/timedelta like values are boxed to Timestamp/Timedelta instances. 
""" return self._block.get_values(dtype=object) @property def _can_hold_na(self): return self._block._can_hold_na def is_consolidated(self): return True def _consolidate_check(self): pass def _consolidate_inplace(self): pass def delete(self, item): """ Delete single item from SingleBlockManager. Ensures that self.blocks doesn't become empty. """ loc = self.items.get_loc(item) self._block.delete(loc) self.axes[0] = self.axes[0].delete(loc) def fast_xs(self, loc): """ fast path for getting a cross-section return a view of the data """ return self._block.values[loc] def concat(self, to_concat, new_axis): """ Concatenate a list of SingleBlockManagers into a single SingleBlockManager. Used for pd.concat of Series objects with axis=0. Parameters ---------- to_concat : list of SingleBlockManagers new_axis : Index of the result Returns ------- SingleBlockManager """ non_empties = [x for x in to_concat if len(x) > 0] # check if all series are of the same block type: if len(non_empties) > 0: blocks = [obj.blocks[0] for obj in non_empties] if len({b.dtype for b in blocks}) == 1: new_block = blocks[0].concat_same_type(blocks) else: values = [x.values for x in blocks] values = _concat._concat_compat(values) new_block = make_block( values, placement=slice(0, len(values), 1)) else: values = [x._block.values for x in to_concat] values = _concat._concat_compat(values) new_block = make_block( values, placement=slice(0, len(values), 1)) mgr = SingleBlockManager(new_block, new_axis) return mgr # -------------------------------------------------------------------- # Constructor Helpers def create_block_manager_from_blocks(blocks, axes): try: if len(blocks) == 1 and not isinstance(blocks[0], Block): # if blocks[0] is of length 0, return empty blocks if not len(blocks[0]): blocks = [] else: # It's OK if a single block is passed as values, its placement # is basically "all items", but if there're many, don't bother # converting, it's an error anyway. blocks = [make_block(values=blocks[0], placement=slice(0, len(axes[0])))] mgr = BlockManager(blocks, axes) mgr._consolidate_inplace() return mgr except (ValueError) as e: blocks = [getattr(b, 'values', b) for b in blocks] tot_items = sum(b.shape[0] for b in blocks) construction_error(tot_items, blocks[0].shape[1:], axes, e) def create_block_manager_from_arrays(arrays, names, axes): try: blocks = form_blocks(arrays, names, axes) mgr = BlockManager(blocks, axes) mgr._consolidate_inplace() return mgr except ValueError as e: construction_error(len(arrays), arrays[0].shape, axes, e) def construction_error(tot_items, block_shape, axes, e=None): """ raise a helpful message about our construction """ passed = tuple(map(int, [tot_items] + list(block_shape))) # Correcting the user facing error message during dataframe construction if len(passed) <= 2: passed = passed[::-1] implied = tuple(len(ax) for ax in axes) # Correcting the user facing error message during dataframe construction if len(implied) <= 2: implied = implied[::-1] if passed == implied and e is not None: raise e if block_shape[0] == 0: raise ValueError("Empty data passed with indices specified.") raise ValueError("Shape of passed values is {0}, indices imply {1}".format( passed, implied)) # ----------------------------------------------------------------------- def form_blocks(arrays, names, axes): # put "leftover" items in float bucket, where else? # generalize? 
items_dict = defaultdict(list) extra_locs = [] names_idx = ensure_index(names) if names_idx.equals(axes[0]): names_indexer = np.arange(len(names_idx)) else: assert names_idx.intersection(axes[0]).is_unique names_indexer = names_idx.get_indexer_for(axes[0]) for i, name_idx in enumerate(names_indexer): if name_idx == -1: extra_locs.append(i) continue k = names[name_idx] v = arrays[name_idx] block_type = get_block_type(v) items_dict[block_type.__name__].append((i, k, v)) blocks = [] if len(items_dict['FloatBlock']): float_blocks = _multi_blockify(items_dict['FloatBlock']) blocks.extend(float_blocks) if len(items_dict['ComplexBlock']): complex_blocks = _multi_blockify(items_dict['ComplexBlock']) blocks.extend(complex_blocks) if len(items_dict['TimeDeltaBlock']): timedelta_blocks = _multi_blockify(items_dict['TimeDeltaBlock']) blocks.extend(timedelta_blocks) if len(items_dict['IntBlock']): int_blocks = _multi_blockify(items_dict['IntBlock']) blocks.extend(int_blocks) if len(items_dict['DatetimeBlock']): datetime_blocks = _simple_blockify(items_dict['DatetimeBlock'], _NS_DTYPE) blocks.extend(datetime_blocks) if len(items_dict['DatetimeTZBlock']): dttz_blocks = [make_block(array, klass=DatetimeTZBlock, placement=[i]) for i, _, array in items_dict['DatetimeTZBlock']] blocks.extend(dttz_blocks) if len(items_dict['BoolBlock']): bool_blocks = _simple_blockify(items_dict['BoolBlock'], np.bool_) blocks.extend(bool_blocks) if len(items_dict['ObjectBlock']) > 0: object_blocks = _simple_blockify(items_dict['ObjectBlock'], np.object_) blocks.extend(object_blocks) if len(items_dict['SparseBlock']) > 0: sparse_blocks = _sparse_blockify(items_dict['SparseBlock']) blocks.extend(sparse_blocks) if len(items_dict['CategoricalBlock']) > 0: cat_blocks = [make_block(array, klass=CategoricalBlock, placement=[i]) for i, _, array in items_dict['CategoricalBlock']] blocks.extend(cat_blocks) if len(items_dict['ExtensionBlock']): external_blocks = [ make_block(array, klass=ExtensionBlock, placement=[i]) for i, _, array in items_dict['ExtensionBlock'] ] blocks.extend(external_blocks) if len(items_dict['ObjectValuesExtensionBlock']): external_blocks = [ make_block(array, klass=ObjectValuesExtensionBlock, placement=[i]) for i, _, array in items_dict['ObjectValuesExtensionBlock'] ] blocks.extend(external_blocks) if len(extra_locs): shape = (len(extra_locs),) + tuple(len(x) for x in axes[1:]) # empty items -> dtype object block_values = np.empty(shape, dtype=object) block_values.fill(np.nan) na_block = make_block(block_values, placement=extra_locs) blocks.append(na_block) return blocks def _simple_blockify(tuples, dtype): """ return a single array of a block that has a single dtype; if dtype is not None, coerce to this dtype """ values, placement = _stack_arrays(tuples, dtype) # CHECK DTYPE? 
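    # Defensive cast only: _stack_arrays already allocates the result with
    # ``dtype``, so the branch below is not expected to run in practice
    # (hence the "pragma: no cover" marker).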
if dtype is not None and values.dtype != dtype: # pragma: no cover values = values.astype(dtype) block = make_block(values, placement=placement) return [block] def _multi_blockify(tuples, dtype=None): """ return an array of blocks that potentially have different dtypes """ # group by dtype grouper = itertools.groupby(tuples, lambda x: x[2].dtype) new_blocks = [] for dtype, tup_block in grouper: values, placement = _stack_arrays(list(tup_block), dtype) block = make_block(values, placement=placement) new_blocks.append(block) return new_blocks def _sparse_blockify(tuples, dtype=None): """ return an array of blocks that potentially have different dtypes (and are sparse) """ new_blocks = [] for i, names, array in tuples: array = _maybe_to_sparse(array) block = make_block(array, placement=[i]) new_blocks.append(block) return new_blocks def _stack_arrays(tuples, dtype): # fml def _asarray_compat(x): if isinstance(x, ABCSeries): return x._values else: return np.asarray(x) def _shape_compat(x): if isinstance(x, ABCSeries): return len(x), else: return x.shape placement, names, arrays = zip(*tuples) first = arrays[0] shape = (len(arrays),) + _shape_compat(first) stacked = np.empty(shape, dtype=dtype) for i, arr in enumerate(arrays): stacked[i] = _asarray_compat(arr) return stacked, placement def _interleaved_dtype(blocks): # type: (List[Block]) -> Optional[Union[np.dtype, ExtensionDtype]] """Find the common dtype for `blocks`. Parameters ---------- blocks : List[Block] Returns ------- dtype : Optional[Union[np.dtype, ExtensionDtype]] None is returned when `blocks` is empty. """ if not len(blocks): return None return find_common_type([b.dtype for b in blocks]) def _consolidate(blocks): """ Merge blocks having same dtype, exclude non-consolidating blocks """ # sort by _can_consolidate, dtype gkey = lambda x: x._consolidate_key grouper = itertools.groupby(sorted(blocks, key=gkey), gkey) new_blocks = [] for (_can_consolidate, dtype), group_blocks in grouper: merged_blocks = _merge_blocks(list(group_blocks), dtype=dtype, _can_consolidate=_can_consolidate) new_blocks = _extend_blocks(merged_blocks, new_blocks) return new_blocks def _compare_or_regex_search(a, b, regex=False): """ Compare two array_like inputs of the same shape or two scalar values Calls operator.eq or re.search, depending on regex argument. If regex is True, perform an element-wise regex matching. 
Parameters ---------- a : array_like or scalar b : array_like or scalar regex : bool, default False Returns ------- mask : array_like of bool """ if not regex: op = lambda x: operator.eq(x, b) else: op = np.vectorize(lambda x: bool(re.search(b, x)) if isinstance(x, str) else False) is_a_array = isinstance(a, np.ndarray) is_b_array = isinstance(b, np.ndarray) # numpy deprecation warning to have i8 vs integer comparisons if is_datetimelike_v_numeric(a, b): result = False # numpy deprecation warning if comparing numeric vs string-like elif is_numeric_v_string_like(a, b): result = False else: result = op(a) if is_scalar(result) and (is_a_array or is_b_array): type_names = [type(a).__name__, type(b).__name__] if is_a_array: type_names[0] = 'ndarray(dtype={dtype})'.format(dtype=a.dtype) if is_b_array: type_names[1] = 'ndarray(dtype={dtype})'.format(dtype=b.dtype) raise TypeError( "Cannot compare types {a!r} and {b!r}".format(a=type_names[0], b=type_names[1])) return result def _concat_indexes(indexes): return indexes[0].append(indexes[1:]) def items_overlap_with_suffix(left, lsuffix, right, rsuffix): """ If two indices overlap, add suffixes to overlapping entries. If corresponding suffix is empty, the entry is simply converted to string. """ to_rename = left.intersection(right) if len(to_rename) == 0: return left, right else: if not lsuffix and not rsuffix: raise ValueError('columns overlap but no suffix specified: ' '{rename}'.format(rename=to_rename)) def renamer(x, suffix): """Rename the left and right indices. If there is overlap, and suffix is not None, add suffix, otherwise, leave it as-is. Parameters ---------- x : original column name suffix : str or None Returns ------- x : renamed column name """ if x in to_rename and suffix is not None: return '{x}{suffix}'.format(x=x, suffix=suffix) return x lrenamer = partial(renamer, suffix=lsuffix) rrenamer = partial(renamer, suffix=rsuffix) return (_transform_index(left, lrenamer), _transform_index(right, rrenamer)) def _transform_index(index, func, level=None): """ Apply function to all values found in index. This includes transforming multiindex entries separately. Only apply function to one level of the MultiIndex if level is specified. """ if isinstance(index, MultiIndex): if level is not None: items = [tuple(func(y) if i == level else y for i, y in enumerate(x)) for x in index] else: items = [tuple(func(y) for y in x) for x in index] return MultiIndex.from_tuples(items, names=index.names) else: items = [func(x) for x in index] return Index(items, name=index.name, tupleize_cols=False) def _fast_count_smallints(arr): """Faster version of set(arr) for sequences of small numbers.""" counts = np.bincount(arr.astype(np.int_)) nz = counts.nonzero()[0] return np.c_[nz, counts[nz]] def _preprocess_slice_or_indexer(slice_or_indexer, length, allow_fill): if isinstance(slice_or_indexer, slice): return ('slice', slice_or_indexer, libinternals.slice_len(slice_or_indexer, length)) elif (isinstance(slice_or_indexer, np.ndarray) and slice_or_indexer.dtype == np.bool_): return 'mask', slice_or_indexer, slice_or_indexer.sum() else: indexer = np.asanyarray(slice_or_indexer, dtype=np.int64) if not allow_fill: indexer = maybe_convert_indices(indexer, length) return 'fancy', indexer, len(indexer) def concatenate_block_managers(mgrs_indexers, axes, concat_axis, copy): """ Concatenate block managers into one. 
Parameters ---------- mgrs_indexers : list of (BlockManager, {axis: indexer,...}) tuples axes : list of Index concat_axis : int copy : bool """ concat_plans = [get_mgr_concatenation_plan(mgr, indexers) for mgr, indexers in mgrs_indexers] concat_plan = combine_concat_plans(concat_plans, concat_axis) blocks = [] for placement, join_units in concat_plan: if len(join_units) == 1 and not join_units[0].indexers: b = join_units[0].block values = b.values if copy: values = values.copy() elif not copy: values = values.view() b = b.make_block_same_class(values, placement=placement) elif is_uniform_join_units(join_units): b = join_units[0].block.concat_same_type( [ju.block for ju in join_units], placement=placement) else: b = make_block( concatenate_join_units(join_units, concat_axis, copy=copy), placement=placement) blocks.append(b) return BlockManager(blocks, axes)
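
# ---------------------------------------------------------------------
# Illustrative sketch, not part of the original module: how the suffix
# handling in ``items_overlap_with_suffix`` above behaves.  Only labels
# present in *both* indexes receive a suffix; the session below assumes
# ``Index`` is the pandas Index class already imported in this module,
# and the expected output is derived from the renamer logic above rather
# than quoted from the library.
#
#   >>> left, right = items_overlap_with_suffix(
#   ...     Index(['key', 'value']), '_x',
#   ...     Index(['key', 'other']), '_y')
#   >>> list(left), list(right)
#   (['key_x', 'value'], ['key_y', 'other'])
# ---------------------------------------------------------------------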
# -*- coding: utf-8 -*- import numpy as np import pytest from pandas import Timestamp from pandas.util.testing import assert_numpy_array_equal def test_assert_numpy_array_equal_shape_mismatch(): msg = """numpy array are different numpy array shapes are different \\[left\\]: \\(2L*,\\) \\[right\\]: \\(3L*,\\)""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([1, 2]), np.array([3, 4, 5])) def test_assert_numpy_array_equal_bad_type(): expected = "Expected type" with pytest.raises(AssertionError, match=expected): assert_numpy_array_equal(1, 2) @pytest.mark.parametrize("a,b,klass1,klass2", [ (np.array([1]), 1, "ndarray", "int"), (1, np.array([1]), "int", "ndarray"), ]) def test_assert_numpy_array_equal_class_mismatch(a, b, klass1, klass2): msg = """numpy array are different numpy array classes are different \\[left\\]: {klass1} \\[right\\]: {klass2}""".format(klass1=klass1, klass2=klass2) with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(a, b) def test_assert_numpy_array_equal_value_mismatch1(): msg = """numpy array are different numpy array values are different \\(66\\.66667 %\\) \\[left\\]: \\[nan, 2\\.0, 3\\.0\\] \\[right\\]: \\[1\\.0, nan, 3\\.0\\]""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([np.nan, 2, 3]), np.array([1, np.nan, 3])) def test_assert_numpy_array_equal_value_mismatch2(): msg = """numpy array are different numpy array values are different \\(50\\.0 %\\) \\[left\\]: \\[1, 2\\] \\[right\\]: \\[1, 3\\]""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([1, 2]), np.array([1, 3])) def test_assert_numpy_array_equal_value_mismatch3(): msg = """numpy array are different numpy array values are different \\(16\\.66667 %\\) \\[left\\]: \\[\\[1, 2\\], \\[3, 4\\], \\[5, 6\\]\\] \\[right\\]: \\[\\[1, 3\\], \\[3, 4\\], \\[5, 6\\]\\]""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([[1, 2], [3, 4], [5, 6]]), np.array([[1, 3], [3, 4], [5, 6]])) def test_assert_numpy_array_equal_value_mismatch4(): msg = """numpy array are different numpy array values are different \\(50\\.0 %\\) \\[left\\]: \\[1\\.1, 2\\.000001\\] \\[right\\]: \\[1\\.1, 2.0\\]""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([1.1, 2.000001]), np.array([1.1, 2.0])) def test_assert_numpy_array_equal_value_mismatch5(): msg = """numpy array are different numpy array values are different \\(16\\.66667 %\\) \\[left\\]: \\[\\[1, 2\\], \\[3, 4\\], \\[5, 6\\]\\] \\[right\\]: \\[\\[1, 3\\], \\[3, 4\\], \\[5, 6\\]\\]""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([[1, 2], [3, 4], [5, 6]]), np.array([[1, 3], [3, 4], [5, 6]])) def test_assert_numpy_array_equal_value_mismatch6(): msg = """numpy array are different numpy array values are different \\(25\\.0 %\\) \\[left\\]: \\[\\[1, 2\\], \\[3, 4\\]\\] \\[right\\]: \\[\\[1, 3\\], \\[3, 4\\]\\]""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([[1, 2], [3, 4]]), np.array([[1, 3], [3, 4]])) def test_assert_numpy_array_equal_shape_mismatch_override(): msg = """Index are different Index shapes are different \\[left\\]: \\(2L*,\\) \\[right\\]: \\(3L*,\\)""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([1, 2]), np.array([3, 4, 5]), obj="Index") def test_numpy_array_equal_unicode(): # see gh-20503 # # Test ensures that `assert_numpy_array_equals` raises the right # exception when comparing np.arrays 
containing differing unicode objects. msg = """numpy array are different numpy array values are different \\(33\\.33333 %\\) \\[left\\]: \\[á, à, ä\\] \\[right\\]: \\[á, à, å\\]""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(np.array([u"á", u"à", u"ä"]), np.array([u"á", u"à", u"å"])) def test_numpy_array_equal_object(): a = np.array([Timestamp("2011-01-01"), Timestamp("2011-01-01")]) b = np.array([Timestamp("2011-01-01"), Timestamp("2011-01-02")]) msg = """numpy array are different numpy array values are different \\(50\\.0 %\\) \\[left\\]: \\[2011-01-01 00:00:00, 2011-01-01 00:00:00\\] \\[right\\]: \\[2011-01-01 00:00:00, 2011-01-02 00:00:00\\]""" with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(a, b) @pytest.mark.parametrize("other_type", ["same", "copy"]) @pytest.mark.parametrize("check_same", ["same", "copy"]) def test_numpy_array_equal_copy_flag(other_type, check_same): a = np.array([1, 2, 3]) msg = None if other_type == "same": other = a.view() else: other = a.copy() if check_same != other_type: msg = (r"array\(\[1, 2, 3\]\) is not array\(\[1, 2, 3\]\)" if check_same == "same" else r"array\(\[1, 2, 3\]\) is array\(\[1, 2, 3\]\)") if msg is not None: with pytest.raises(AssertionError, match=msg): assert_numpy_array_equal(a, other, check_same=check_same) else: assert_numpy_array_equal(a, other, check_same=check_same)
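
# Illustrative addition, not part of the original suite: a minimal
# happy-path check, assuming the same ``assert_numpy_array_equal`` API
# exercised above.  Equal arrays pass silently, and check_same="same"
# additionally requires shared memory, which a view provides but a copy
# does not (mirroring test_numpy_array_equal_copy_flag above).
def test_assert_numpy_array_equal_happy_path():
    a = np.array([1, 2, 3])

    # identical values in an independent copy: no exception is raised
    assert_numpy_array_equal(a, a.copy())

    # a view shares memory with its base, so the identity check passes
    assert_numpy_array_equal(a, a.view(), check_same="same")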
MJuddBooth/pandas
pandas/tests/util/test_assert_numpy_array_equal.py
pandas/core/internals/managers.py
"""This module contains various functions that are special cases of incomplete gamma functions. It should probably be renamed. """ from ...core import (Add, EulerGamma, Function, I, Integer, Pow, Rational, cacheit, expand_mul, oo, pi, zoo) from ...core.function import ArgumentIndexError from ...core.sympify import sympify from ..combinatorial.factorials import factorial from ..elementary.complexes import polar_lift from ..elementary.exponential import exp, log from ..elementary.hyperbolic import cosh, sinh from ..elementary.integers import floor from ..elementary.miscellaneous import root, sqrt from ..elementary.trigonometric import cos, sin from .hyper import hyper, meijerg # TODO series expansions # TODO see the "Note:" in Ei ############################################################################### # ############################## ERROR FUNCTION ############################# # ############################################################################### class erf(Function): r""" The Gauss error function. This function is defined as: .. math :: \mathrm{erf}(x) = \frac{2}{\sqrt{\pi}} \int_0^x e^{-t^2} \mathrm{d}t. Examples ======== Several special values are known: >>> erf(0) 0 >>> erf(oo) 1 >>> erf(-oo) -1 >>> erf(I*oo) oo*I >>> erf(-I*oo) -oo*I In general one can pull out factors of -1 and I from the argument: >>> erf(-z) -erf(z) The error function obeys the mirror symmetry: >>> conjugate(erf(z)) erf(conjugate(z)) Differentiation with respect to z is supported: >>> diff(erf(z), z) 2*E**(-z**2)/sqrt(pi) We can numerically evaluate the error function to arbitrary precision on the whole complex plane: >>> erf(4).evalf(30) 0.999999984582742099719981147840 >>> erf(-4*I).evalf(30) -1296959.73071763923152794095062*I See Also ======== erfc: Complementary error function. erfi: Imaginary error function. erf2: Two-argument error function. erfinv: Inverse error function. erfcinv: Inverse Complementary error function. erf2inv: Inverse two-argument error function. 
References ========== * https://en.wikipedia.org/wiki/Error_function * https://dlmf.nist.gov/7 * https://mathworld.wolfram.com/Erf.html * http://functions.wolfram.com/GammaBetaErf/Erf """ unbranched = True def fdiff(self, argindex=1): if argindex == 1: return 2*exp(-self.args[0]**2)/sqrt(pi) else: raise ArgumentIndexError(self, argindex) def inverse(self, argindex=1): """Returns the inverse of this function.""" return erfinv @classmethod def eval(cls, arg): if arg.is_Number: if arg is oo: return Integer(1) elif arg == -oo: return Integer(-1) elif arg == 0: return Integer(0) if isinstance(arg, erfinv): return arg.args[0] if isinstance(arg, erfcinv): return 1 - arg.args[0] # Try to pull out factors of I t = arg.extract_multiplicatively(I) if t in (oo, -oo): return arg # Try to pull out factors of -1 if arg.could_extract_minus_sign(): return -cls(-arg) @staticmethod @cacheit def taylor_term(n, x, *previous_terms): if n < 0 or n % 2 == 0: return Integer(0) else: x = sympify(x) k = floor(Rational(n - 1, 2)) if len(previous_terms) >= 2: return -previous_terms[-2] * x**2 * (n - 2)/(n*k) else: return 2*(-1)**k * x**n/(n*factorial(k)*sqrt(pi)) def _eval_conjugate(self): return self.func(self.args[0].conjugate()) def _eval_is_extended_real(self): arg = self.args[0] if arg.is_extended_real: return True elif arg.is_imaginary and arg.is_nonzero: return False def _eval_rewrite_as_uppergamma(self, z): from .gamma_functions import uppergamma return sqrt(z**2)/z*(1 - uppergamma(Rational(1, 2), z**2)/sqrt(pi)) def _eval_rewrite_as_fresnels(self, z): arg = (1 - I)*z/sqrt(pi) return (1 + I)*(fresnelc(arg) - I*fresnels(arg)) def _eval_rewrite_as_fresnelc(self, z): arg = (1 - I)*z/sqrt(pi) return (1 + I)*(fresnelc(arg) - I*fresnels(arg)) def _eval_rewrite_as_meijerg(self, z): return z/sqrt(pi)*meijerg([Rational(1, 2)], [], [0], [-Rational(1, 2)], z**2) def _eval_rewrite_as_hyper(self, z): return 2*z/sqrt(pi)*hyper([Rational(1, 2)], [Rational(3, 2)], -z**2) def _eval_rewrite_as_expint(self, z): return sqrt(z**2)/z - z*expint(Rational(1, 2), z**2)/sqrt(pi) def _eval_rewrite_as_tractable(self, z): return 1 - _erfs(z)*exp(-z**2) def _eval_rewrite_as_erfc(self, z): return 1 - erfc(z) def _eval_rewrite_as_erfi(self, z): return -I*erfi(I*z) def _eval_as_leading_term(self, x): from ...series import Order arg = self.args[0].as_leading_term(x) if x in arg.free_symbols and Order(1, x).contains(arg): return 2*x/sqrt(pi) else: return self.func(arg) def as_real_imag(self, deep=True, **hints): if self.args[0].is_extended_real: if deep: hints['complex'] = False return self.expand(deep, **hints), Integer(0) else: return self, Integer(0) if deep: x, y = self.args[0].expand(deep, **hints).as_real_imag() else: x, y = self.args[0].as_real_imag() if x.is_zero: re = Integer(0) im = erfi(y) else: sq = -y**2/x**2 re = (self.func(x + x*sqrt(sq)) + self.func(x - x*sqrt(sq)))/2 im = x/(2*y)*sqrt(sq)*(self.func(x - x*sqrt(sq)) - self.func(x + x*sqrt(sq))) return re, im class erfc(Function): r""" Complementary Error Function. The function is defined as: .. 
math :: \mathrm{erfc}(x) = \frac{2}{\sqrt{\pi}} \int_x^\infty e^{-t^2} \mathrm{d}t Examples ======== Several special values are known: >>> erfc(0) 1 >>> erfc(oo) 0 >>> erfc(-oo) 2 >>> erfc(I*oo) -oo*I >>> erfc(-I*oo) oo*I The error function obeys the mirror symmetry: >>> conjugate(erfc(z)) erfc(conjugate(z)) Differentiation with respect to z is supported: >>> diff(erfc(z), z) -2*E**(-z**2)/sqrt(pi) It also follows >>> erfc(-z) -erfc(z) + 2 We can numerically evaluate the complementary error function to arbitrary precision on the whole complex plane: >>> erfc(4).evalf(30) 0.0000000154172579002800188521596734869 >>> erfc(4*I).evalf(30) 1.0 - 1296959.73071763923152794095062*I See Also ======== erf: Gaussian error function. erfi: Imaginary error function. erf2: Two-argument error function. erfinv: Inverse error function. erfcinv: Inverse Complementary error function. erf2inv: Inverse two-argument error function. References ========== * https://en.wikipedia.org/wiki/Error_function * https://dlmf.nist.gov/7 * https://mathworld.wolfram.com/Erfc.html * http://functions.wolfram.com/GammaBetaErf/Erfc """ unbranched = True def fdiff(self, argindex=1): if argindex == 1: return -2*exp(-self.args[0]**2)/sqrt(pi) else: raise ArgumentIndexError(self, argindex) def inverse(self, argindex=1): """Returns the inverse of this function.""" return erfcinv @classmethod def eval(cls, arg): if arg.is_Number: if arg is oo: return Integer(0) elif arg == 0: return Integer(1) if isinstance(arg, erfinv): return 1 - arg.args[0] if isinstance(arg, erfcinv): return arg.args[0] # Try to pull out factors of I t = arg.extract_multiplicatively(I) if t in (oo, -oo): return -arg # Try to pull out factors of -1 if arg.could_extract_minus_sign(): return Integer(2) - cls(-arg) @staticmethod @cacheit def taylor_term(n, x, *previous_terms): if n == 0: return Integer(1) elif n < 0 or n % 2 == 0: return Integer(0) else: x = sympify(x) k = floor(Rational(n - 1, 2)) if len(previous_terms) >= 2: return -previous_terms[-2] * x**2 * (n - 2)/(n*k) else: return -2*(-1)**k * x**n/(n*factorial(k)*sqrt(pi)) def _eval_conjugate(self): return self.func(self.args[0].conjugate()) def _eval_is_extended_real(self): arg = self.args[0] if arg.is_extended_real: return True elif arg.is_imaginary and arg.is_nonzero: return False def _eval_rewrite_as_tractable(self, z): return self.rewrite(erf).rewrite('tractable', deep=True) def _eval_rewrite_as_erf(self, z): return 1 - erf(z) def _eval_rewrite_as_erfi(self, z): return 1 + I*erfi(I*z) def _eval_rewrite_as_fresnels(self, z): arg = (1 - I)*z/sqrt(pi) return 1 - (1 + I)*(fresnelc(arg) - I*fresnels(arg)) def _eval_rewrite_as_fresnelc(self, z): arg = (1 - I)*z/sqrt(pi) return 1 - (1 + I)*(fresnelc(arg) - I*fresnels(arg)) def _eval_rewrite_as_meijerg(self, z): return 1 - z/sqrt(pi)*meijerg([Rational(1, 2)], [], [0], [-Rational(1, 2)], z**2) def _eval_rewrite_as_hyper(self, z): return 1 - 2*z/sqrt(pi)*hyper([Rational(1, 2)], [Rational(3, 2)], -z**2) def _eval_rewrite_as_uppergamma(self, z): from .gamma_functions import uppergamma return 1 - sqrt(z**2)/z*(1 - uppergamma(Rational(1, 2), z**2)/sqrt(pi)) def _eval_rewrite_as_expint(self, z): return 1 - sqrt(z**2)/z + z*expint(Rational(1, 2), z**2)/sqrt(pi) def _eval_as_leading_term(self, x): from ...series import Order arg = self.args[0].as_leading_term(x) if x in arg.free_symbols and Order(1, x).contains(arg): return Integer(1) else: return self.func(arg) def as_real_imag(self, deep=True, **hints): if self.args[0].is_extended_real: if deep: hints['complex'] = False 
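            # a real argument yields a real erfc value, so the imaginary
            # part returned below is identically zero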
return self.expand(deep, **hints), Integer(0) else: return self, Integer(0) if deep: x, y = self.args[0].expand(deep, **hints).as_real_imag() else: x, y = self.args[0].as_real_imag() if x.is_zero: re = Integer(1) im = -erfi(y) else: sq = -y**2/x**2 re = (self.func(x + x*sqrt(sq)) + self.func(x - x*sqrt(sq)))/2 im = x/(2*y)*sqrt(sq)*(self.func(x - x*sqrt(sq)) - self.func(x + x*sqrt(sq))) return re, im class erfi(Function): r""" Imaginary error function. The function erfi is defined as: .. math :: \mathrm{erfi}(x) = \frac{2}{\sqrt{\pi}} \int_0^x e^{t^2} \mathrm{d}t Examples ======== Several special values are known: >>> erfi(0) 0 >>> erfi(oo) oo >>> erfi(-oo) -oo >>> erfi(I*oo) I >>> erfi(-I*oo) -I In general one can pull out factors of -1 and I from the argument: >>> erfi(-z) -erfi(z) >>> conjugate(erfi(z)) erfi(conjugate(z)) Differentiation with respect to z is supported: >>> diff(erfi(z), z) 2*E**(z**2)/sqrt(pi) We can numerically evaluate the imaginary error function to arbitrary precision on the whole complex plane: >>> erfi(2).evalf(30) 18.5648024145755525987042919132 >>> erfi(-2*I).evalf(30) -0.995322265018952734162069256367*I See Also ======== erf: Gaussian error function. erfc: Complementary error function. erf2: Two-argument error function. erfinv: Inverse error function. erfcinv: Inverse Complementary error function. erf2inv: Inverse two-argument error function. References ========== * https://en.wikipedia.org/wiki/Error_function * https://mathworld.wolfram.com/Erfi.html * http://functions.wolfram.com/GammaBetaErf/Erfi """ unbranched = True def fdiff(self, argindex=1): if argindex == 1: return 2*exp(self.args[0]**2)/sqrt(pi) else: raise ArgumentIndexError(self, argindex) @classmethod def eval(cls, z): if z.is_Number: if z == 0: return Integer(0) elif z is oo: return oo # Try to pull out factors of -1 if z.could_extract_minus_sign(): return -cls(-z) # Try to pull out factors of I nz = z.extract_multiplicatively(I) if nz is not None: if nz is oo: return I if isinstance(nz, erfinv): return I*nz.args[0] if isinstance(nz, erfcinv): return I*(1 - nz.args[0]) @staticmethod @cacheit def taylor_term(n, x, *previous_terms): if n < 0 or n % 2 == 0: return Integer(0) else: x = sympify(x) k = floor(Rational(n - 1, 2)) if len(previous_terms) >= 2: return previous_terms[-2] * x**2 * (n - 2)/(n*k) else: return 2 * x**n/(n*factorial(k)*sqrt(pi)) def _eval_conjugate(self): return self.func(self.args[0].conjugate()) def _eval_is_extended_real(self): arg = self.args[0] if arg.is_extended_real: return True elif arg.is_imaginary and arg.is_nonzero: return False def _eval_rewrite_as_tractable(self, z): return self.rewrite(erf).rewrite('tractable', deep=True) def _eval_rewrite_as_erf(self, z): return -I*erf(I*z) def _eval_rewrite_as_erfc(self, z): return I*erfc(I*z) - I def _eval_rewrite_as_fresnels(self, z): arg = (1 + I)*z/sqrt(pi) return (1 - I)*(fresnelc(arg) - I*fresnels(arg)) def _eval_rewrite_as_fresnelc(self, z): arg = (1 + I)*z/sqrt(pi) return (1 - I)*(fresnelc(arg) - I*fresnels(arg)) def _eval_rewrite_as_meijerg(self, z): return z/sqrt(pi)*meijerg([Rational(1, 2)], [], [0], [-Rational(1, 2)], -z**2) def _eval_rewrite_as_hyper(self, z): return 2*z/sqrt(pi)*hyper([Rational(1, 2)], [Rational(3, 2)], z**2) def _eval_rewrite_as_uppergamma(self, z): from .gamma_functions import uppergamma return sqrt(-z**2)/z*(uppergamma(Rational(1, 2), -z**2)/sqrt(pi) - 1) def _eval_rewrite_as_expint(self, z): return sqrt(-z**2)/z - z*expint(Rational(1, 2), -z**2)/sqrt(pi) def as_real_imag(self, deep=True, **hints): 
if self.args[0].is_extended_real: if deep: hints['complex'] = False return self.expand(deep, **hints), Integer(0) else: return self, Integer(0) if deep: x, y = self.args[0].expand(deep, **hints).as_real_imag() else: x, y = self.args[0].as_real_imag() if x.is_zero: re = Integer(0) im = erf(y) else: sq = -y**2/x**2 re = (self.func(x + x*sqrt(sq)) + self.func(x - x*sqrt(sq)))/2 im = x/(2*y)*sqrt(sq)*(self.func(x - x*sqrt(sq)) - self.func(x + x*sqrt(sq))) return re, im class erf2(Function): r""" Two-argument error function. This function is defined as: .. math :: \mathrm{erf2}(x, y) = \frac{2}{\sqrt{\pi}} \int_x^y e^{-t^2} \mathrm{d}t Examples ======== Several special values are known: >>> erf2(0, 0) 0 >>> erf2(x, x) 0 >>> erf2(x, oo) -erf(x) + 1 >>> erf2(x, -oo) -erf(x) - 1 >>> erf2(oo, y) erf(y) - 1 >>> erf2(-oo, y) erf(y) + 1 In general one can pull out factors of -1: >>> erf2(-x, -y) -erf2(x, y) The error function obeys the mirror symmetry: >>> conjugate(erf2(x, y)) erf2(conjugate(x), conjugate(y)) Differentiation with respect to x, y is supported: >>> diff(erf2(x, y), x) -2*E**(-x**2)/sqrt(pi) >>> diff(erf2(x, y), y) 2*E**(-y**2)/sqrt(pi) See Also ======== erf: Gaussian error function. erfc: Complementary error function. erfi: Imaginary error function. erfinv: Inverse error function. erfcinv: Inverse Complementary error function. erf2inv: Inverse two-argument error function. References ========== * http://functions.wolfram.com/GammaBetaErf/Erf2/ """ def fdiff(self, argindex): x, y = self.args if argindex == 1: return -2*exp(-x**2)/sqrt(pi) elif argindex == 2: return 2*exp(-y**2)/sqrt(pi) else: raise ArgumentIndexError(self, argindex) @classmethod def eval(cls, x, y): I = oo N = -oo O = Integer(0) if x == y: return Integer(0) elif (x is I or x is N or x is O) or (y is I or y is N or y is O): return erf(y) - erf(x) if isinstance(y, erf2inv) and y.args[0] == x: return y.args[1] # Try to pull out -1 factor sign_x = x.could_extract_minus_sign() sign_y = y.could_extract_minus_sign() if (sign_x and sign_y): return -cls(-x, -y) elif (sign_x or sign_y): return erf(y)-erf(x) def _eval_conjugate(self): return self.func(self.args[0].conjugate(), self.args[1].conjugate()) def _eval_is_extended_real(self): x, y = self.args if y.is_extended_real: if x.is_extended_real: return True elif x.is_imaginary and x.is_nonzero: return False def _eval_rewrite_as_erf(self, x, y): return erf(y) - erf(x) def _eval_rewrite_as_erfc(self, x, y): return erfc(x) - erfc(y) def _eval_rewrite_as_erfi(self, x, y): return I*(erfi(I*x)-erfi(I*y)) def _eval_rewrite_as_fresnels(self, x, y): return erf(y).rewrite(fresnels) - erf(x).rewrite(fresnels) def _eval_rewrite_as_fresnelc(self, x, y): return erf(y).rewrite(fresnelc) - erf(x).rewrite(fresnelc) def _eval_rewrite_as_meijerg(self, x, y): return erf(y).rewrite(meijerg) - erf(x).rewrite(meijerg) def _eval_rewrite_as_hyper(self, x, y): return erf(y).rewrite(hyper) - erf(x).rewrite(hyper) def _eval_rewrite_as_uppergamma(self, x, y): from .gamma_functions import uppergamma return (sqrt(y**2)/y*(1 - uppergamma(Rational(1, 2), y**2)/sqrt(pi)) - sqrt(x**2)/x*(1 - uppergamma(Rational(1, 2), x**2)/sqrt(pi))) def _eval_rewrite_as_expint(self, x, y): return erf(y).rewrite(expint) - erf(x).rewrite(expint) class erfinv(Function): r""" Inverse Error Function. The erfinv function is defined as: .. 
math :: \mathrm{erf}(x) = y \quad \Rightarrow \quad \mathrm{erfinv}(y) = x Examples ======== Several special values are known: >>> erfinv(0) 0 >>> erfinv(1) oo Differentiation with respect to x is supported: >>> diff(erfinv(x), x) E**(erfinv(x)**2)*sqrt(pi)/2 We can numerically evaluate the inverse error function to arbitrary precision on [-1, 1]: >>> erfinv(0.2) 0.179143454621292 See Also ======== erf: Gaussian error function. erfc: Complementary error function. erfi: Imaginary error function. erf2: Two-argument error function. erfcinv: Inverse Complementary error function. erf2inv: Inverse two-argument error function. References ========== * https://en.wikipedia.org/wiki/Error_function#Inverse_functions * http://functions.wolfram.com/GammaBetaErf/InverseErf/ """ def fdiff(self, argindex=1): if argindex == 1: return sqrt(pi)*exp(self.func(self.args[0])**2)/2 else: raise ArgumentIndexError(self, argindex) def inverse(self, argindex=1): """Returns the inverse of this function.""" return erf @classmethod def eval(cls, z): if z == -1: return -oo elif z == 0: return Integer(0) elif z == 1: return oo if isinstance(z, erf) and z.args[0].is_extended_real: return z.args[0] # Try to pull out factors of -1 nz = z.extract_multiplicatively(-1) if isinstance(nz, erf) and nz.args[0].is_extended_real: return -nz.args[0] def _eval_rewrite_as_erfcinv(self, z): return erfcinv(1-z) class erfcinv(Function): r""" Inverse Complementary Error Function. The erfcinv function is defined as: .. math :: \mathrm{erfc}(x) = y \quad \Rightarrow \quad \mathrm{erfcinv}(y) = x Examples ======== Several special values are known: >>> erfcinv(1) 0 >>> erfcinv(0) oo Differentiation with respect to x is supported: >>> diff(erfcinv(x), x) -E**(erfcinv(x)**2)*sqrt(pi)/2 See Also ======== erf: Gaussian error function. erfc: Complementary error function. erfi: Imaginary error function. erf2: Two-argument error function. erfinv: Inverse error function. erf2inv: Inverse two-argument error function. References ========== * https://en.wikipedia.org/wiki/Error_function#Inverse_functions * http://functions.wolfram.com/GammaBetaErf/InverseErfc/ """ def fdiff(self, argindex=1): if argindex == 1: return -sqrt(pi)*exp(self.func(self.args[0])**2)/2 else: raise ArgumentIndexError(self, argindex) def inverse(self, argindex=1): """Returns the inverse of this function.""" return erfc @classmethod def eval(cls, z): if z == 0: return oo elif z == 1: return Integer(0) elif z == 2: return -oo def _eval_rewrite_as_erfinv(self, z): return erfinv(1-z) class erf2inv(Function): r""" Two-argument Inverse error function. The erf2inv function is defined as: .. math :: \mathrm{erf2}(x, w) = y \quad \Rightarrow \quad \mathrm{erf2inv}(x, y) = w Examples ======== Several special values are known: >>> erf2inv(0, 0) 0 >>> erf2inv(1, 0) 1 >>> erf2inv(0, 1) oo >>> erf2inv(0, y) erfinv(y) >>> erf2inv(oo, y) erfcinv(-y) Differentiation with respect to x and y is supported: >>> diff(erf2inv(x, y), x) E**(-x**2 + erf2inv(x, y)**2) >>> diff(erf2inv(x, y), y) E**(erf2inv(x, y)**2)*sqrt(pi)/2 See Also ======== erf: Gaussian error function. erfc: Complementary error function. erfi: Imaginary error function. erf2: Two-argument error function. erfinv: Inverse error function. erfcinv: Inverse complementary error function. 
References ========== * http://functions.wolfram.com/GammaBetaErf/InverseErf2/ """ def fdiff(self, argindex): x, y = self.args if argindex == 1: return exp(self.func(x, y)**2-x**2) elif argindex == 2: return sqrt(pi)*exp(self.func(x, y)**2)/2 else: raise ArgumentIndexError(self, argindex) @classmethod def eval(cls, x, y): if x == 0 and y == 0: return Integer(0) elif x == 0 and y == 1: return oo elif x == 1 and y == 0: return Integer(1) elif x == 0: return erfinv(y) elif x is oo: return erfcinv(-y) elif y == 0: return x elif y is oo: return erfinv(x) ############################################################################### # ################## EXPONENTIAL INTEGRALS ################################## # ############################################################################### class Ei(Function): r""" The classical exponential integral. For use in Diofant, this function is defined as .. math:: \operatorname{Ei}(x) = \sum_{n=1}^\infty \frac{x^n}{n\, n!} + \log(x) + \gamma, where `\gamma` is the Euler-Mascheroni constant. If `x` is a polar number, this defines an analytic function on the Riemann surface of the logarithm. Otherwise this defines an analytic function in the cut plane `\mathbb{C} \setminus (-\infty, 0]`. **Background** The name *exponential integral* comes from the following statement: .. math:: \operatorname{Ei}(x) = \int_{-\infty}^x \frac{e^t}{t} \mathrm{d}t If the integral is interpreted as a Cauchy principal value, this statement holds for `x > 0` and `\operatorname{Ei}(x)` as defined above. Note that we carefully avoided defining `\operatorname{Ei}(x)` for negative real `x`. This is because above integral formula does not hold for any polar lift of such `x`, indeed all branches of `\operatorname{Ei}(x)` above the negative reals are imaginary. However, the following statement holds for all `x \in \mathbb{R}^*`: .. math:: \int_{-\infty}^x \frac{e^t}{t} \mathrm{d}t = \frac{\operatorname{Ei}\left(|x|e^{i \arg(x)}\right) + \operatorname{Ei}\left(|x|e^{- i \arg(x)}\right)}{2}, where the integral is again understood to be a principal value if `x > 0`, and `|x|e^{i \arg(x)}`, `|x|e^{- i \arg(x)}` denote two conjugate polar lifts of `x`. Examples ======== The exponential integral in Diofant is strictly undefined for negative values of the argument. For convenience, exponential integrals with negative arguments are immediately converted into an expression that agrees with the classical integral definition: >>> Ei(-1) -I*pi + Ei(exp_polar(I*pi)) This yields a real value: >>> Ei(-1).evalf(chop=True) -0.219383934395520 On the other hand the analytic continuation is not real: >>> Ei(polar_lift(-1)).evalf(chop=True) -0.21938393439552 + 3.14159265358979*I The exponential integral has a logarithmic branch point at the origin: >>> Ei(x*exp_polar(2*I*pi)) Ei(x) + 2*I*pi Differentiation is supported: >>> Ei(x).diff(x) E**x/x The exponential integral is related to many other special functions. For example: >>> Ei(x).rewrite(expint) -expint(1, x*exp_polar(I*pi)) - I*pi >>> Ei(x).rewrite(Shi) Chi(x) + Shi(x) See Also ======== expint: Generalized exponential integral. E1: Special case of the generalized exponential integral. li: Logarithmic integral. Li: Offset logarithmic integral. Si: Sine integral. Ci: Cosine integral. Shi: Hyperbolic sine integral. Chi: Hyperbolic cosine integral. diofant.functions.special.gamma_functions.uppergamma: Upper incomplete gamma function. 
References ========== * https://dlmf.nist.gov/6.6 * https://en.wikipedia.org/wiki/Exponential_integral * Abramowitz & Stegun, section 5: http://people.math.sfu.ca/~cbm/aands/page_228.htm """ @classmethod def eval(cls, z): if z == 0: return -oo elif z is oo: return oo elif z == -oo: return Integer(0) if not z.is_polar and z.is_negative: # Note: is this a good idea? return Ei(polar_lift(z)) - pi*I nz, n = z.extract_branch_factor() if n: return Ei(nz) + 2*I*pi*n def fdiff(self, argindex=1): from .. import unpolarify arg = unpolarify(self.args[0]) if argindex == 1: return exp(arg)/arg else: raise ArgumentIndexError(self, argindex) def _eval_evalf(self, prec): if (self.args[0]/polar_lift(-1)).is_positive: return Function._eval_evalf(self, prec) + (I*pi)._eval_evalf(prec) return Function._eval_evalf(self, prec) def _eval_rewrite_as_uppergamma(self, z): from .gamma_functions import uppergamma # XXX this does not currently work usefully because uppergamma # immediately turns into expint return -uppergamma(0, polar_lift(-1)*z) - I*pi def _eval_rewrite_as_expint(self, z): return -expint(1, polar_lift(-1)*z) - I*pi def _eval_rewrite_as_li(self, z): if isinstance(z, log): return li(z.args[0]) # TODO: # Actually it only holds that: # Ei(z) = li(exp(z)) # for -pi < imag(z) <= pi return li(exp(z)) def _eval_rewrite_as_Si(self, z): return Shi(z) + Chi(z) _eval_rewrite_as_Ci = _eval_rewrite_as_Si _eval_rewrite_as_Chi = _eval_rewrite_as_Si _eval_rewrite_as_Shi = _eval_rewrite_as_Si def _eval_rewrite_as_tractable(self, z): return exp(z) * _eis(z) def _eval_nseries(self, x, n, logx): x0 = self.args[0].limit(x, 0) if x0 == 0: f = self._eval_rewrite_as_Si(*self.args) return f._eval_nseries(x, n, logx) return super()._eval_nseries(x, n, logx) class expint(Function): r""" Generalized exponential integral. This function is defined as .. math:: \operatorname{E}_\nu(z) = z^{\nu - 1} \Gamma(1 - \nu, z), where `\Gamma(1 - \nu, z)` is the upper incomplete gamma function (``uppergamma``). Hence for `z` with positive real part we have .. math:: \operatorname{E}_\nu(z) = \int_1^\infty \frac{e^{-zt}}{t^\nu} \mathrm{d}t, which explains the name. The representation as an incomplete gamma function provides an analytic continuation for `\operatorname{E}_\nu(z)`. If `\nu` is a non-positive integer the exponential integral is thus an unbranched function of `z`, otherwise there is a branch point at the origin. Refer to the incomplete gamma function documentation for details of the branching behavior. Examples ======== >>> from diofant.abc import nu Differentiation is supported. Differentiation with respect to z explains further the name: for integral orders, the exponential integral is an iterated integral of the exponential function. >>> expint(nu, z).diff(z) -expint(nu - 1, z) Differentiation with respect to nu has no classical expression: >>> expint(nu, z).diff(nu) -z**(nu - 1)*meijerg(((), (1, 1)), ((0, 0, -nu + 1), ()), z) At non-postive integer orders, the exponential integral reduces to the exponential function: >>> expint(0, z) E**(-z)/z >>> expint(-1, z) E**(-z)/z + E**(-z)/z**2 At half-integers it reduces to error functions: >>> expint(Rational(1, 2), z) -sqrt(pi)*erf(sqrt(z))/sqrt(z) + sqrt(pi)/sqrt(z) At positive integer orders it can be rewritten in terms of exponentials and expint(1, z). 
Use expand_func() to do this: >>> expand_func(expint(5, z)) z**4*expint(1, z)/24 + E**(-z)*(-z**3 + z**2 - 2*z + 6)/24 The generalized exponential integral is essentially equivalent to the incomplete gamma function: >>> expint(nu, z).rewrite(uppergamma) z**(nu - 1)*uppergamma(-nu + 1, z) As such it is branched at the origin: >>> expint(4, z*exp_polar(2*pi*I)) I*pi*z**3/3 + expint(4, z) >>> expint(nu, z*exp_polar(2*pi*I)) z**(nu - 1)*(E**(2*I*pi*nu) - 1)*gamma(-nu + 1) + expint(nu, z) See Also ======== Ei: Another related function called exponential integral. E1: The classical case, returns expint(1, z). li: Logarithmic integral. Li: Offset logarithmic integral. Si: Sine integral. Ci: Cosine integral. Shi: Hyperbolic sine integral. Chi: Hyperbolic cosine integral. diofant.functions.special.gamma_functions.uppergamma References ========== * https://dlmf.nist.gov/8.19 * http://functions.wolfram.com/GammaBetaErf/ExpIntegralE/ * https://en.wikipedia.org/wiki/Exponential_integral """ @classmethod def eval(cls, nu, z): from .. import exp, factorial, gamma, unpolarify, uppergamma nu2 = unpolarify(nu) if nu != nu2: return expint(nu2, z) if nu.is_Integer and nu <= 0 or (not nu.is_Integer and (2*nu).is_Integer): return unpolarify(expand_mul(z**(nu - 1)*uppergamma(1 - nu, z))) # Extract branching information. This can be deduced from what is # explained in lowergamma.eval(). z, n = z.extract_branch_factor() if n == 0: return if nu.is_integer: if nu.is_positive: return expint(nu, z) \ - 2*pi*I*n*(-1)**(nu - 1)/factorial(nu - 1)*unpolarify(z)**(nu - 1) else: return (exp(2*I*pi*nu*n) - 1)*z**(nu - 1)*gamma(1 - nu) + expint(nu, z) def fdiff(self, argindex): from .hyper import meijerg nu, z = self.args if argindex == 1: return -z**(nu - 1)*meijerg([], [1, 1], [0, 0, 1 - nu], [], z) elif argindex == 2: return -expint(nu - 1, z) else: raise ArgumentIndexError(self, argindex) def _eval_rewrite_as_uppergamma(self, nu, z): from .gamma_functions import uppergamma return z**(nu - 1)*uppergamma(1 - nu, z) def _eval_rewrite_as_Ei(self, nu, z): from .. import exp, exp_polar, factorial, unpolarify if nu == 1: return -Ei(z*exp_polar(-I*pi)) - I*pi elif nu.is_Integer and nu > 1: # DLMF, 8.19.7 x = -unpolarify(z) return x**(nu - 1)/factorial(nu - 1)*E1(z).rewrite(Ei) + \ exp(x)/factorial(nu - 1) * \ Add(*[factorial(nu - k - 2)*x**k for k in range(nu - 1)]) else: return self def _eval_expand_func(self, **hints): return self.rewrite(Ei).rewrite(expint, **hints) def _eval_rewrite_as_Si(self, nu, z): if nu != 1: return self return Shi(z) - Chi(z) _eval_rewrite_as_Ci = _eval_rewrite_as_Si _eval_rewrite_as_Chi = _eval_rewrite_as_Si _eval_rewrite_as_Shi = _eval_rewrite_as_Si def _eval_nseries(self, x, n, logx): nu = self.args[0] if not nu.has(x) and nu.is_Integer and nu.is_positive: f = self._eval_rewrite_as_Ei(*self.args) return f._eval_nseries(x, n, logx) return super()._eval_nseries(x, n, logx) def E1(z): """ Classical case of the generalized exponential integral. This is equivalent to ``expint(1, z)``. See Also ======== Ei: Exponential integral. expint: Generalized exponential integral. li: Logarithmic integral. Li: Offset logarithmic integral. Si: Sine integral. Ci: Cosine integral. Shi: Hyperbolic sine integral. Chi: Hyperbolic cosine integral. """ return expint(1, z) class li(Function): r""" The classical logarithmic integral. For the use in Diofant, this function is defined as .. math:: \operatorname{li}(x) = \int_0^x \frac{1}{\log(t)} \mathrm{d}t \,. 
Examples ======== Several special values are known: >>> li(0) 0 >>> li(1) -oo >>> li(oo) oo Differentiation with respect to z is supported: >>> diff(li(z), z) 1/log(z) Defining the `li` function via an integral: The logarithmic integral can also be defined in terms of Ei: >>> li(z).rewrite(Ei) Ei(log(z)) >>> diff(li(z).rewrite(Ei), z) 1/log(z) We can numerically evaluate the logarithmic integral to arbitrary precision on the whole complex plane (except the singular points): >>> li(2).evalf(30) 1.04516378011749278484458888919 >>> li(2*I).evalf(30) 1.0652795784357498247001125598 + 3.08346052231061726610939702133*I We can even compute Soldner's constant by the help of mpmath: >>> from mpmath import findroot >>> print(findroot(li, 2)) 1.45136923488338 Further transformations include rewriting `li` in terms of the trigonometric integrals `Si`, `Ci`, `Shi` and `Chi`: >>> li(z).rewrite(Si) -log(I*log(z)) - log(1/log(z))/2 + log(log(z))/2 + Ci(I*log(z)) + Shi(log(z)) >>> li(z).rewrite(Ci) -log(I*log(z)) - log(1/log(z))/2 + log(log(z))/2 + Ci(I*log(z)) + Shi(log(z)) >>> li(z).rewrite(Shi) -log(1/log(z))/2 + log(log(z))/2 + Chi(log(z)) - Shi(log(z)) >>> li(z).rewrite(Chi) -log(1/log(z))/2 + log(log(z))/2 + Chi(log(z)) - Shi(log(z)) See Also ======== Li: Offset logarithmic integral. Ei: Exponential integral. expint: Generalized exponential integral. E1: Special case of the generalized exponential integral. Si: Sine integral. Ci: Cosine integral. Shi: Hyperbolic sine integral. Chi: Hyperbolic cosine integral. References ========== * https://en.wikipedia.org/wiki/Logarithmic_integral * https://mathworld.wolfram.com/LogarithmicIntegral.html * https://dlmf.nist.gov/6 * https://mathworld.wolfram.com/SoldnersConstant.html """ @classmethod def eval(cls, z): if z == 0: return Integer(0) elif z == 1: return -oo elif z is oo: return oo def fdiff(self, argindex=1): arg = self.args[0] if argindex == 1: return 1/log(arg) else: raise ArgumentIndexError(self, argindex) def _eval_conjugate(self): z = self.args[0] # Exclude values on the branch cut (-oo, 0) if not (z.is_extended_real and z.is_negative): return self.func(z.conjugate()) def _eval_rewrite_as_Li(self, z): return Li(z) + li(2) def _eval_rewrite_as_Ei(self, z): return Ei(log(z)) def _eval_rewrite_as_uppergamma(self, z): from .gamma_functions import uppergamma return (-uppergamma(0, -log(z)) + (log(log(z)) - log(1/log(z)))/2 - log(-log(z))) def _eval_rewrite_as_Si(self, z): return (Ci(I*log(z)) - I*Si(I*log(z)) - (log(1/log(z)) - log(log(z)))/2 - log(I*log(z))) _eval_rewrite_as_Ci = _eval_rewrite_as_Si def _eval_rewrite_as_Shi(self, z): return (Chi(log(z)) - Shi(log(z)) - (log(1/log(z)) - log(log(z)))/2) _eval_rewrite_as_Chi = _eval_rewrite_as_Shi def _eval_rewrite_as_hyper(self, z): return (log(z)*hyper((1, 1), (2, 2), log(z)) + (log(log(z)) - log(1/log(z)))/2 + EulerGamma) def _eval_rewrite_as_meijerg(self, z): return (-log(-log(z)) - (log(1/log(z)) - log(log(z)))/2 - meijerg(((), (1,)), ((0, 0), ()), -log(z))) def _eval_rewrite_as_tractable(self, z): return z * _eis(log(z)) class Li(Function): r""" The offset logarithmic integral. For the use in Diofant, this function is defined as .. 
math:: \operatorname{Li}(x) = \operatorname{li}(x) - \operatorname{li}(2) Examples ======== The following special value is known: >>> Li(2) 0 Differentiation with respect to z is supported: >>> diff(Li(z), z) 1/log(z) The shifted logarithmic integral can be written in terms of `li(z)`: >>> Li(z).rewrite(li) li(z) - li(2) We can numerically evaluate the logarithmic integral to arbitrary precision on the whole complex plane (except the singular points): >>> Li(2).evalf(30) 0 >>> Li(4).evalf(30) 1.92242131492155809316615998938 See Also ======== li: Logarithmic integral. Ei: Exponential integral. expint: Generalized exponential integral. E1: Special case of the generalized exponential integral. Si: Sine integral. Ci: Cosine integral. Shi: Hyperbolic sine integral. Chi: Hyperbolic cosine integral. References ========== * https://en.wikipedia.org/wiki/Logarithmic_integral * https://mathworld.wolfram.com/LogarithmicIntegral.html * https://dlmf.nist.gov/6 """ @classmethod def eval(cls, z): if z is oo: return oo elif z == 2: return Integer(0) def fdiff(self, argindex=1): arg = self.args[0] if argindex == 1: return 1/log(arg) else: raise ArgumentIndexError(self, argindex) def _eval_evalf(self, prec): return self.rewrite(li).evalf(prec) def _eval_rewrite_as_li(self, z): return li(z) - li(2) def _eval_rewrite_as_tractable(self, z): return self.rewrite(li).rewrite('tractable', deep=True) ############################################################################### # ################## TRIGONOMETRIC INTEGRALS ################################ # ############################################################################### class TrigonometricIntegral(Function): """Base class for trigonometric integrals.""" @classmethod def eval(cls, z): if z == 0: return cls._atzero elif z is oo: return cls._atinf() elif z == -oo: return cls._atneginf() nz = z.extract_multiplicatively(polar_lift(I)) if nz is None and cls._trigfunc(0) == 0: nz = z.extract_multiplicatively(I) if nz is not None: return cls._Ifactor(nz, 1) nz = z.extract_multiplicatively(polar_lift(-I)) if nz is not None: return cls._Ifactor(nz, -1) nz = z.extract_multiplicatively(polar_lift(-1)) if nz is None and cls._trigfunc(0) == 0: nz = z.extract_multiplicatively(-1) if nz is not None: return cls._minusfactor(nz) nz, n = z.extract_branch_factor() if n != 0 or nz != z: return 2*pi*I*n*cls._trigfunc(0) + cls(nz) def fdiff(self, argindex=1): from .. import unpolarify arg = unpolarify(self.args[0]) if argindex == 1: return self._trigfunc(arg)/arg else: raise ArgumentIndexError(self, argindex) def _eval_rewrite_as_Ei(self, z): return self._eval_rewrite_as_expint(z).rewrite(Ei) def _eval_nseries(self, x, n, logx): # NOTE this is fairly inefficient n += 1 if self.args[0].subs({x: 0}) != 0: return super()._eval_nseries(x, n, logx) baseseries = self._trigfunc(x)._eval_nseries(x, n, logx) if self._trigfunc(0) != 0: baseseries -= 1 baseseries = baseseries.replace(Pow, lambda t, n: t**n/n) if self._trigfunc(0) != 0: baseseries += EulerGamma + log(x) return baseseries.subs({x: self.args[0]})._eval_nseries(x, n, logx) class Si(TrigonometricIntegral): r""" Sine integral. This function is defined by .. math:: \operatorname{Si}(z) = \int_0^z \frac{\sin{t}}{t} \mathrm{d}t. It is an entire function. 
Examples ======== The sine integral is an antiderivative of sin(z)/z: >>> Si(z).diff(z) sin(z)/z It is unbranched: >>> Si(z*exp_polar(2*I*pi)) Si(z) Sine integral behaves much like ordinary sine under multiplication by ``I``: >>> Si(I*z) I*Shi(z) >>> Si(-z) -Si(z) It can also be expressed in terms of exponential integrals, but beware that the latter is branched: >>> Si(z).rewrite(expint) -I*(-expint(1, z*exp_polar(-I*pi/2))/2 + expint(1, z*exp_polar(I*pi/2))/2) + pi/2 See Also ======== Ci: Cosine integral. Shi: Hyperbolic sine integral. Chi: Hyperbolic cosine integral. Ei: Exponential integral. expint: Generalized exponential integral. E1: Special case of the generalized exponential integral. li: Logarithmic integral. Li: Offset logarithmic integral. References ========== * https://en.wikipedia.org/wiki/Trigonometric_integral """ _trigfunc = sin _atzero = Integer(0) @classmethod def _atinf(cls): return pi/2 @classmethod def _atneginf(cls): return -pi/2 @classmethod def _minusfactor(cls, z): return -Si(z) @classmethod def _Ifactor(cls, z, sign): return I*Shi(z)*sign def _eval_rewrite_as_expint(self, z): # XXX should we polarify z? return pi/2 + (E1(polar_lift(I)*z) - E1(polar_lift(-I)*z))/2/I class Ci(TrigonometricIntegral): r""" Cosine integral. This function is defined for positive `x` by .. math:: \operatorname{Ci}(x) = \gamma + \log{x} + \int_0^x \frac{\cos{t} - 1}{t} \mathrm{d}t = -\int_x^\infty \frac{\cos{t}}{t} \mathrm{d}t, where `\gamma` is the Euler-Mascheroni constant. We have .. math:: \operatorname{Ci}(z) = -\frac{\operatorname{E}_1\left(e^{i\pi/2} z\right) + \operatorname{E}_1\left(e^{-i \pi/2} z\right)}{2} which holds for all polar `z` and thus provides an analytic continuation to the Riemann surface of the logarithm. The formula also holds as stated for `z \in \mathbb{C}` with `\Re(z) > 0`. By lifting to the principal branch we obtain an analytic function on the cut complex plane. Examples ======== The cosine integral is a primitive of `\cos(z)/z`: >>> Ci(z).diff(z) cos(z)/z It has a logarithmic branch point at the origin: >>> Ci(z*exp_polar(2*I*pi)) Ci(z) + 2*I*pi The cosine integral behaves somewhat like ordinary `\cos` under multiplication by `i`: >>> Ci(polar_lift(I)*z) Chi(z) + I*pi/2 >>> Ci(polar_lift(-1)*z) Ci(z) + I*pi It can also be expressed in terms of exponential integrals: >>> Ci(z).rewrite(expint) -expint(1, z*exp_polar(-I*pi/2))/2 - expint(1, z*exp_polar(I*pi/2))/2 See Also ======== Si: Sine integral. Shi: Hyperbolic sine integral. Chi: Hyperbolic cosine integral. Ei: Exponential integral. expint: Generalized exponential integral. E1: Special case of the generalized exponential integral. li: Logarithmic integral. Li: Offset logarithmic integral. References ========== * https://en.wikipedia.org/wiki/Trigonometric_integral """ _trigfunc = cos _atzero = zoo @classmethod def _atinf(cls): return Integer(0) @classmethod def _atneginf(cls): return I*pi @classmethod def _minusfactor(cls, z): return Ci(z) + I*pi @classmethod def _Ifactor(cls, z, sign): return Chi(z) + I*pi/2*sign def _eval_rewrite_as_expint(self, z): return -(E1(polar_lift(I)*z) + E1(polar_lift(-I)*z))/2 class Shi(TrigonometricIntegral): r""" Sinh integral. This function is defined by .. math:: \operatorname{Shi}(z) = \int_0^z \frac{\sinh{t}}{t} \mathrm{d}t. It is an entire function. 
Examples ======== The Sinh integral is a primitive of `\sinh(z)/z`: >>> Shi(z).diff(z) sinh(z)/z It is unbranched: >>> Shi(z*exp_polar(2*I*pi)) Shi(z) The `\sinh` integral behaves much like ordinary `\sinh` under multiplication by `i`: >>> Shi(I*z) I*Si(z) >>> Shi(-z) -Shi(z) It can also be expressed in terms of exponential integrals, but beware that the latter is branched: >>> Shi(z).rewrite(expint) expint(1, z)/2 - expint(1, z*exp_polar(I*pi))/2 - I*pi/2 See Also ======== Si: Sine integral. Ci: Cosine integral. Chi: Hyperbolic cosine integral. Ei: Exponential integral. expint: Generalized exponential integral. E1: Special case of the generalized exponential integral. li: Logarithmic integral. Li: Offset logarithmic integral. References ========== * https://en.wikipedia.org/wiki/Trigonometric_integral """ _trigfunc = sinh _atzero = Integer(0) @classmethod def _atinf(cls): return oo @classmethod def _atneginf(cls): return -oo @classmethod def _minusfactor(cls, z): return -Shi(z) @classmethod def _Ifactor(cls, z, sign): return I*Si(z)*sign def _eval_rewrite_as_expint(self, z): from .. import exp_polar # XXX should we polarify z? return (E1(z) - E1(exp_polar(I*pi)*z))/2 - I*pi/2 class Chi(TrigonometricIntegral): r""" Cosh integral. This function is defined for positive `x` by .. math:: \operatorname{Chi}(x) = \gamma + \log{x} + \int_0^x \frac{\cosh{t} - 1}{t} \mathrm{d}t, where `\gamma` is the Euler-Mascheroni constant. We have .. math:: \operatorname{Chi}(z) = \operatorname{Ci}\left(e^{i \pi/2}z\right) - i\frac{\pi}{2}, which holds for all polar `z` and thus provides an analytic continuation to the Riemann surface of the logarithm. By lifting to the principal branch we obtain an analytic function on the cut complex plane. Examples ======== The `\cosh` integral is a primitive of `\cosh(z)/z`: >>> Chi(z).diff(z) cosh(z)/z It has a logarithmic branch point at the origin: >>> Chi(z*exp_polar(2*I*pi)) Chi(z) + 2*I*pi The `\cosh` integral behaves somewhat like ordinary `\cosh` under multiplication by `i`: >>> Chi(polar_lift(I)*z) Ci(z) + I*pi/2 >>> Chi(polar_lift(-1)*z) Chi(z) + I*pi It can also be expressed in terms of exponential integrals: >>> Chi(z).rewrite(expint) -expint(1, z)/2 - expint(1, z*exp_polar(I*pi))/2 - I*pi/2 See Also ======== Si: Sine integral. Ci: Cosine integral. Shi: Hyperbolic sine integral. Ei: Exponential integral. expint: Generalized exponential integral. E1: Special case of the generalized exponential integral. li: Logarithmic integral. Li: Offset logarithmic integral. References ========== * https://en.wikipedia.org/wiki/Trigonometric_integral """ _trigfunc = cosh _atzero = zoo @classmethod def _atinf(cls): return oo @classmethod def _atneginf(cls): return oo @classmethod def _minusfactor(cls, z): return Chi(z) + I*pi @classmethod def _Ifactor(cls, z, sign): return Ci(z) + I*pi/2*sign def _eval_rewrite_as_expint(self, z): from .. 
import exp_polar return -I*pi/2 - (E1(z) + E1(exp_polar(I*pi)*z))/2 def _latex(self, printer, exp=None): if exp: return (r'\operatorname{Chi}^{%s}{\left (%s \right )}' # noqa: SFS101 % (printer._print(exp), printer._print(self.args[0]))) else: return (r'\operatorname{Chi}{\left (%s \right )}' # noqa: SFS101 % printer._print(self.args[0])) @staticmethod def _latex_no_arg(printer): return r'\operatorname{Chi}' ############################################################################### # ################## FRESNEL INTEGRALS ###################################### # ############################################################################### class FresnelIntegral(Function): """Base class for the Fresnel integrals.""" unbranched = True @classmethod def eval(cls, z): # Value at zero if z == 0: return Integer(0) # Try to pull out factors of -1 and I prefact = Integer(1) newarg = z changed = False nz = newarg.extract_multiplicatively(-1) if nz is not None: prefact = -prefact newarg = nz changed = True nz = newarg.extract_multiplicatively(I) if nz is not None: prefact = cls._sign*I*prefact newarg = nz changed = True if changed: return prefact*cls(newarg) # Values at positive infinities signs # if any were extracted automatically if z is oo: return Rational(1, 2) def fdiff(self, argindex=1): if argindex == 1: return self._trigfunc(pi*self.args[0]**2/2) else: raise ArgumentIndexError(self, argindex) def _eval_is_extended_real(self): if self.args[0].is_extended_real: return True def _eval_conjugate(self): return self.func(self.args[0].conjugate()) def as_real_imag(self, deep=True, **hints): if self.args[0].is_extended_real: if deep: hints['complex'] = False return self.expand(deep, **hints), Integer(0) else: return self, Integer(0) if deep: x, y = self.args[0].expand(deep, **hints).as_real_imag() else: x, y = self.args[0].as_real_imag() # Fresnel S # http://functions.wolfram.com/06.32.19.0003.01 # http://functions.wolfram.com/06.32.19.0006.01 # Fresnel C # http://functions.wolfram.com/06.33.19.0003.01 # http://functions.wolfram.com/06.33.19.0006.01 if x.is_zero: re, im = self.func(I*y).rewrite(erf).as_real_imag() else: sq = -y**2/x**2 re = (self.func(x + x*sqrt(sq)) + self.func(x - x*sqrt(sq)))/2 im = x/(2*y)*sqrt(sq)*(self.func(x - x*sqrt(sq)) - self.func(x + x*sqrt(sq))) return re, im class fresnels(FresnelIntegral): r""" Fresnel integral S. This function is defined by .. math:: \operatorname{S}(z) = \int_0^z \sin{\frac{\pi}{2} t^2} \mathrm{d}t. It is an entire function. Examples ======== Several special values are known: >>> fresnels(0) 0 >>> fresnels(oo) 1/2 >>> fresnels(-oo) -1/2 >>> fresnels(I*oo) -I/2 >>> fresnels(-I*oo) I/2 In general one can pull out factors of -1 and `i` from the argument: >>> fresnels(-z) -fresnels(z) >>> fresnels(I*z) -I*fresnels(z) The Fresnel S integral obeys the mirror symmetry `\overline{S(z)} = S(\bar{z})`: >>> conjugate(fresnels(z)) fresnels(conjugate(z)) Differentiation with respect to `z` is supported: >>> diff(fresnels(z), z) sin(pi*z**2/2) Defining the Fresnel functions via an integral >>> integrate(sin(pi*z**2/2), z) 3*fresnels(z)*gamma(3/4)/(4*gamma(7/4)) >>> expand_func(integrate(sin(pi*z**2/2), z)) fresnels(z) We can numerically evaluate the Fresnel integral to arbitrary precision on the whole complex plane: >>> fresnels(2).evalf(30) 0.343415678363698242195300815958 >>> fresnels(-2*I).evalf(30) 0.343415678363698242195300815958*I See Also ======== fresnelc: Fresnel cosine integral. 
References ========== * https://en.wikipedia.org/wiki/Fresnel_integral * https://dlmf.nist.gov/7 * https://mathworld.wolfram.com/FresnelIntegrals.html * http://functions.wolfram.com/GammaBetaErf/FresnelS * The converging factors for the fresnel integrals by John W. Wrench Jr. and Vicki Alley """ _trigfunc = sin _sign = -Integer(1) @staticmethod @cacheit def taylor_term(n, x, *previous_terms): if n < 0: return Integer(0) else: x = sympify(x) if len(previous_terms) >= 1: p = previous_terms[-1] return (-pi**2*x**4*(4*n - 1)/(8*n*(2*n + 1)*(4*n + 3))) * p else: return x**3 * (-x**4)**n * (Integer(2)**(-2*n - 1)*pi**(2*n + 1)) / ((4*n + 3)*factorial(2*n + 1)) def _eval_rewrite_as_erf(self, z): return (1 + I)/4 * (erf((1 + I)/2*sqrt(pi)*z) - I*erf((1 - I)/2*sqrt(pi)*z)) def _eval_rewrite_as_hyper(self, z): return pi*z**3/6 * hyper([Rational(3, 4)], [Rational(3, 2), Rational(7, 4)], -pi**2*z**4/16) def _eval_rewrite_as_meijerg(self, z): return (pi*z**Rational(9, 4) / (sqrt(2)*(z**2)**Rational(3, 4)*(-z)**Rational(3, 4)) * meijerg([], [1], [Rational(3, 4)], [Rational(1, 4), 0], -pi**2*z**4/16)) def _eval_aseries(self, n, args0, x, logx): from ...series import Order point = args0[0] # Expansion at oo if point is oo: z = self.args[0] # expansion of S(x) = S1(x*sqrt(pi/2)), see reference[5] page 1-8 p = [(-1)**k * factorial(4*k + 1) / (2**(2*k + 2) * z**(4*k + 3) * 2**(2*k)*factorial(2*k)) for k in range(n)] q = [1/(2*z)] + [(-1)**k * factorial(4*k - 1) / (2**(2*k + 1) * z**(4*k + 1) * 2**(2*k - 1)*factorial(2*k - 1)) for k in range(1, n)] p = [-sqrt(2/pi)*t for t in p] + [Order(1/z**n, x)] q = [-sqrt(2/pi)*t for t in q] + [Order(1/z**n, x)] return Rational(1, 2) + (sin(z**2)*Add(*p) + cos(z**2)*Add(*q)).subs({x: sqrt(2/pi)*x}) # All other points are not handled return super()._eval_aseries(n, args0, x, logx) class fresnelc(FresnelIntegral): r""" Fresnel integral C. This function is defined by .. math:: \operatorname{C}(z) = \int_0^z \cos{\frac{\pi}{2} t^2} \mathrm{d}t. It is an entire function. Examples ======== Several special values are known: >>> fresnelc(0) 0 >>> fresnelc(oo) 1/2 >>> fresnelc(-oo) -1/2 >>> fresnelc(I*oo) I/2 >>> fresnelc(-I*oo) -I/2 In general one can pull out factors of -1 and `i` from the argument: >>> fresnelc(-z) -fresnelc(z) >>> fresnelc(I*z) I*fresnelc(z) The Fresnel C integral obeys the mirror symmetry `\overline{C(z)} = C(\bar{z})`: >>> conjugate(fresnelc(z)) fresnelc(conjugate(z)) Differentiation with respect to `z` is supported: >>> diff(fresnelc(z), z) cos(pi*z**2/2) Defining the Fresnel functions via an integral >>> integrate(cos(pi*z**2/2), z) fresnelc(z)*gamma(1/4)/(4*gamma(5/4)) >>> expand_func(integrate(cos(pi*z**2/2), z)) fresnelc(z) We can numerically evaluate the Fresnel integral to arbitrary precision on the whole complex plane: >>> fresnelc(2).evalf(30) 0.488253406075340754500223503357 >>> fresnelc(-2*I).evalf(30) -0.488253406075340754500223503357*I See Also ======== fresnels: Fresnel sine integral. References ========== * https://en.wikipedia.org/wiki/Fresnel_integral * https://dlmf.nist.gov/7 * https://mathworld.wolfram.com/FresnelIntegrals.html * http://functions.wolfram.com/GammaBetaErf/FresnelC * The converging factors for the fresnel integrals by John W. Wrench Jr. 
and Vicki Alley """ _trigfunc = cos _sign = Integer(1) @staticmethod @cacheit def taylor_term(n, x, *previous_terms): if n < 0: return Integer(0) else: x = sympify(x) if len(previous_terms) >= 1: p = previous_terms[-1] return (-pi**2*x**4*(4*n - 3)/(8*n*(2*n - 1)*(4*n + 1))) * p else: return x * (-x**4)**n * (Integer(2)**(-2*n)*pi**(2*n)) / ((4*n + 1)*factorial(2*n)) def _eval_rewrite_as_erf(self, z): return (1 - I)/4 * (erf((1 + I)/2*sqrt(pi)*z) + I*erf((1 - I)/2*sqrt(pi)*z)) def _eval_rewrite_as_hyper(self, z): return z * hyper([Rational(1, 4)], [Rational(1, 2), Rational(5, 4)], -pi**2*z**4/16) def _eval_rewrite_as_meijerg(self, z): return (pi*z**Rational(3, 4) / (sqrt(2)*root(z**2, 4)*root(-z, 4)) * meijerg([], [1], [Rational(1, 4)], [Rational(3, 4), 0], -pi**2*z**4/16)) def _eval_aseries(self, n, args0, x, logx): from ...series import Order point = args0[0] # Expansion at oo if point is oo: z = self.args[0] # expansion of C(x) = C1(x*sqrt(pi/2)), see reference[5] page 1-8 p = [(-1)**k * factorial(4*k + 1) / (2**(2*k + 2) * z**(4*k + 3) * 2**(2*k)*factorial(2*k)) for k in range(n)] q = [1/(2*z)] + [(-1)**k * factorial(4*k - 1) / (2**(2*k + 1) * z**(4*k + 1) * 2**(2*k - 1)*factorial(2*k - 1)) for k in range(1, n)] p = [-sqrt(2/pi)*t for t in p] + [Order(1/z**n, x)] q = [ sqrt(2/pi)*t for t in q] + [Order(1/z**n, x)] return Rational(1, 2) + (cos(z**2)*Add(*p) + sin(z**2)*Add(*q)).subs({x: sqrt(2/pi)*x}) # All other points are not handled return super()._eval_aseries(n, args0, x, logx) ############################################################################### # ################## HELPER FUNCTIONS ####################################### # ############################################################################### class _erfs(Function): r""" Helper function to make the `\mathrm{erf}(z)` function tractable for the Gruntz algorithm. """ @classmethod def eval(cls, z): r = cls(z, evaluate=False).rewrite('intractable') if r.is_number: return r def _eval_aseries(self, n, args0, x, logx): from ...series import Order point = args0[0] # Expansion at oo if point is oo: z = self.args[0] l = [1/sqrt(pi)*factorial(2*k)*(-Integer(4))**(-k) / factorial(k)*(1/z)**(2*k + 1) for k in range(n)] o = Order(1/z**(2*n + 1), x) # It is very inefficient to first add the order and then do the nseries return (Add(*l))._eval_nseries(x, n, logx) + o # Expansion at I*oo t = point.extract_multiplicatively(I) if t is oo: z = self.args[0] # TODO: is the series really correct? l = [1/sqrt(pi)*factorial(2*k)*(-Integer(4))**(-k) / factorial(k)*(1/z)**(2*k + 1) for k in range(n)] o = Order(1/z**(2*n + 1), x) # It is very inefficient to first add the order and then do the nseries return (Add(*l))._eval_nseries(x, n, logx) + o # All other points are not handled return super()._eval_aseries(n, args0, x, logx) def fdiff(self, argindex=1): if argindex == 1: z = self.args[0] return -2/sqrt(pi) + 2*z*_erfs(z) else: raise ArgumentIndexError(self, argindex) def _eval_rewrite_as_intractable(self, z): return (1 - erf(z))*exp(z**2) class _eis(Function): r""" Helper function to make the `\mathrm{Ei}(z)` and `\mathrm{li}(z)` functions tractable for the Gruntz algorithm. 
""" def _eval_aseries(self, n, args0, x, logx): from ...series import Order if args0[0] != oo: return super()._eval_aseries(n, args0, x, logx) z = self.args[0] l = [ factorial(k) * (1/z)**(k + 1) for k in range(n) ] o = Order(1/z**(n + 1), x) # It is very inefficient to first add the order and then do the nseries return (Add(*l))._eval_nseries(x, n, logx) + o def fdiff(self, argindex=1): if argindex == 1: z = self.args[0] return 1/z - _eis(z) else: raise ArgumentIndexError(self, argindex) def _eval_rewrite_as_intractable(self, z): return exp(-z)*Ei(z) def _eval_nseries(self, x, n, logx): x0 = self.args[0].limit(x, 0) if x0 == 0: f = self._eval_rewrite_as_intractable(*self.args) return f._eval_nseries(x, n, logx) return super()._eval_nseries(x, n, logx) def _eval_evalf(self, prec): return self.rewrite('intractable').evalf(prec)
import pytest from diofant import (DiracDelta, Eq, Heaviside, I, Piecewise, Rational, Symbol, adjoint, conjugate, nan, pi, sign, sqrt, symbols, transpose) from diofant.abc import x, y, z from diofant.core.function import ArgumentIndexError __all__ = () def test_DiracDelta(): i = Symbol('i', nonzero=True) j = Symbol('j', positive=True) k = Symbol('k', negative=True) assert DiracDelta(1) == 0 assert DiracDelta(5.1) == 0 assert DiracDelta(-pi) == 0 assert DiracDelta(5, 7) == 0 assert DiracDelta(i) == 0 assert DiracDelta(j) == 0 assert DiracDelta(k) == 0 assert DiracDelta(nan) == nan assert isinstance(DiracDelta(0), DiracDelta) assert isinstance(DiracDelta(x), DiracDelta) assert adjoint(DiracDelta(x)) == DiracDelta(x) assert adjoint(DiracDelta(x - y)) == DiracDelta(x - y) assert conjugate(DiracDelta(x)) == DiracDelta(x) assert conjugate(DiracDelta(x - y)) == DiracDelta(x - y) assert transpose(DiracDelta(x)) == DiracDelta(x) assert transpose(DiracDelta(x - y)) == DiracDelta(x - y) assert DiracDelta(x).diff(x) == DiracDelta(x, 1) assert DiracDelta(x, 1).diff(x) == DiracDelta(x, 2) assert DiracDelta(x).is_simple(x) is True assert DiracDelta(3*x).is_simple(x) is True assert DiracDelta(x**2).is_simple(x) is False assert DiracDelta(sqrt(x)).is_simple(x) is False assert DiracDelta(x).is_simple(y) is False assert DiracDelta(x*y).simplify(x) == DiracDelta(x)/abs(y) assert DiracDelta(x*y).simplify(y) == DiracDelta(y)/abs(x) assert DiracDelta(x**2*y).simplify(x) == DiracDelta(x**2*y) assert DiracDelta(y).simplify(x) == DiracDelta(y) assert DiracDelta((x - 1)*(x - 2)*(x - 3)).simplify(x) == \ DiracDelta(x - 3)/2 + DiracDelta(x - 2) + DiracDelta(x - 1)/2 pytest.raises(ArgumentIndexError, lambda: DiracDelta(x).fdiff(2)) pytest.raises(ValueError, lambda: DiracDelta(x, -1)) def test_heaviside(): x, y = symbols('x, y', extended_real=True) assert Heaviside(0) == 0.5 assert Heaviside(-5) == 0 assert Heaviside(1) == 1 assert Heaviside(nan) == nan assert Heaviside(x).is_real assert adjoint(Heaviside(x)) == Heaviside(x) assert adjoint(Heaviside(x - y)) == Heaviside(x - y) assert conjugate(Heaviside(x)) == Heaviside(x) assert conjugate(Heaviside(x - y)) == Heaviside(x - y) assert transpose(Heaviside(x)) == Heaviside(x) assert transpose(Heaviside(x - y)) == Heaviside(x - y) assert Heaviside(x).diff(x) == DiracDelta(x) assert Heaviside(z + I).is_Function is True assert Heaviside(I*z).is_Function is True pytest.raises(ArgumentIndexError, lambda: Heaviside(x).fdiff(2)) pytest.raises(ValueError, lambda: Heaviside(I)) pytest.raises(ValueError, lambda: Heaviside(2 + 3*I)) def test_rewrite(): x = Symbol('x', extended_real=True) assert Heaviside(x).rewrite(Piecewise) == \ Piecewise((1, x > 0), (Rational(1, 2), Eq(x, 0)), (0, True)) assert Heaviside(y).rewrite(Piecewise) == Heaviside(y) assert Heaviside(x).rewrite(sign) == (sign(x)+1)/2 assert Heaviside(y).rewrite(sign) == Heaviside(y)
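The final simplify() assertion in the tests above is an instance of the composite-argument rule for the delta function: for g with simple real roots r_i, DiracDelta(g(x)) expands to Sum_i DiracDelta(x - r_i)/Abs(g'(r_i)). A quick check of the weights (a sketch using only stock diofant calls):

from diofant import Symbol, diff

x = Symbol('x')
g = (x - 1)*(x - 2)*(x - 3)
dg = diff(g, x)
for r in (1, 2, 3):
    # |g'(1)| = 2, |g'(2)| = 1, |g'(3)| = 2, giving the 1/2, 1, 1/2 weights.
    print(r, abs(dg.subs({x: r})))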
diofant/diofant
diofant/tests/functions/test_delta_functions.py
diofant/functions/special/error_functions.py
from ..core import Basic, Symbol, cacheit, sympify from ..functions import cos, sin from ..matrices import ImmutableMatrix, eye, rot_axis1, rot_axis2, rot_axis3 class Orienter(Basic): """Super-class for all orienter classes.""" def rotation_matrix(self): """ The rotation matrix corresponding to this orienter instance. """ return self._parent_orient class AxisOrienter(Orienter): """Class to denote an axis orienter.""" def __new__(cls, angle, axis): from .vector import Vector if not isinstance(axis, Vector): raise TypeError('axis should be a Vector') angle = sympify(angle) obj = super().__new__(cls, angle, axis) obj._angle = angle obj._axis = axis return obj def __init__(self, angle, axis): """ Axis rotation is a rotation about an arbitrary axis by some angle. The angle is supplied as a Diofant expr scalar, and the axis is supplied as a Vector. Parameters ========== angle : Expr The angle by which the new system is to be rotated axis : Vector The axis around which the rotation has to be performed Examples ======== >>> from diofant.vector import CoordSysCartesian >>> q1 = symbols('q1') >>> N = CoordSysCartesian('N') >>> orienter = AxisOrienter(q1, N.i + 2 * N.j) >>> B = N.orient_new('B', [orienter]) """ # Dummy initializer for docstrings @cacheit def rotation_matrix(self, system): """ The rotation matrix corresponding to this orienter instance. Parameters ========== system : CoordSysCartesian The coordinate system wrt which the rotation matrix is to be computed """ from .functions import express axis = express(self.axis, system).normalize() axis = axis.to_matrix(system) theta = self.angle parent_orient = ((eye(3) - axis * axis.T) * cos(theta) + ImmutableMatrix([[0, -axis[2], axis[1]], [axis[2], 0, -axis[0]], [-axis[1], axis[0], 0]]) * sin(theta) + axis * axis.T) parent_orient = parent_orient.T return parent_orient @property def angle(self): return self._angle @property def axis(self): return self._axis class ThreeAngleOrienter(Orienter): """Super-class for Body and Space orienters.""" def __new__(cls, angle1, angle2, angle3, rot_order): approved_orders = ('123', '231', '312', '132', '213', '321', '121', '131', '212', '232', '313', '323', '') original_rot_order = rot_order rot_order = str(rot_order).upper() if len(rot_order) != 3: raise TypeError('rot_order should be a str of length 3') rot_order = [i.replace('X', '1') for i in rot_order] rot_order = [i.replace('Y', '2') for i in rot_order] rot_order = [i.replace('Z', '3') for i in rot_order] rot_order = ''.join(rot_order) if rot_order not in approved_orders: raise TypeError('Invalid rot_type parameter') a1 = int(rot_order[0]) a2 = int(rot_order[1]) a3 = int(rot_order[2]) angle1 = sympify(angle1) angle2 = sympify(angle2) angle3 = sympify(angle3) if cls._in_order: parent_orient = (_rot(a1, angle1) * _rot(a2, angle2) * _rot(a3, angle3)) else: parent_orient = (_rot(a3, angle3) * _rot(a2, angle2) * _rot(a1, angle1)) parent_orient = parent_orient.T if not isinstance(original_rot_order, Symbol): original_rot_order = Symbol(original_rot_order) obj = super().__new__(cls, angle1, angle2, angle3, original_rot_order) obj._angle1 = angle1 obj._angle2 = angle2 obj._angle3 = angle3 obj._rot_order = str(original_rot_order) obj._parent_orient = parent_orient return obj @property def angle1(self): return self._angle1 @property def angle2(self): return self._angle2 @property def angle3(self): return self._angle3 @property def rot_order(self): return self._rot_order class BodyOrienter(ThreeAngleOrienter): """Class to denote a body-orienter.""" _in_order = True 
def __new__(cls, angle1, angle2, angle3, rot_order): obj = ThreeAngleOrienter.__new__(cls, angle1, angle2, angle3, rot_order) return obj def __init__(self, angle1, angle2, angle3, rot_order): """ Body orientation takes this coordinate system through three successive simple rotations. Body fixed rotations include both Euler Angles and Tait-Bryan Angles, see https://en.wikipedia.org/wiki/Euler_angles. Parameters ========== angle1, angle2, angle3 : Expr Three successive angles to rotate the coordinate system by rotation_order : string String defining the order of axes for rotation Examples ======== >>> from diofant.vector import CoordSysCartesian >>> q1, q2, q3 = symbols('q1 q2 q3') >>> N = CoordSysCartesian('N') A 'Body' fixed rotation is described by three angles and three body-fixed rotation axes. To orient a coordinate system D with respect to N, each sequential rotation is always about the orthogonal unit vectors fixed to D. For example, a '123' rotation will specify rotations about N.i, then D.j, then D.k. (Initially, D.i is same as N.i) Therefore, >>> body_orienter = BodyOrienter(q1, q2, q3, '123') >>> D = N.orient_new('D', [body_orienter]) is same as >>> axis_orienter1 = AxisOrienter(q1, N.i) >>> D = N.orient_new('D', [axis_orienter1]) >>> axis_orienter2 = AxisOrienter(q2, D.j) >>> D = D.orient_new('D', [axis_orienter2]) >>> axis_orienter3 = AxisOrienter(q3, D.k) >>> D = D.orient_new('D', [axis_orienter3]) Acceptable rotation orders are of length 3, expressed in XYZ or 123, and cannot have a rotation about about an axis twice in a row. >>> body_orienter1 = BodyOrienter(q1, q2, q3, '123') >>> body_orienter2 = BodyOrienter(q1, q2, 0, 'ZXZ') >>> body_orienter3 = BodyOrienter(0, 0, 0, 'XYX') """ # Dummy initializer for docstrings class SpaceOrienter(ThreeAngleOrienter): """Class to denote a space-orienter.""" _in_order = False def __new__(cls, angle1, angle2, angle3, rot_order): obj = ThreeAngleOrienter.__new__(cls, angle1, angle2, angle3, rot_order) return obj def __init__(self, angle1, angle2, angle3, rot_order): """ Space rotation is similar to Body rotation, but the rotations are applied in the opposite order. Parameters ========== angle1, angle2, angle3 : Expr Three successive angles to rotate the coordinate system by rotation_order : string String defining the order of axes for rotation See Also ======== BodyOrienter : Orienter to orient systems wrt Euler angles. Examples ======== >>> from diofant.vector import CoordSysCartesian >>> q1, q2, q3 = symbols('q1 q2 q3') >>> N = CoordSysCartesian('N') To orient a coordinate system D with respect to N, each sequential rotation is always about N's orthogonal unit vectors. For example, a '123' rotation will specify rotations about N.i, then N.j, then N.k. 
Therefore, >>> space_orienter = SpaceOrienter(q1, q2, q3, '312') >>> D = N.orient_new('D', [space_orienter]) is same as >>> axis_orienter1 = AxisOrienter(q1, N.i) >>> B = N.orient_new('B', [axis_orienter1]) >>> axis_orienter2 = AxisOrienter(q2, N.j) >>> C = B.orient_new('C', [axis_orienter2]) >>> axis_orienter3 = AxisOrienter(q3, N.k) >>> D = C.orient_new('C', [axis_orienter3]) """ # Dummy initializer for docstrings class QuaternionOrienter(Orienter): """Class to denote a quaternion-orienter.""" def __new__(cls, q0, q1, q2, q3): q0 = sympify(q0) q1 = sympify(q1) q2 = sympify(q2) q3 = sympify(q3) parent_orient = (ImmutableMatrix([[q0 ** 2 + q1 ** 2 - q2 ** 2 - q3 ** 2, 2 * (q1 * q2 - q0 * q3), 2 * (q0 * q2 + q1 * q3)], [2 * (q1 * q2 + q0 * q3), q0 ** 2 - q1 ** 2 + q2 ** 2 - q3 ** 2, 2 * (q2 * q3 - q0 * q1)], [2 * (q1 * q3 - q0 * q2), 2 * (q0 * q1 + q2 * q3), q0 ** 2 - q1 ** 2 - q2 ** 2 + q3 ** 2]])) parent_orient = parent_orient.T obj = super().__new__(cls, q0, q1, q2, q3) obj._q0 = q0 obj._q1 = q1 obj._q2 = q2 obj._q3 = q3 obj._parent_orient = parent_orient return obj def __init__(self, angle1, angle2, angle3, rot_order): """ Quaternion orientation orients the new CoordSysCartesian with Quaternions, defined as a finite rotation about lambda, a unit vector, by some amount theta. This orientation is described by four parameters: q0 = cos(theta/2) q1 = lambda_x sin(theta/2) q2 = lambda_y sin(theta/2) q3 = lambda_z sin(theta/2) Quaternion does not take in a rotation order. Parameters ========== q0, q1, q2, q3 : Expr The quaternions to rotate the coordinate system by Examples ======== >>> from diofant.vector import CoordSysCartesian >>> q0, q1, q2, q3 = symbols('q0 q1 q2 q3') >>> N = CoordSysCartesian('N') >>> q_orienter = QuaternionOrienter(q0, q1, q2, q3) >>> B = N.orient_new('B', [q_orienter]) """ # Dummy initializer for docstrings @property def q0(self): return self._q0 @property def q1(self): return self._q1 @property def q2(self): return self._q2 @property def q3(self): return self._q3 def _rot(axis, angle): """DCM for simple axis 1, 2 or 3 rotations.""" if axis == 1: return ImmutableMatrix(rot_axis1(angle).T) elif axis == 2: return ImmutableMatrix(rot_axis2(angle).T) elif axis == 3: return ImmutableMatrix(rot_axis3(angle).T) else: raise NotImplementedError
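The BodyOrienter docstring above claims that a '123' body rotation equals a chain of body-fixed axis rotations; that claim can be checked directly. A hedged sketch (assuming CoordSysCartesian.orient_new and rotation_matrix behave as in the docstrings above):

from diofant import simplify, symbols
from diofant.vector import AxisOrienter, BodyOrienter, CoordSysCartesian

q1, q2, q3 = symbols('q1 q2 q3')
N = CoordSysCartesian('N')
D = N.orient_new('D', [BodyOrienter(q1, q2, q3, '123')])
# Rebuild D one body-fixed axis rotation at a time, as the docstring
# describes.
A = N.orient_new('A', [AxisOrienter(q1, N.i)])
B = A.orient_new('B', [AxisOrienter(q2, A.j)])
C = B.orient_new('C', [AxisOrienter(q3, B.k)])
# The direction cosine matrices should agree, so this prints a zero matrix.
print((D.rotation_matrix(N) - C.rotation_matrix(N)).applyfunc(simplify))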
diofant/diofant
diofant/tests/functions/test_delta_functions.py
diofant/vector/orienters.py
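One property worth remembering about the QuaternionOrienter defined in the file above: its matrix is a proper rotation exactly when the quaternion is normalized, q0**2 + q1**2 + q2**2 + q3**2 == 1. A small orthogonality check for a rotation by pi/3 about the z-axis (a sketch using only the classes defined above):

from diofant import cos, eye, pi, sin
from diofant.vector.orienters import QuaternionOrienter

theta = pi/3
q = QuaternionOrienter(cos(theta/2), 0, 0, sin(theta/2))
R = q.rotation_matrix()
# For a unit quaternion R is orthogonal: R*R.T == eye(3), so this
# prints a zero matrix.
print((R*R.T - eye(3)).applyfunc(lambda e: e.simplify()))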
""" Algorithms for solving the Risch differential equation. Given a differential field K of characteristic 0 that is a simple monomial extension of a base field k and f, g in K, the Risch Differential Equation problem is to decide if there exist y in K such that Dy + f*y == g and to find one if there are some. If t is a monomial over k and the coefficients of f and g are in k(t), then y is in k(t), and the outline of the algorithm here is given as: 1. Compute the normal part n of the denominator of y. The problem is then reduced to finding y' in k<t>, where y == y'/n. 2. Compute the special part s of the denominator of y. The problem is then reduced to finding y'' in k[t], where y == y''/(n*s) 3. Bound the degree of y''. 4. Reduce the equation Dy + f*y == g to a similar equation with f, g in k[t]. 5. Find the solutions in k[t] of bounded degree of the reduced equation. See Chapter 6 of "Symbolic Integration I: Transcendental Functions" by Manuel Bronstein. See also the docstring of risch.py. """ import functools import operator from ..core import Dummy, oo from ..domains import ZZ from ..functions import im, re, sqrt from ..polys import Poly, cancel, gcd from .risch import (DecrementLevel, NonElementaryIntegralException, derivation, frac_in, gcdex_diophantine, recognize_log_derivative, splitfactor) # TODO: Add messages to NonElementaryIntegralException errors def order_at(a, p, t): """ Computes the order of a at p, with respect to t. For a, p in k[t], the order of a at p is defined as nu_p(a) = max({n in Z+ such that p**n|a}), where a != 0. If a == 0, nu_p(a) = +oo. To compute the order at a rational function, a/b, use the fact that nu_p(a/b) == nu_p(a) - nu_p(b). """ if a.is_zero: return oo if p == Poly(t, t): return a.as_poly(t).ET()[0][0] # Uses binary search for calculating the power. power_list collects the tuples # (p^k,k) where each k is some power of 2. After deciding the largest k # such that k is power of 2 and p^k|a the loop iteratively calculates # the actual power. power_list = [] p1 = p r = a.rem(p1) tracks_power = 1 while r.is_zero: power_list.append((p1, tracks_power)) p1 = p1*p1 tracks_power *= 2 r = a.rem(p1) n = 0 product = Poly(1, t) while len(power_list) != 0: final = power_list.pop() productf = product*final[0] r = a.rem(productf) if r.is_zero: n += final[1] product = productf return n def order_at_oo(a, d, t): """ Computes the order of a/d at oo (infinity), with respect to t. For f in k(t), the order or f at oo is defined as deg(d) - deg(a), where f == a/d. """ if a.is_zero: return oo return d.degree(t) - a.degree(t) def weak_normalizer(a, d, DE, z=None): """ Weak normalization. Given a derivation D on k[t] and f == a/d in k(t), return q in k[t] such that f - Dq/q is weakly normalized with respect to t. f in k(t) is said to be "weakly normalized" with respect to t if residue_p(f) is not a positive integer for any normal irreducible p in k[t] such that f is in R_p (Definition 6.1.1). If f has an elementary integral, this is equivalent to no logarithm of integral(f) whose argument depends on t has a positive integer coefficient, where the arguments of the logarithms not in k(t) are in k[t]. Returns (q, f - Dq/q) """ z = z or Dummy('z') dn, ds = splitfactor(d, DE) # Compute d1, where dn == d1*d2**2*...*dn**n is a square-free # factorization of d. 
g = gcd(dn, dn.diff(DE.t)) d_sqf_part = dn.quo(g) d1 = d_sqf_part.quo(gcd(d_sqf_part, g)) a1, b = gcdex_diophantine(d.quo(d1).as_poly(DE.t), d1.as_poly(DE.t), a.as_poly(DE.t)) r = (a - Poly(z, DE.t)*derivation(d1, DE)).as_poly(DE.t).resultant( d1.as_poly(DE.t)) r = Poly(r, z) if not r.has(z): return Poly(1, DE.t), (a, d) N = [i for i in r.real_roots() if i in ZZ and i > 0] q = functools.reduce(operator.mul, [gcd(a - Poly(n, DE.t)*derivation(d1, DE), d1) for n in N], Poly(1, DE.t)) q = q.monic() dq = derivation(q, DE) sn = q*a - d*dq sd = q*d sn, sd = sn.cancel(sd, include=True) return q, (sn, sd) def normal_denom(fa, fd, ga, gd, DE): """ Normal part of the denominator. Given a derivation D on k[t] and f, g in k(t) with f weakly normalized with respect to t, either raise NonElementaryIntegralException, in which case the equation Dy + f*y == g has no solution in k(t), or the quadruplet (a, b, c, h) such that a, h in k[t], b, c in k<t>, and for any solution y in k(t) of Dy + f*y == g, q = y*h in k<t> satisfies a*Dq + b*q == c. This constitutes step 1 in the outline given in the rde.py docstring. """ dn, ds = splitfactor(fd, DE) en, es = splitfactor(gd, DE) p = dn.gcd(en) h = en.gcd(en.diff(DE.t)).quo(p.gcd(p.diff(DE.t))) a = dn*h c = a*h if c.div(en)[1]: # en does not divide dn*h**2 raise NonElementaryIntegralException ca = c*ga ca, cd = ca.cancel(gd, include=True) ba = a*fa - dn*derivation(h, DE)*fd ba, bd = ba.cancel(fd, include=True) # (dn*h, dn*h*f - dn*Dh, dn*h**2*g, h) return a, (ba, bd), (ca, cd), h def special_denom(a, ba, bd, ca, cd, DE, case='auto'): """ Special part of the denominator. case is one of {'exp', 'tan', 'primitive'} for the hyperexponential, hypertangent, and primitive cases, respectively. For the hyperexponential (resp. hypertangent) case, given a derivation D on k[t] and a in k[t], b, c, in k<t> with Dt/t in k (resp. Dt/(t**2 + 1) in k, sqrt(-1) not in k), a != 0, and gcd(a, t) == 1 (resp. gcd(a, t**2 + 1) == 1), return the quadruplet (A, B, C, 1/h) such that A, B, C, h in k[t] and for any solution q in k<t> of a*Dq + b*q == c, r = qh in k[t] satisfies A*Dr + B*r == C. For case == 'primitive', k<t> == k[t], so it returns (a, b, c, 1) in this case. This constitutes step 2 of the outline given in the rde.py docstring. """ from .prde import parametric_log_deriv # TODO: finish writing this and write tests if case == 'auto': case = DE.case if case == 'exp': p = Poly(DE.t, DE.t) elif case == 'tan': p = Poly(DE.t**2 + 1, DE.t) elif case in ['primitive', 'base']: B = ba.to_field().quo(bd) C = ca.to_field().quo(cd) return a, B, C, Poly(1, DE.t) else: raise ValueError("case must be one of {'exp', 'tan', 'primitive', " f"'base'}}, not {case!s}.") nb = order_at(ba, p, DE.t) - order_at(bd, p, DE.t) nc = order_at(ca, p, DE.t) - order_at(cd, p, DE.t) n = min(0, nc - min(0, nb)) if not nb: # Possible cancellation. if case == 'exp': dcoeff = DE.d.quo(Poly(DE.t, DE.t)) # We are guaranteed to not have problems, # because case != 'base'. with DecrementLevel(DE): alphaa, alphad = frac_in(-ba.eval(0)/bd.eval(0)/a.eval(0), DE.t) etaa, etad = frac_in(dcoeff, DE.t) A = parametric_log_deriv(alphaa, alphad, etaa, etad, DE) if A is not None: a, m, z = A if a == 1: n = min(n, m) elif case == 'tan': dcoeff = DE.d.quo(Poly(DE.t**2+1, DE.t)) # We are guaranteed to not have problems, # because case != 'base'. 
with DecrementLevel(DE): alphaa, alphad = frac_in(im(-ba.eval(sqrt(-1))/bd.eval(sqrt(-1))/a.eval(sqrt(-1))), DE.t) betaa, betad = frac_in(re(-ba.eval(sqrt(-1))/bd.eval(sqrt(-1))/a.eval(sqrt(-1))), DE.t) etaa, etad = frac_in(dcoeff, DE.t) if recognize_log_derivative(2*betaa, betad, DE): A = parametric_log_deriv(alphaa*sqrt(-1)*betad+alphad*betaa, alphad*betad, etaa, etad, DE) if A is not None: a, m, z = A if a == 1: n = min(n, m) N = max(0, -nb, n - nc) pN = p**N pn = p**-n A = a*pN B = ba*pN.quo(bd) + Poly(n, DE.t)*a*derivation(p, DE).quo(p)*pN C = (ca*pN*pn).quo(cd) h = pn # (a*p**N, (b + n*a*Dp/p)*p**N, c*p**(N - n), p**-n) return A, B, C, h def bound_degree(a, b, cQ, DE, case='auto', parametric=False): """ Bound on polynomial solutions. Given a derivation D on k[t] and a, b, c in k[t] with a != 0, return n in ZZ such that deg(q) <= n for any solution q in k[t] of a*Dq + b*q == c, when parametric=False, or deg(q) <= n for any solution c1, ..., cm in Const(k) and q in k[t] of a*Dq + b*q == Sum(ci*gi, (i, 1, m)) when parametric=True. For parametric=False, cQ is c, a Poly; for parametric=True, cQ is Q == [q1, ..., qm], a list of Polys. This constitutes step 3 of the outline given in the rde.py docstring. """ from .prde import (is_log_deriv_k_t_radical_in_field, limited_integrate, parametric_log_deriv) # TODO: finish writing this and write tests if case == 'auto': case = DE.case da = a.degree(DE.t) db = b.degree(DE.t) # The parametric and regular cases are identical, except for this part if parametric: dc = max(i.degree(DE.t) for i in cQ) else: dc = cQ.degree(DE.t) alpha = cancel(-b.as_poly(DE.t).LC().as_expr() / a.as_poly(DE.t).LC().as_expr()) if case == 'base': n = max(0, dc - max(db, da - 1)) if db == da - 1 and alpha.is_Integer: n = max(0, alpha, dc - db) elif case == 'primitive': if db > da: n = max(0, dc - db) else: n = max(0, dc - da + 1) etaa, etad = frac_in(DE.d, DE.T[DE.level - 1]) t1 = DE.t with DecrementLevel(DE): alphaa, alphad = frac_in(alpha, DE.t) if db == da - 1: # if alpha == m*Dt + Dz for z in k and m in ZZ: try: (za, zd), m = limited_integrate(alphaa, alphad, [(etaa, etad)], DE) except NonElementaryIntegralException: pass else: if len(m) != 1: raise ValueError('Length of m should be 1') n = max(n, m[0]) elif db == da: # if alpha == Dz/z for z in k*: # beta = -lc(a*Dz + b*z)/(z*lc(a)) # if beta == m*Dt + Dw for w in k and m in ZZ: # n = max(n, m) A = is_log_deriv_k_t_radical_in_field(alphaa, alphad, DE) if A is not None: aa, z = A if aa == 1: beta = -(a*derivation(z, DE).as_poly(t1) + b*z.as_poly(t1)).LC()/(z.as_expr()*a.LC()) betaa, betad = frac_in(beta, DE.t) try: (za, zd), m = limited_integrate(betaa, betad, [(etaa, etad)], DE) except NonElementaryIntegralException: pass else: if len(m) != 1: raise ValueError('Length of m should be 1') n = max(n, m[0]) elif case == 'exp': n = max(0, dc - max(db, da)) if da == db: etaa, etad = frac_in(DE.d.quo(Poly(DE.t, DE.t)), DE.T[DE.level - 1]) with DecrementLevel(DE): alphaa, alphad = frac_in(alpha, DE.t) A = parametric_log_deriv(alphaa, alphad, etaa, etad, DE) if A is not None: # if alpha == m*Dt/t + Dz/z for z in k* and m in ZZ: # n = max(n, m) a, m, z = A if a == 1: n = max(n, m) elif case in ['tan', 'other_nonlinear']: delta = DE.d.degree(DE.t) lam = DE.d.LC() alpha = cancel(alpha/lam) n = max(0, dc - max(da + delta - 1, db)) if db == da + delta - 1 and alpha.is_Integer: n = max(0, alpha, dc - db) else: raise ValueError("case must be one of {'exp', 'tan', 'primitive', " f"'other_nonlinear', 'base'}}, not {case!s}.") return n def 
spde(a, b, c, n, DE): """ Rothstein's Special Polynomial Differential Equation algorithm. Given a derivation D on k[t], an integer n and a, b, c in k[t] with a != 0, either raise NonElementaryIntegralException, in which case the equation a*Dq + b*q == c has no solution of degree at most n in k[t], or return the tuple (B, C, m, alpha, beta) such that B, C, alpha, beta in k[t], m in ZZ, and any solution q in k[t] of degree at most n of a*Dq + b*q == c must be of the form q == alpha*h + beta, where h in k[t], deg(h) <= m, and Dh + B*h == C. This constitutes step 4 of the outline given in the rde.py docstring. """ zero = Poly(0, DE.t) alpha = Poly(1, DE.t) beta = Poly(0, DE.t) while True: if c.is_zero: return zero, zero, 0, zero, beta # -1 is more to the point if (n < 0) is True: raise NonElementaryIntegralException g = a.gcd(b) if not c.rem(g).is_zero: # g does not divide c raise NonElementaryIntegralException a, b, c = a.quo(g), b.quo(g), c.quo(g) if a.degree(DE.t) == 0: b = b.to_field().quo(a) c = c.to_field().quo(a) return b, c, n, alpha, beta r, z = gcdex_diophantine(b, a, c) b += derivation(a, DE) c = z - derivation(r, DE) n -= a.degree(DE.t) beta += alpha * r alpha *= a def no_cancel_b_large(b, c, n, DE): """ Poly Risch Differential Equation - No cancellation: deg(b) large enough. Given a derivation D on k[t], n either an integer or +oo, and b, c in k[t] with b != 0 and either D == d/dt or deg(b) > max(0, deg(D) - 1), either raise NonElementaryIntegralException, in which case the equation Dq + b*q == c has no solution of degree at most n in k[t], or a solution q in k[t] of this equation with deg(q) < n. """ q = Poly(0, DE.t) while not c.is_zero: m = c.degree(DE.t) - b.degree(DE.t) if not 0 <= m <= n: # n < 0 or m < 0 or m > n raise NonElementaryIntegralException p = Poly(c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC()*DE.t**m, DE.t, expand=False) q = q + p n = m - 1 c = c - derivation(p, DE) - b*p return q def no_cancel_b_small(b, c, n, DE): """ Poly Risch Differential Equation - No cancellation: deg(b) small enough. Given a derivation D on k[t], n either an integer or +oo, and b, c in k[t] with deg(b) < deg(D) - 1 and either D == d/dt or deg(D) >= 2, either raise NonElementaryIntegralException, in which case the equation Dq + b*q == c has no solution of degree at most n in k[t], or a solution q in k[t] of this equation with deg(q) <= n, or the tuple (h, b0, c0) such that h in k[t], b0, c0, in k, and for any solution q in k[t] of degree at most n of Dq + bq == c, y == q - h is a solution in k of Dy + b0*y == c0. 
""" q = Poly(0, DE.t) while not c.is_zero: if n == 0: m = 0 else: m = c.degree(DE.t) - DE.d.degree(DE.t) + 1 if not 0 <= m <= n: # n < 0 or m < 0 or m > n raise NonElementaryIntegralException if m > 0: p = Poly(c.as_poly(DE.t).LC()/(m*DE.d.as_poly(DE.t).LC())*DE.t**m, DE.t, expand=False) else: if b.degree(DE.t) != c.degree(DE.t): raise NonElementaryIntegralException if b.degree(DE.t) == 0: return (q, b.as_poly(DE.T[DE.level - 1]), c.as_poly(DE.T[DE.level - 1])) p = Poly(c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC(), DE.t, expand=False) q = q + p n = m - 1 c = c - derivation(p, DE) - b*p return q # TODO: better name for this function def no_cancel_equal(b, c, n, DE): """ Poly Risch Differential Equation - No cancellation: deg(b) == deg(D) - 1 Given a derivation D on k[t] with deg(D) >= 2, n either an integer or +oo, and b, c in k[t] with deg(b) == deg(D) - 1, either raise NonElementaryIntegralException, in which case the equation Dq + b*q == c has no solution of degree at most n in k[t], or a solution q in k[t] of this equation with deg(q) <= n, or the tuple (h, m, C) such that h in k[t], m in ZZ, and C in k[t], and for any solution q in k[t] of degree at most n of Dq + b*q == c, y == q - h is a solution in k[t] of degree at most m of Dy + b*y == C. """ q = Poly(0, DE.t) lc = cancel(-b.as_poly(DE.t).LC()/DE.d.as_poly(DE.t).LC()) if lc.is_Integer and lc.is_positive: M = lc else: M = -1 while not c.is_zero: m = max(M, c.degree(DE.t) - DE.d.degree(DE.t) + 1) if not 0 <= m <= n: # n < 0 or m < 0 or m > n raise NonElementaryIntegralException u = cancel(m*DE.d.as_poly(DE.t).LC() + b.as_poly(DE.t).LC()) if u.is_zero: return q, m, c if m > 0: p = Poly(c.as_poly(DE.t).LC()/u*DE.t**m, DE.t, expand=False) else: if c.degree(DE.t) != DE.d.degree(DE.t) - 1: raise NonElementaryIntegralException else: p = c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC() q = q + p n = m - 1 c = c - derivation(p, DE) - b*p return q def cancel_primitive(b, c, n, DE): """ Poly Risch Differential Equation - Cancellation: Primitive case. Given a derivation D on k[t], n either an integer or +oo, b in k, and c in k[t] with Dt in k and b != 0, either raise NonElementaryIntegralException, in which case the equation Dq + b*q == c has no solution of degree at most n in k[t], or a solution q in k[t] of this equation with deg(q) <= n. """ from .prde import is_log_deriv_k_t_radical_in_field with DecrementLevel(DE): ba, bd = frac_in(b, DE.t) A = is_log_deriv_k_t_radical_in_field(ba, bd, DE) if A is not None: n, z = A if n == 1: # b == Dz/z raise NotImplementedError('is_deriv_in_field() is required to ' ' solve this problem.') # if z*c == Dp for p in k[t] and deg(p) <= n: # return p/z # else: # raise NonElementaryIntegralException if c.is_zero: return c # return 0 if n < c.degree(DE.t): raise NonElementaryIntegralException q = Poly(0, DE.t) while not c.is_zero: m = c.degree(DE.t) if n < m: raise NonElementaryIntegralException with DecrementLevel(DE): a2a, a2d = frac_in(c.LC(), DE.t) sa, sd = rischDE(ba, bd, a2a, a2d, DE) stm = Poly(sa.as_expr()/sd.as_expr()*DE.t**m, DE.t, expand=False) q += stm n = m - 1 c -= b*stm + derivation(stm, DE) return q def cancel_exp(b, c, n, DE): """ Poly Risch Differential Equation - Cancellation: Hyperexponential case. Given a derivation D on k[t], n either an integer or +oo, b in k, and c in k[t] with Dt/t in k and b != 0, either raise NonElementaryIntegralException, in which case the equation Dq + b*q == c has no solution of degree at most n in k[t], or a solution q in k[t] of this equation with deg(q) <= n. 
""" from .prde import parametric_log_deriv eta = DE.d.quo(Poly(DE.t, DE.t)).as_expr() with DecrementLevel(DE): etaa, etad = frac_in(eta, DE.t) ba, bd = frac_in(b, DE.t) A = parametric_log_deriv(ba, bd, etaa, etad, DE) if A is not None: a, m, z = A if a == 1: raise NotImplementedError('is_deriv_in_field() is required to ' 'solve this problem.') # if c*z*t**m == Dp for p in k<t> and q = p/(z*t**m) in k[t] and # deg(q) <= n: # return q # else: # raise NonElementaryIntegralException if c.is_zero: return c # return 0 if n < c.degree(DE.t): raise NonElementaryIntegralException q = Poly(0, DE.t) while not c.is_zero: m = c.degree(DE.t) if n < m: raise NonElementaryIntegralException # a1 = b + m*Dt/t a1 = b.as_expr() with DecrementLevel(DE): # TODO: Write a dummy function that does this idiom a1a, a1d = frac_in(a1, DE.t) a1a = a1a*etad + etaa*a1d*Poly(m, DE.t) a1d = a1d*etad a2a, a2d = frac_in(c.LC(), DE.t) sa, sd = rischDE(a1a, a1d, a2a, a2d, DE) stm = Poly(sa.as_expr()/sd.as_expr()*DE.t**m, DE.t, expand=False) q += stm n = m - 1 c -= b*stm + derivation(stm, DE) # deg(c) becomes smaller return q def solve_poly_rde(b, cQ, n, DE, parametric=False): """ Solve a Polynomial Risch Differential Equation with degree bound n. This constitutes step 4 of the outline given in the rde.py docstring. For parametric=False, cQ is c, a Poly; for parametric=True, cQ is Q == [q1, ..., qm], a list of Polys. """ from .prde import prde_no_cancel_b_large, prde_no_cancel_b_small # No cancellation if not b.is_zero and (DE.case == 'base' or b.degree(DE.t) > max(0, DE.d.degree(DE.t) - 1)): if parametric: return prde_no_cancel_b_large(b, cQ, n, DE) return no_cancel_b_large(b, cQ, n, DE) elif (b.is_zero or b.degree(DE.t) < DE.d.degree(DE.t) - 1) and \ (DE.case == 'base' or DE.d.degree(DE.t) >= 2): if parametric: return prde_no_cancel_b_small(b, cQ, n, DE) R = no_cancel_b_small(b, cQ, n, DE) if isinstance(R, Poly): return R else: # XXX: Might k be a field? (pg. 209) h, b0, c0 = R with DecrementLevel(DE): b0, c0 = b0.as_poly(DE.t), c0.as_poly(DE.t) if b0 is None: # See above comment raise ValueError('b0 should be a non-Null value') if c0 is None: raise ValueError('c0 should be a non-Null value') y = solve_poly_rde(b0, c0, n, DE).as_poly(DE.t) return h + y elif DE.d.degree(DE.t) >= 2 and b.degree(DE.t) == DE.d.degree(DE.t) - 1 and \ n > -b.as_poly(DE.t).LC()/DE.d.as_poly(DE.t).LC(): # TODO: Is this check necessary, and if so, what should it do if it fails? # b comes from the first element returned from spde() if not b.as_poly(DE.t).LC().is_number: raise TypeError('Result should be a number') if parametric: raise NotImplementedError('prde_no_cancel_b_equal() is not yet ' 'implemented.') R = no_cancel_equal(b, cQ, n, DE) if isinstance(R, Poly): return R else: h, m, C = R # XXX: Or should it be rischDE()? 
y = solve_poly_rde(b, C, m, DE) return h + y else: # Cancellation if b.is_zero: raise NotImplementedError('Remaining cases for Poly (P)RDE are ' 'not yet implemented (is_deriv_in_field() required).') else: if DE.case == 'exp': if parametric: raise NotImplementedError('Parametric RDE cancellation ' 'hyperexponential case is not yet implemented.') return cancel_exp(b, cQ, n, DE) elif DE.case == 'primitive': if parametric: raise NotImplementedError('Parametric RDE cancellation ' 'primitive case is not yet implemented.') return cancel_primitive(b, cQ, n, DE) else: raise NotImplementedError('Other Poly (P)RDE cancellation ' f'cases are not yet implemented ({DE.case!s}).') raise NotImplementedError('Remaining cases for Poly RDE not yet ' 'implemented.') def rischDE(fa, fd, ga, gd, DE): """ Solve a Risch Differential Equation: Dy + f*y == g. See the outline in the docstring of rde.py for more information about the procedure used. Either raise NonElementaryIntegralException, in which case there is no solution y in the given differential field, or return y in k(t) satisfying Dy + f*y == g, or raise NotImplementedError, in which case, the algorithms necessary to solve the given Risch Differential Equation have not yet been implemented. """ _, (fa, fd) = weak_normalizer(fa, fd, DE) a, (ba, bd), (ca, cd), hn = normal_denom(fa, fd, ga, gd, DE) A, B, C, hs = special_denom(a, ba, bd, ca, cd, DE) try: # Until this is fully implemented, use oo. Note that this will almost # certainly cause non-termination in spde() (unless A == 1), and # *might* lead to non-termination in the next step for a nonelementary # integral (I don't know for certain yet). Fortunately, spde() is # currently written recursively, so this will just give # RuntimeError: maximum recursion depth exceeded. n = bound_degree(A, B, C, DE) except NotImplementedError: # Useful for debugging: # import warnings # warnings.warn("rischDE: Proceeding with n = oo; may cause " # "non-termination.") n = oo B, C, m, alpha, beta = spde(A, B, C, n, DE) if C.is_zero: y = C else: y = solve_poly_rde(B, C, m, DE) return alpha*y + beta, hn*hs
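order_at() and order_at_oo() above are the workhorses behind steps 1-2 of the outline in the module docstring; a short usage sketch (assuming the module path recorded for this file):

from diofant import Poly, Symbol
from diofant.integrals.rde import order_at, order_at_oo

t = Symbol('t')
a = Poly(t**3*(t + 1)**2, t)
# nu_p(a) is the multiplicity of the irreducible p in a.
print(order_at(a, Poly(t, t), t))      # 3
print(order_at(a, Poly(t + 1, t), t))  # 2
# At oo the order of a/d is deg(d) - deg(a).
print(order_at_oo(Poly(t, t), Poly(t**3 + 1, t), t))  # 2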
diofant/diofant
diofant/tests/functions/test_delta_functions.py
diofant/integrals/rde.py
"""This module is intended for solving recurrences (difference equations).""" import collections import functools from ..concrete import product from ..core import (Add, Dummy, Equality, Function, Integer, Lambda, Mul, Rational, Symbol, Wild, oo) from ..core.compatibility import iterable from ..core.sympify import sympify from ..functions import FallingFactorial, RisingFactorial, binomial, factorial from ..matrices import Matrix, casoratian from ..polys import Poly, gcd, lcm, quo, resultant, roots from ..simplify import hypersimilar, hypersimp from ..utilities import default_sort_key, numbered_symbols from .ode import constantsimp from .solvers import solve def rsolve_poly(coeffs, f, n): r""" Find polynomial solutions for linear recurrence. Given linear recurrence operator `\operatorname{L}` of order `k` with polynomial coefficients and inhomogeneous equation `\operatorname{L} y = f`, where `f` is a polynomial, we seek for all polynomial solutions over field `K` of characteristic zero. Notes ===== The algorithm performs two basic steps: 1. Compute degree `N` of the general polynomial solution. 2. Find all polynomials of degree `N` or less of `\operatorname{L} y = f`. There are two methods for computing the polynomial solutions. If the degree bound is relatively small, i.e. it's smaller than or equal to the order of the recurrence, then naive method of undetermined coefficients is being used. This gives system of algebraic equations with `N+1` unknowns. In the other case, the algorithm performs transformation of the initial equation to an equivalent one, for which the system of algebraic equations has only `r` indeterminates. This method is quite sophisticated (in comparison with the naive one) and was invented together by Abramov, Bronstein and Petkovšek. It is possible to generalize the algorithm implemented here to the case of linear q-difference and differential equations. 
Examples ======== Lets say that we would like to compute `m`-th Bernoulli polynomial up to a constant, using `b(n+1) - b(n) = m n^{m-1}` recurrence: >>> rsolve_poly([-1, 1], 4*n**3, n) (C0 + n**4 - 2*n**3 + n**2, [C0]) >>> bernoulli(4, n) n**4 - 2*n**3 + n**2 - 1/30 References ========== * :cite:`Abramov1995polynomial` * :cite:`Petkovsek1992hyper` * :cite:`Petkovsek1997AeqB` """ f = sympify(f) if not f.is_polynomial(n): return homogeneous = f.is_zero r = len(coeffs) - 1 coeffs = [Poly(coeff, n) for coeff in coeffs] g = functools.reduce(lambda x, y: gcd(x, y, n, polys=True), coeffs + [f]) if not g.is_ground: coeffs = [quo(c, g, n, polys=False) for c in coeffs] f = quo(f, g, n, polys=False) polys = [Poly(0, n)] * (r + 1) terms = [(Integer(0), -oo)] * (r + 1) for i in range(r + 1): for j in range(i, r + 1): polys[i] += coeffs[j] * binomial(j, i) if not polys[i].is_zero: (exp,), coeff = polys[i].LT() terms[i] = (coeff, exp) d = b = terms[0][1] for i in range(1, r + 1): if terms[i][1] > d: d = terms[i][1] if terms[i][1] - i > b: b = terms[i][1] - i d, b = int(d), int(b) x = Dummy('x') degree_poly = Integer(0) for i in range(r + 1): if terms[i][1] - i == b: degree_poly += terms[i][0] * FallingFactorial(x, i) nni_roots = list(roots(degree_poly, x, filter='Z', predicate=lambda r: r >= 0)) if nni_roots: N = [max(nni_roots)] else: N = [] if homogeneous: N += [-b - 1] else: N += [f.as_poly(n).degree() - b, -b - 1] N = int(max(N)) if N < 0: if homogeneous: return Integer(0), [] else: return if N <= r: C = [] y = E = Integer(0) for i in range(N + 1): C.append(Symbol('C' + str(i))) y += C[i] * n**i for i in range(r + 1): E += coeffs[i].as_expr() * y.subs({n: n + i}) solutions = solve((E - f).as_poly(n).coeffs(), *C) if solutions: solutions = solutions[0] C = [c for c in C if (c not in solutions)] result = y.subs(solutions) else: A = r U = N + A + b + 1 nni_roots = list(roots(polys[r], filter='Z', predicate=lambda r: r >= 0)) if nni_roots != []: a = max(nni_roots) + 1 else: a = Integer(0) def _zero_vector(k): return [Integer(0)] * k def _one_vector(k): return [Integer(1)] * k def _delta(p, k): B = Integer(1) D = p.subs({n: a + k}) for i in range(1, k + 1): B *= -Rational(k - i + 1, i) D += B * p.subs({n: a + k - i}) return D alpha = {} for i in range(-A, d + 1): E = _one_vector(d + 1) for k in range(1, d + 1): E[k] = E[k - 1] * (x + i - k + 1) / k alpha[i] = Integer(0) for j in range(A + 1): for k in range(d + 1): B = binomial(k, i + j) D = _delta(polys[j].as_expr(), k) alpha[i] += E[k] * B * D V = Matrix(U, A, lambda i, j: int(i == j)) if homogeneous: for i in range(A, U): v = _zero_vector(A) for k in range(1, A + b + 1): if i - k < 0: break B = alpha[k - A].subs({x: i - k}) for j in range(A): v[j] += B * V[i - k, j] denom = alpha[-A].subs({x: i}) for j in range(A): V[i, j] = -v[j] / denom else: G = _zero_vector(U) for i in range(A, U): v = _zero_vector(A) g = Integer(0) for k in range(1, A + b + 1): if i - k < 0: break B = alpha[k - A].subs({x: i - k}) for j in range(A): v[j] += B * V[i - k, j] g += B * G[i - k] denom = alpha[-A].subs({x: i}) for j in range(A): V[i, j] = -v[j] / denom G[i] = (_delta(f, i - A) - g) / denom P, Q = _one_vector(U), _zero_vector(A) for i in range(1, U): P[i] = (P[i - 1] * (n - a - i + 1) / i).expand() for i in range(A): Q[i] = Add(*[(v * p).expand() for v, p in zip(V[:, i], P)]) if not homogeneous: h = Add(*[(g * p).expand() for g, p in zip(G, P)]) C = [Symbol('C' + str(i)) for i in range(A)] def g(i): return Add(*[c * _delta(q, i) for c, q in zip(C, Q)]) if homogeneous: E 
= [g(i) for i in range(N + 1, U)] else: E = [g(i) + _delta(h, i) for i in range(N + 1, U)] if E != []: solutions = solve(E, *C) solutions = solutions[0] else: solutions = {} if homogeneous: result = Integer(0) else: result = h for c, q in list(zip(C, Q)): if c in solutions: s = solutions[c] * q C.remove(c) else: s = c * q result += s.expand() return result, C def rsolve_ratio(coeffs, f, n): r""" Find rational solutions for linear recurrence. Given linear recurrence operator `\operatorname{L}` of order `k` with polynomial coefficients and inhomogeneous equation `\operatorname{L} y = f`, where `f` is a polynomial, we seek for all rational solutions over field `K` of characteristic zero. Notes ===== The algorithm performs two basic steps: 1. Compute polynomial `v(n)` which can be used as universal denominator of any rational solution of equation `\operatorname{L} y = f`. 2. Construct new linear difference equation by substitution `y(n) = u(n)/v(n)` and solve it for `u(n)` finding all its polynomial solutions. Return :obj:`None` if none were found. Algorithm implemented here is a revised version of the original Abramov's algorithm, developed in 1989. The new approach is much simpler to implement and has better overall efficiency. This method can be easily adapted to q-difference equations case. Besides finding rational solutions alone, this functions is an important part of the Hyper algorithm were it is used to find particular solution of inhomogeneous part of a recurrence. Examples ======== >>> rsolve_ratio([-2*n**3 + n**2 + 2*n - 1, 2*n**3 + n**2 - 6*n, ... -2*n**3 - 11*n**2 - 18*n - 9, ... 2*n**3 + 13*n**2 + 22*n + 8], 0, n) (C2*(2*n - 3)/(2*(n**2 - 1)), [C2]) References ========== * :cite:`Abramov1995rational` See Also ======== rsolve_hyper """ f = sympify(f) if not f.is_polynomial(n): return coeffs = list(map(sympify, coeffs)) r = len(coeffs) - 1 A, B = coeffs[r], coeffs[0] A = A.subs({n: n - r}).expand() h = Dummy('h') res = resultant(A, B.subs({n: n + h}), n) assert res.is_polynomial(n) nni_roots = list(roots(res, h, filter='Z', predicate=lambda r: r >= 0)) if not nni_roots: return rsolve_poly(coeffs, f, n) else: C, numers = Integer(1), [Integer(0)] * (r + 1) for i in range(max(nni_roots), -1, -1): d = gcd(A, B.subs({n: n + i}), n) A = quo(A, d, n) B = quo(B, d.subs({n: n - i}), n) C *= Mul(*[d.subs({n: n - j}) for j in range(i + 1)]) denoms = [C.subs({n: n + i}) for i in range(r + 1)] for i in range(r + 1): g = gcd(coeffs[i], denoms[i], n) numers[i] = quo(coeffs[i], g, n) denoms[i] = quo(denoms[i], g, n) for i in range(r + 1): numers[i] *= Mul(*(denoms[:i] + denoms[i + 1:])) result = rsolve_poly(numers, f * Mul(*denoms), n) if result is not None: return (result[0] / C).simplify(), result[1] def rsolve_hyper(coeffs, f, n): r""" Find hypergeometric solutions for linear recurrence. Given linear recurrence operator `\operatorname{L}` of order `k` with polynomial coefficients and inhomogeneous equation `\operatorname{L} y = f` we seek for all hypergeometric solutions over field `K` of characteristic zero. The inhomogeneous part can be either hypergeometric or a sum of a fixed number of pairwise dissimilar hypergeometric terms. Notes ===== The algorithm performs three basic steps: 1. Group together similar hypergeometric terms in the inhomogeneous part of `\operatorname{L} y = f`, and find particular solution using Abramov's algorithm. 2. Compute generating set of `\operatorname{L}` and find basis in it, so that all solutions are linearly independent. 3. 
Form final solution with the number of arbitrary constants equal to dimension of basis of `\operatorname{L}`. The output of this procedure is a linear combination of fixed number of hypergeometric terms. However the underlying method can generate larger class of solutions - D'Alembertian terms. This method not only computes the kernel of the inhomogeneous equation, but also reduces in to a basis so that solutions generated by this procedure are linearly independent. Examples ======== >>> rsolve_hyper([-1, 1], 1 + n, n) (C0 + n*(n + 1)/2, [C0]) References ========== * :cite:`Petkovsek1992hyper` * :cite:`Petkovsek1997AeqB` """ coeffs = list(map(sympify, coeffs)) f = sympify(f) r, kernel, symbols = len(coeffs) - 1, [], set() if not f.is_zero: if f.is_Add: similar = {} for g in f.expand().args: if not g.is_hypergeometric(n): return for h in list(similar): if hypersimilar(g, h, n): similar[h] += g break else: similar[g] = Integer(0) inhomogeneous = [] for g, h in similar.items(): inhomogeneous.append(g + h) elif f.is_hypergeometric(n): inhomogeneous = [f] else: return for i, g in enumerate(inhomogeneous): coeff, polys = Integer(1), coeffs[:] denoms = [Integer(1)] * (r + 1) g = g.simplify() s = hypersimp(g, n) for j in range(1, r + 1): coeff *= s.subs({n: n + j - 1}) p, q = coeff.as_numer_denom() polys[j] *= p denoms[j] = q for j in range(r + 1): polys[j] *= Mul(*(denoms[:j] + denoms[j + 1:])) R = rsolve_ratio(polys, Mul(*denoms), n) if R is not None: R, syms = R if syms: R = R.subs(zip(syms, [0] * len(syms))) if R: inhomogeneous[i] *= R else: return result = Add(*inhomogeneous) result = result.simplify() else: result = Integer(0) Z = Dummy('Z') p, q = coeffs[0], coeffs[r].subs({n: n - r + 1}) p_factors = list(roots(p, n)) q_factors = list(roots(q, n)) factors = [(Integer(1), Integer(1))] for p in p_factors: for q in q_factors: if p.is_integer and q.is_integer and p <= q: continue else: factors += [(n - p, n - q)] p = [(n - p, Integer(1)) for p in p_factors] q = [(Integer(1), n - q) for q in q_factors] factors = p + factors + q for A, B in factors: polys, degrees = [], [] D = A * B.subs({n: n + r - 1}) for i in range(r + 1): a = Mul(*[A.subs({n: n + j}) for j in range(0, i)]) b = Mul(*[B.subs({n: n + j}) for j in range(i, r)]) poly = quo(coeffs[i] * a * b, D, n) polys.append(poly.as_poly(n)) if not poly.is_zero: degrees.append(polys[i].degree()) d, poly = max(degrees), Integer(0) for i in range(r + 1): coeff = polys[i].coeff_monomial((d,)) if coeff != 0: poly += coeff * Z**i for z in roots(poly, Z): if z.is_zero: continue sol, syms = rsolve_poly([polys[i] * z**i for i in range(r + 1)], 0, n) sol = sol.collect(syms) sol = [sol.coeff(_) for _ in syms] for C in sol: ratio = z * A * C.subs({n: n + 1}) / B / C ratio = ratio.simplify() skip = max([-1] + [v for v in roots(Mul(*ratio.as_numer_denom()), n) if v.is_Integer]) + 1 K = product(ratio, (n, skip, n - 1)) if K.has(factorial, FallingFactorial, RisingFactorial): K = K.simplify() if casoratian(kernel + [K], n, zero=False) != 0: kernel.append(K) kernel.sort(key=default_sort_key) sk = list(zip(numbered_symbols('C'), kernel)) for C, ker in sk: result += C * ker symbols |= {s for s, k in sk} return result, sorted(symbols, key=default_sort_key) def rsolve(f, *y, init={}, simplify=True): r""" Solve recurrence equations. The equations can involve objects of the form `y(n + k)`, where `k` is a constant. Parameters ========== f : Expr, Equality or iterable of above The single recurrence equation or a system of recurrence equations. 
\*y : tuple Holds function applications `y(n)`, wrt to which the recurrence equation(s) will be solved. If none given (empty tuple), this will be guessed from the provided equation(s). init : dict, optional The initial/boundary conditions for the recurrence equations as mapping of the function application `y(n_i)` to its value. Default is empty dictionary. simplify : bool, optional Enable simplification (default) on solutions. Examples ======== >>> eq = (n - 1)*f(n + 2) - (n**2 + 3*n - 2)*f(n + 1) + 2*n*(n + 1)*f(n) >>> rsolve(eq) [{f: Lambda(n, 2**n*C0 + C1*factorial(n))}] >>> rsolve(eq, init={f(0): 0, f(1): 3}) [{f: Lambda(n, 3*2**n - 3*factorial(n))}] Notes ===== Currently, the function can handle linear recurrences with polynomial coefficients and hypergeometric inhomogeneous part. See Also ======== diofant.solvers.ode.dsolve : solving differential equations diofant.solvers.solvers.solve : solving algebraic equations """ if not iterable(f): f = [f] f = [_.lhs - _.rhs if isinstance(_, Equality) else _ for _ in f] f = [_.expand() for _ in f] if len(f) > 1 or len(y) > 1: raise NotImplementedError('Support for systems of recurrence ' 'equations is not implemented yet.') else: f = f[0] if not y: y = sorted(f.atoms(Function), key=default_sort_key)[0] else: y = y[0] n = y.args[0] h_part = collections.defaultdict(lambda: Integer(0)) i_part = Integer(0) for h, c in f.collect(y.func(Wild('n')), evaluate=False).items(): if h.func == y.func: k = Wild('k', exclude=(n,)) r = h.args[0].match(n + k) if r: c = c.simplify() if not c.is_rational_function(n): raise ValueError(f"Rational function of '{n}' expected, got '{c}'") h_part[int(r[k])] = c else: raise ValueError(f"'{y.func}({n} + Integer)' expected, got '{h}'") else: i_term = h * c if i_term.find(y.func(Wild('k'))): raise NotImplementedError(f"Linear recurrence for '{y.func}' " f"expected, got '{f}'") i_part -= i_term if not i_part.is_zero: if not all(p.is_hypergeometric(n) for p in i_part.as_coeff_add(n)[1]): raise NotImplementedError('Inhomogeneous part should be a sum of ' f"hypergeometric terms in '{n}', got " f"'{i_part}'") k_min, k_max = min(h_part), max(h_part) if k_min < 0: return rsolve(f.subs({n: n + abs(k_min)}), y, init=init, simplify=simplify) i_numer, i_denom = i_part.as_numer_denom() common = functools.reduce(lcm, [x.as_numer_denom()[1] for x in h_part.values()] + [i_denom]) if common != 1: for k, coeff in h_part.items(): numer, denom = coeff.as_numer_denom() h_part[k] = numer * quo(common, denom, n) i_part = i_numer * quo(common, i_denom, n) coeffs = [h_part[i] for i in range(k_max + 1)] result = rsolve_hyper(coeffs, i_part, n) if result is None: return solution, symbols = result if symbols and init != {}: equations = [] for k, v in init.items(): if k.is_Function and k.func == y.func: i = int(k.args[0]) else: raise ValueError(f"'{y.func}(Integer)' expected, got '{k}'") eq = solution.limit(n, i) - v equations.append(eq) result = solve(equations, *symbols) if not result: return else: solution = solution.subs(result[0]) if simplify: solution = solution.expand(log=True, mul=False) solution = constantsimp(solution, symbols) solution = solution.simplify() return [{y.func: Lambda((n,), solution)}]
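The `rsolve` docstring above already shows the expected output; the short sketch below (a minimal, hedged example assuming a working diofant install) reuses the docstring's own recurrence to show how the returned `[{f: Lambda(n, ...)}]` result is actually consumed.

# A minimal sketch, assuming diofant is installed, reusing the recurrence
# from the rsolve() docstring above.
from diofant import Function, rsolve
from diofant.abc import n

f = Function('f')
eq = (n - 1)*f(n + 2) - (n**2 + 3*n - 2)*f(n + 1) + 2*n*(n + 1)*f(n)

[sol] = rsolve(eq, f(n), init={f(0): 0, f(1): 3})
closed_form = sol[f]          # Lambda(n, 3*2**n - 3*factorial(n))
assert closed_form(0) == 0    # the initial conditions are reproduced
assert closed_form(1) == 3
assert closed_form(4) == -24  # 3*2**4 - 3*4! = 48 - 72

Calling the returned Lambda evaluates the closed form at concrete integers, which is also how the `init` equations are generated internally (via `solution.limit(n, i)`).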
import pytest from diofant import (DiracDelta, Eq, Heaviside, I, Piecewise, Rational, Symbol, adjoint, conjugate, nan, pi, sign, sqrt, symbols, transpose) from diofant.abc import x, y, z from diofant.core.function import ArgumentIndexError __all__ = () def test_DiracDelta(): i = Symbol('i', nonzero=True) j = Symbol('j', positive=True) k = Symbol('k', negative=True) assert DiracDelta(1) == 0 assert DiracDelta(5.1) == 0 assert DiracDelta(-pi) == 0 assert DiracDelta(5, 7) == 0 assert DiracDelta(i) == 0 assert DiracDelta(j) == 0 assert DiracDelta(k) == 0 assert DiracDelta(nan) == nan assert isinstance(DiracDelta(0), DiracDelta) assert isinstance(DiracDelta(x), DiracDelta) assert adjoint(DiracDelta(x)) == DiracDelta(x) assert adjoint(DiracDelta(x - y)) == DiracDelta(x - y) assert conjugate(DiracDelta(x)) == DiracDelta(x) assert conjugate(DiracDelta(x - y)) == DiracDelta(x - y) assert transpose(DiracDelta(x)) == DiracDelta(x) assert transpose(DiracDelta(x - y)) == DiracDelta(x - y) assert DiracDelta(x).diff(x) == DiracDelta(x, 1) assert DiracDelta(x, 1).diff(x) == DiracDelta(x, 2) assert DiracDelta(x).is_simple(x) is True assert DiracDelta(3*x).is_simple(x) is True assert DiracDelta(x**2).is_simple(x) is False assert DiracDelta(sqrt(x)).is_simple(x) is False assert DiracDelta(x).is_simple(y) is False assert DiracDelta(x*y).simplify(x) == DiracDelta(x)/abs(y) assert DiracDelta(x*y).simplify(y) == DiracDelta(y)/abs(x) assert DiracDelta(x**2*y).simplify(x) == DiracDelta(x**2*y) assert DiracDelta(y).simplify(x) == DiracDelta(y) assert DiracDelta((x - 1)*(x - 2)*(x - 3)).simplify(x) == \ DiracDelta(x - 3)/2 + DiracDelta(x - 2) + DiracDelta(x - 1)/2 pytest.raises(ArgumentIndexError, lambda: DiracDelta(x).fdiff(2)) pytest.raises(ValueError, lambda: DiracDelta(x, -1)) def test_heaviside(): x, y = symbols('x, y', extended_real=True) assert Heaviside(0) == 0.5 assert Heaviside(-5) == 0 assert Heaviside(1) == 1 assert Heaviside(nan) == nan assert Heaviside(x).is_real assert adjoint(Heaviside(x)) == Heaviside(x) assert adjoint(Heaviside(x - y)) == Heaviside(x - y) assert conjugate(Heaviside(x)) == Heaviside(x) assert conjugate(Heaviside(x - y)) == Heaviside(x - y) assert transpose(Heaviside(x)) == Heaviside(x) assert transpose(Heaviside(x - y)) == Heaviside(x - y) assert Heaviside(x).diff(x) == DiracDelta(x) assert Heaviside(z + I).is_Function is True assert Heaviside(I*z).is_Function is True pytest.raises(ArgumentIndexError, lambda: Heaviside(x).fdiff(2)) pytest.raises(ValueError, lambda: Heaviside(I)) pytest.raises(ValueError, lambda: Heaviside(2 + 3*I)) def test_rewrite(): x = Symbol('x', extended_real=True) assert Heaviside(x).rewrite(Piecewise) == \ Piecewise((1, x > 0), (Rational(1, 2), Eq(x, 0)), (0, True)) assert Heaviside(y).rewrite(Piecewise) == Heaviside(y) assert Heaviside(x).rewrite(sign) == (sign(x)+1)/2 assert Heaviside(y).rewrite(sign) == Heaviside(y)
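The rewrite identity asserted in `test_rewrite` above can also be spot-checked numerically; a minimal sketch (the concrete sample points are arbitrary):

# Numeric spot-check of Heaviside(x).rewrite(sign) == (sign(x) + 1)/2
# for extended-real x; both sides give 1/2 at x = 0.
from diofant import Heaviside, Symbol, sign

x = Symbol('x', extended_real=True)
half_sign = Heaviside(x).rewrite(sign)
for value in (-2, 0, 3):
    assert Heaviside(value) == half_sign.subs({x: value})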
diofant/diofant
diofant/tests/functions/test_delta_functions.py
diofant/solvers/recurr.py
""" A Printer for generating executable code. The most important function here is srepr (that is an exact equivalent of builtin repr, except for optional arguments) that returns a string so that the relation eval(srepr(expr))=expr holds in an appropriate environment. """ from __future__ import annotations import typing import mpmath.libmp as mlib from mpmath.libmp import prec_to_dps, repr_dps from ..core.function import AppliedUndef from ..utilities import default_sort_key from .defaults import DefaultPrinting from .printer import Printer class ReprPrinter(Printer): """Repr printer.""" printmethod = '_diofantrepr' _default_settings: dict[str, typing.Any] = { 'order': None } def reprify(self, args, sep): """ Prints each item in `args` and joins them with `sep`. """ return sep.join([self.doprint(item) for item in args]) def emptyPrinter(self, expr): """ The fallback printer. """ if hasattr(expr, 'args') and hasattr(expr.args, '__iter__'): l = [] for o in expr.args: l.append(self._print(o)) return expr.__class__.__name__ + '(%s)' % ', '.join(l) elif hasattr(expr, '__repr__') and not issubclass(expr.__class__, DefaultPrinting): return repr(expr) else: return object.__repr__(expr) def _print_Dict(self, expr): l = [] for o in sorted(expr.args, key=default_sort_key): l.append(self._print(o)) return expr.__class__.__name__ + '(%s)' % ', '.join(l) def _print_Add(self, expr, order=None): args = expr.as_ordered_terms(order=order or self.order) args = map(self._print, args) return 'Add(%s)' % ', '.join(args) def _print_Function(self, expr): r = self._print(expr.func) r += '(%s)' % ', '.join([self._print(a) for a in expr.args]) return r def _print_FunctionClass(self, expr): if issubclass(expr, AppliedUndef): return f'Function({expr.__name__!r})' else: return expr.__name__ def _print_RationalConstant(self, expr): return f'Rational({expr.numerator}, {expr.denominator})' def _print_AtomicExpr(self, expr): return str(expr) def _print_NumberSymbol(self, expr): return str(expr) def _print_Integer(self, expr): return 'Integer(%i)' % int(expr.numerator) def _print_list(self, expr): return '[%s]' % self.reprify(expr, ', ') def _print_MatrixBase(self, expr): # special case for some empty matrices if (expr.rows == 0) ^ (expr.cols == 0): return '%s(%s, %s, %s)' % (expr.__class__.__name__, self._print(expr.rows), self._print(expr.cols), self._print([])) l = [] for i in range(expr.rows): l.append([]) for j in range(expr.cols): l[-1].append(expr[i, j]) return f'{expr.__class__.__name__}({self._print(l)})' def _print_BooleanTrue(self, expr): return 'true' def _print_BooleanFalse(self, expr): return 'false' def _print_NaN(self, expr): return 'nan' def _print_Mul(self, expr, order=None): terms = expr.args args = expr._new_rawargs(*terms).as_ordered_factors() args = map(self._print, args) return 'Mul(%s)' % ', '.join(args) def _print_Rational(self, expr): return 'Rational(%s, %s)' % (self._print(int(expr.numerator)), self._print(int(expr.denominator))) def _print_Float(self, expr): dps = prec_to_dps(expr._prec) r = mlib.to_str(expr._mpf_, repr_dps(expr._prec)) return f"{expr.__class__.__name__}('{r}', dps={dps:d})" def _print_BaseSymbol(self, expr): d = expr._assumptions.generator if d == {}: return f'{expr.__class__.__name__}({self._print(expr.name)})' else: attr = [f'{k}={v}' for k, v in d.items()] return '%s(%s, %s)' % (expr.__class__.__name__, self._print(expr.name), ', '.join(attr)) def _print_str(self, expr): return repr(expr) def _print_tuple(self, expr): if len(expr) == 1: return '(%s,)' % self._print(expr[0]) 
else: return '(%s)' % self.reprify(expr, ', ') def _print_WildFunction(self, expr): return f"{expr.__class__.__name__}('{expr.name}')" def _print_PolynomialRing(self, ring): return '%s(%s, %s, %s)' % (ring.__class__.__name__, self._print(ring.domain), self._print(ring.symbols), self._print(ring.order)) def _print_GMPYIntegerRing(self, expr): return f'{expr.__class__.__name__}()' _print_GMPYRationalField = _print_GMPYIntegerRing _print_PythonIntegerRing = _print_GMPYIntegerRing _print_PythonRationalField = _print_GMPYIntegerRing _print_LexOrder = _print_GMPYIntegerRing _print_GradedLexOrder = _print_LexOrder def _print_FractionField(self, field): return '%s(%s, %s, %s)' % (field.__class__.__name__, self._print(field.domain), self._print(field.symbols), self._print(field.order)) def _print_PolyElement(self, poly): terms = list(poly.items()) terms.sort(key=poly.ring.order, reverse=True) return f'{poly.__class__.__name__}({self._print(poly.ring)}, {self._print(terms)})' def _print_FracElement(self, frac): numer_terms = list(frac.numerator.items()) numer_terms.sort(key=frac.field.order, reverse=True) denom_terms = list(frac.denominator.items()) denom_terms.sort(key=frac.field.order, reverse=True) numer = self._print(numer_terms) denom = self._print(denom_terms) return f'{frac.__class__.__name__}({self._print(frac.field)}, {numer}, {denom})' def _print_AlgebraicField(self, expr): return 'AlgebraicField(%s, %s)' % (self._print(expr.domain), self._print(expr.ext.as_expr())) def _print_AlgebraicElement(self, expr): return '%s(%s)' % (self._print(expr.parent), self._print(list(map(expr.domain.domain.to_expr, expr.rep.all_coeffs())))) def _print_Domain(self, expr): return expr.rep def srepr(expr, **settings): """Return expr in repr form.""" return ReprPrinter(settings).doprint(expr)
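A minimal sketch (assuming a diofant install) of the round-trip property promised in the module docstring, `eval(srepr(expr)) == expr`; the constructors that appear in the printed string must be in scope for `eval` to resolve them:

# Round-trip sketch: srepr() prints constructor calls (Add, Mul, Integer,
# Symbol, ...) that eval() can rebuild into an equal expression.
from diofant import Add, Integer, Mul, Symbol
from diofant.printing.repr import srepr

x = Symbol('x')
expr = 2*x + 1
text = srepr(expr)  # e.g. "Add(Mul(Integer(2), Symbol('x')), Integer(1))"
assert eval(text) == expr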
diofant/diofant
diofant/tests/functions/test_delta_functions.py
diofant/printing/repr.py
""" module for generating C, C++, Fortran77, Fortran90 and Octave/Matlab routines that evaluate diofant expressions. This module is work in progress. Only the milestones with a '+' character in the list below have been completed. --- How is diofant.utilities.codegen different from diofant.printing.ccode? --- We considered the idea to extend the printing routines for diofant functions in such a way that it prints complete compilable code, but this leads to a few unsurmountable issues that can only be tackled with dedicated code generator: - For C, one needs both a code and a header file, while the printing routines generate just one string. This code generator can be extended to support .pyf files for f2py. - Diofant functions are not concerned with programming-technical issues, such as input, output and input-output arguments. Other examples are contiguous or non-contiguous arrays, including headers of other libraries such as gsl or others. - It is highly interesting to evaluate several diofant functions in one C routine, eventually sharing common intermediate results with the help of the cse routine. This is more than just printing. - From the programming perspective, expressions with constants should be evaluated in the code generator as much as possible. This is different for printing. --- Basic assumptions --- * A generic Routine data structure describes the routine that must be translated into C/Fortran/... code. This data structure covers all features present in one or more of the supported languages. * Descendants from the CodeGen class transform multiple Routine instances into compilable code. Each derived class translates into a specific language. * In many cases, one wants a simple workflow. The friendly functions in the last part are a simple api on top of the Routine/CodeGen stuff. They are easier to use, but are less powerful. --- Milestones --- + First working version with scalar input arguments, generating C code, tests + Friendly functions that are easier to use than the rigorous Routine/CodeGen workflow. + Integer and Real numbers as input and output + Output arguments + InputOutput arguments + Sort input/output arguments properly + Contiguous array arguments (numpy matrices) + Also generate .pyf code for f2py (in autowrap module) + Isolate constants and evaluate them beforehand in double precision + Fortran 90 + Octave/Matlab - Common Subexpression Elimination - User defined comments in the generated code - Optional extra include lines for libraries/objects that can eval special functions - Test other C compilers and libraries: gcc, tcc, libtcc, gcc+gsl, ... - Contiguous array arguments (diofant matrices) - Non-contiguous array arguments (diofant matrices) - ccode must raise an error when it encounters something that can not be translated into c. ccode(integrate(sin(x)/x, x)) does not make sense. - Complex numbers as input and output - A default complex datatype - Include extra information in the header: date, user, hostname, sha1 hash, ... - Fortran 77 - C++ - Python - ... """ import os import textwrap from io import StringIO from .. 
import __version__ as diofant_version from ..core import Dummy, Equality, Expr, Function, Integer, Symbol, Tuple from ..core.compatibility import is_sequence from ..matrices import (ImmutableMatrix, MatrixBase, MatrixExpr, MatrixSlice, MatrixSymbol) from ..printing.ccode import CCodePrinter, ccode from ..printing.fcode import FCodePrinter, fcode from ..printing.octave import OctaveCodePrinter, octave_code from ..tensor import Idx, Indexed, IndexedBase __all__ = ( # description of routines 'Routine', 'DataType', 'default_datatypes', 'get_default_datatype', 'Argument', 'InputArgument', 'OutputArgument', 'Result', # routines -> code 'CodeGen', 'CCodeGen', 'FCodeGen', 'OctaveCodeGen', # friendly functions 'codegen', 'make_routine', ) # # Description of routines # class Routine: """Generic description of evaluation routine for set of expressions. A CodeGen class can translate instances of this class into code in a particular language. The routine specification covers all the features present in these languages. The CodeGen part must raise an exception when certain features are not present in the target language. For example, multiple return values are possible in Python, but not in C or Fortran. Another example: Fortran and Python support complex numbers, while C does not. """ def __init__(self, name, arguments, results, local_vars, global_vars): """Initialize a Routine instance. Parameters ========== name : string Name of the routine. arguments : list of Arguments These are things that appear in arguments of a routine, often appearing on the right-hand side of a function call. These are commonly InputArguments but in some languages, they can also be OutputArguments or InOutArguments (e.g., pass-by-reference in C code). results : list of Results These are the return values of the routine, often appearing on the left-hand side of a function call. The difference between Results and OutputArguments and when you should use each is language-specific. local_vars : list of Results These are variables that will be defined at the beginning of the function. global_vars : list of Symbols Variables which will not be passed into the function. """ # extract all input symbols and all symbols appearing in an expression input_symbols = set() symbols = set() for arg in arguments: if isinstance(arg, OutputArgument): symbols.update(arg.expr.free_symbols) elif isinstance(arg, InputArgument): input_symbols.add(arg.name) elif isinstance(arg, InOutArgument): input_symbols.add(arg.name) symbols.update(arg.expr.free_symbols) else: raise ValueError(f'Unknown Routine argument: {arg}') for r in results: if not isinstance(r, Result): raise ValueError(f'Unknown Routine result: {r}') symbols.update(r.expr.free_symbols) local_symbols = set() for r in local_vars: if isinstance(r, Result): symbols.update(r.expr.free_symbols) local_symbols.add(r.name) else: local_symbols.add(r) # Check that all symbols in the expressions are covered by # InputArguments/InOutArguments---subset because user could # specify additional (unused) InputArguments or local_vars. notcovered = symbols.difference(input_symbols | local_symbols | global_vars) if notcovered != set(): raise ValueError('Symbols needed for output are not in input ' + ', '.join([str(x) for x in notcovered])) self.name = name self.arguments = arguments self.results = results self.local_vars = local_vars self.global_vars = global_vars @property def variables(self): """Returns a set of all variables possibly used in the routine. 
For routines with unnamed return values, the dummies that may or may not be used will be included in the set. """ v = set(self.local_vars) for arg in self.arguments: v.add(arg.name) for res in self.results: v.add(res.result_var) return v @property def result_variables(self): """Returns a list of OutputArgument, InOutArgument and Result. If return values are present, they are at the end ot the list. """ args = [arg for arg in self.arguments if isinstance( arg, (OutputArgument, InOutArgument))] args.extend(self.results) return args class DataType: """Holds strings for a certain datatype in different languages.""" def __init__(self, cname, fname, pyname, octname): self.cname = cname self.fname = fname self.pyname = pyname self.octname = octname default_datatypes = { 'int': DataType('int', 'INTEGER*4', 'int', ''), 'float': DataType('double', 'REAL*8', 'float', '') } def get_default_datatype(expr): """Derives an appropriate datatype based on the expression.""" if expr.is_integer: return default_datatypes['int'] else: return default_datatypes['float'] class Variable: """Represents a typed variable.""" def __init__(self, name, datatype=None, dimensions=None, precision=None): """Return a new variable. Parameters ========== name : Symbol or MatrixSymbol datatype : optional When not given, the data type will be guessed based on the assumptions on the symbol argument. dimension : sequence containing tupes, optional If present, the argument is interpreted as an array, where this sequence of tuples specifies (lower, upper) bounds for each index of the array. precision : int, optional Controls the precision of floating point constants. """ if not isinstance(name, (Dummy, Symbol, MatrixSymbol)): raise TypeError('The first argument must be a diofant symbol.') if datatype is None: datatype = get_default_datatype(name) elif not isinstance(datatype, DataType): raise TypeError("The (optional) `datatype' argument must be an " 'instance of the DataType class.') if dimensions and not isinstance(dimensions, (tuple, list)): raise TypeError( 'The dimension argument must be a sequence of tuples') self._name = name self._datatype = { 'C': datatype.cname, 'FORTRAN': datatype.fname, 'OCTAVE': datatype.octname, 'PYTHON': datatype.pyname } self.dimensions = dimensions self.precision = precision @property def name(self): return self._name def get_datatype(self, language): """Returns the datatype string for the requested language. Examples ======== >>> x = Variable(Symbol('x')) >>> x.get_datatype('c') 'double' >>> x.get_datatype('fortran') 'REAL*8' """ try: return self._datatype[language.upper()] except KeyError: raise CodeGenError('Has datatypes for languages: %s' % ', '.join(self._datatype)) class Argument(Variable): """An abstract Argument data structure: a name and a data type. This structure is refined in the descendants below. """ class InputArgument(Argument): """Input argument class.""" class ResultBase: """Base class for all "outgoing" information from a routine. Objects of this class stores a diofant expression, and a diofant object representing a result variable that will be used in the generated code only if necessary. """ def __init__(self, expr, result_var): self.expr = expr self.result_var = result_var class OutputArgument(Argument, ResultBase): """OutputArgument are always initialized in the routine.""" def __init__(self, name, result_var, expr, datatype=None, dimensions=None, precision=None): """Return a new variable. Parameters ========== name : Symbol, MatrixSymbol The name of this variable. 
When used for code generation, this might appear, for example, in the prototype of function in the argument list. result_var : Symbol, Indexed Something that can be used to assign a value to this variable. Typically the same as `name` but for Indexed this should be e.g., "y[i]" whereas `name` should be the Symbol "y". expr : object The expression that should be output, typically a Diofant expression. datatype : optional When not given, the data type will be guessed based on the assumptions on the symbol argument. dimension : sequence containing tupes, optional If present, the argument is interpreted as an array, where this sequence of tuples specifies (lower, upper) bounds for each index of the array. precision : int, optional Controls the precision of floating point constants. """ Argument.__init__(self, name, datatype, dimensions, precision) ResultBase.__init__(self, expr, result_var) class InOutArgument(Argument, ResultBase): """InOutArgument are never initialized in the routine.""" def __init__(self, name, result_var, expr, datatype=None, dimensions=None, precision=None): if not datatype: datatype = get_default_datatype(expr) Argument.__init__(self, name, datatype, dimensions, precision) ResultBase.__init__(self, expr, result_var) __init__.__doc__ = OutputArgument.__init__.__doc__ class Result(Variable, ResultBase): """An expression for a return value. The name result is used to avoid conflicts with the reserved word "return" in the python language. It is also shorter than ReturnValue. These may or may not need a name in the destination (e.g., "return(x*y)" might return a value without ever naming it). """ def __init__(self, expr, name=None, result_var=None, datatype=None, dimensions=None, precision=None): """Initialize a return value. Parameters ========== expr : Diofant expression name : Symbol, MatrixSymbol, optional The name of this return variable. When used for code generation, this might appear, for example, in the prototype of function in a list of return values. A dummy name is generated if omitted. result_var : Symbol, Indexed, optional Something that can be used to assign a value to this variable. Typically the same as `name` but for Indexed this should be e.g., "y[i]" whereas `name` should be the Symbol "y". Defaults to `name` if omitted. datatype : optional When not given, the data type will be guessed based on the assumptions on the symbol argument. dimension : sequence containing tupes, optional If present, this variable is interpreted as an array, where this sequence of tuples specifies (lower, upper) bounds for each index of the array. precision : int, optional Controls the precision of floating point constants. """ if not isinstance(expr, (Expr, MatrixBase)): raise TypeError('The first argument must be a diofant expression.') if name is None: name = 'result_%d' % abs(hash(expr)) if isinstance(name, str): if isinstance(expr, (MatrixBase, MatrixExpr)): name = MatrixSymbol(name, *expr.shape) else: name = Symbol(name) if result_var is None: result_var = name Variable.__init__(self, name, datatype=datatype, dimensions=dimensions, precision=precision) ResultBase.__init__(self, expr, result_var) # # Transformation of routine objects into code # class CodeGen: """Abstract class for the code generators.""" printer = None # will be set to an instance of a CodePrinter subclass def __init__(self, project='project', cse=False): """Initialize a code generator. Derived classes will offer more options that affect the generated code. 
""" self.project = project self.cse = cse def routine(self, name, expr, argument_sequence, global_vars=None): """Creates an Routine object that is appropriate for this language. This implementation is appropriate for at least C/Fortran. Subclasses can override this if necessary. Here, we assume at most one return value (the l-value) which must be scalar. Additional outputs are OutputArguments (e.g., pointers on right-hand-side or pass-by-reference). Matrices are always returned via OutputArguments. If ``argument_sequence`` is None, arguments will be ordered alphabetically, but with all InputArguments first, and then OutputArgument and InOutArguments. """ if self.cse: from ..simplify import cse if is_sequence(expr) and not isinstance(expr, (MatrixBase, MatrixExpr)): if not expr: raise ValueError('No expression given') for e in expr: if not e.is_Equality: raise CodeGenError(f'Lists of expressions must all be Equalities. {e} is not.') # create a list of right hand sides and simplify them rhs = [e.rhs for e in expr] common, simplified = cse(rhs) # pack the simplified expressions back up with their left hand sides expr = [Equality(e.lhs, rhs) for e, rhs in zip(expr, simplified)] else: if isinstance(expr, Equality): common, simplified = cse(expr.rhs) expr = Equality(expr.lhs, simplified[0]) else: common, simplified = cse(expr) expr = simplified local_vars = [Result(b, a) for a, b in common] local_symbols = {a for a, _ in common} local_expressions = Tuple(*[b for _, b in common]) else: local_expressions = Tuple() if is_sequence(expr) and not isinstance(expr, (MatrixBase, MatrixExpr)): if not expr: raise ValueError('No expression given') expressions = Tuple(*expr) else: expressions = Tuple(expr) # local variables if not self.cse: # local variables for indexed expressions local_vars = {i.label for i in expressions.atoms(Idx)} local_symbols = local_vars # global variables global_vars = set() if global_vars is None else set(global_vars) # symbols that should be arguments symbols = (expressions.free_symbols | local_expressions.free_symbols) - local_symbols - global_vars # Decide whether to use output argument or return value return_val = [] output_args = [] for expr in expressions: if isinstance(expr, Equality): out_arg = expr.lhs expr = expr.rhs if isinstance(out_arg, Indexed): dims = tuple((Integer(0), dim - 1) for dim in out_arg.shape) symbol = out_arg.base.label elif isinstance(out_arg, Symbol): dims = [] symbol = out_arg elif isinstance(out_arg, MatrixSymbol): dims = tuple((Integer(0), dim - 1) for dim in out_arg.shape) symbol = out_arg else: raise CodeGenError('Only Indexed, Symbol, or MatrixSymbol ' 'can define output arguments.') if expr.has(symbol): output_args.append( InOutArgument(symbol, out_arg, expr, dimensions=dims)) else: output_args.append( OutputArgument(symbol, out_arg, expr, dimensions=dims)) # avoid duplicate arguments symbols.remove(symbol) elif isinstance(expr, (ImmutableMatrix, MatrixSlice)): # Create a "dummy" MatrixSymbol to use as the Output arg out_arg = MatrixSymbol('out_%s' % abs(hash(expr)), *expr.shape) dims = tuple((Integer(0), dim - 1) for dim in out_arg.shape) output_args.append( OutputArgument(out_arg, out_arg, expr, dimensions=dims)) else: return_val.append(Result(expr)) arg_list = [] # setup input argument list array_symbols = {} for array in expressions.atoms(Indexed) | local_expressions.atoms(Indexed): array_symbols[array.base.label] = array for array in expressions.atoms(MatrixSymbol) | local_expressions.atoms(MatrixSymbol): array_symbols[array] = array for 
symbol in sorted(symbols, key=str): if symbol in array_symbols: dims = [] array = array_symbols[symbol] for dim in array.shape: dims.append((Integer(0), dim - 1)) metadata = {'dimensions': dims} else: metadata = {} arg_list.append(InputArgument(symbol, **metadata)) output_args.sort(key=lambda x: str(x.name)) arg_list.extend(output_args) if argument_sequence is not None: # if the user has supplied IndexedBase instances, we'll accept that new_sequence = [] for arg in argument_sequence: if isinstance(arg, IndexedBase): new_sequence.append(arg.label) else: new_sequence.append(arg) argument_sequence = new_sequence missing = [x for x in arg_list if x.name not in argument_sequence] if missing: msg = "Argument list didn't specify: {0} " msg = msg.format(', '.join([str(m.name) for m in missing])) raise CodeGenArgumentListError(msg, missing) # create redundant arguments to produce the requested sequence name_arg_dict = {x.name: x for x in arg_list} new_args = [] for symbol in argument_sequence: try: new_args.append(name_arg_dict[symbol]) except KeyError: dims = None if isinstance(symbol, MatrixSymbol): dims = tuple((Integer(0), dim - 1) for dim in symbol.shape) new_args.append(InputArgument(symbol, dimensions=dims)) arg_list = new_args return Routine(name, arg_list, return_val, local_vars, global_vars) def write(self, routines, prefix, to_files=False, header=True, empty=True): """Writes all the source code files for the given routines. The generated source is returned as a list of (filename, contents) tuples, or is written to files (see below). Each filename consists of the given prefix, appended with an appropriate extension. Parameters ========== routines : list A list of Routine instances to be written prefix : string The prefix for the output files to_files : bool, optional When True, the output is written to files. Otherwise, a list of (filename, contents) tuples is returned. [default: False] header : bool, optional When True, a header comment is included on top of each source file. [default: True] empty : bool, optional When True, empty lines are included to structure the source files. [default: True] """ for routine in routines: if not isinstance(routine, Routine): raise CodeGenError(f'Routine expected, got {routine}') if to_files: for dump_fn in self.dump_fns: filename = f'{prefix}.{dump_fn.extension}' with open(filename, 'w') as f: dump_fn(self, routines, f, prefix, header, empty) else: result = [] for dump_fn in self.dump_fns: filename = f'{prefix}.{dump_fn.extension}' contents = StringIO() dump_fn(self, routines, contents, prefix, header, empty) result.append((filename, contents.getvalue())) return result def dump_code(self, routines, f, prefix, header=True, empty=True): """Write the code by calling language specific methods. The generated file contains all the definitions of the routines in low-level code and refers to the header file if appropriate. Parameters ========== routines : list A list of Routine instances. f : file-like Where to write the file. prefix : string The filename prefix, used to refer to the proper header file. Only the basename of the prefix is used. header : bool, optional When True, a header comment is included on top of each source file. [default : True] empty : bool, optional When True, empty lines are included to structure the source files. 
[default : True] """ code_lines = self._preprocessor_statements(prefix) for routine in routines: if empty: code_lines.append('\n') code_lines.extend(self._get_routine_opening(routine)) code_lines.extend(self._declare_arguments(routine)) code_lines.extend(self._declare_globals(routine)) code_lines.extend(self._declare_locals(routine)) if empty: code_lines.append('\n') code_lines.extend(self._call_printer(routine)) if empty: code_lines.append('\n') code_lines.extend(self._get_routine_ending(routine)) code_lines = self._indent_code(''.join(code_lines)) if header: code_lines = ''.join(self._get_header() + [code_lines]) if code_lines: f.write(code_lines) def _printer_method_with_settings(self, method, settings=None, *args, **kwargs): settings = settings or {} ori = {k: self.printer._settings[k] for k in settings} for k, v in settings.items(): self.printer._settings[k] = v result = getattr(self.printer, method)(*args, **kwargs) for k, v in ori.items(): self.printer._settings[k] = v return result class CodeGenError(Exception): pass class CodeGenArgumentListError(Exception): @property def missing_args(self): return self.args[1] header_comment = """Code generated with diofant %(version)s See https://diofant.readthedocs.io/ for more information. This file is part of '%(project)s' """ class CCodeGen(CodeGen): """Generator for C code. The .write() method inherited from CodeGen will output a code file and an interface file, <prefix>.c and <prefix>.h respectively. """ code_extension = 'c' interface_extension = 'h' def __init__(self, project='project', printer=None, preprocessor_statements=None, cse=False): super(CCodeGen, self).__init__(project=project, cse=cse) self.printer = printer or CCodePrinter() self.preprocessor_statements = preprocessor_statements if preprocessor_statements is None: self.preprocessor_statements = ['#include <math.h>\n'] def _get_header(self): """Writes a common header for the generated files.""" code_lines = [] code_lines.append('/' + '*'*78 + '\n') tmp = header_comment % {'version': diofant_version, 'project': self.project} for line in tmp.splitlines(): code_lines.append(' *%s*\n' % line.center(76)) code_lines.append(' ' + '*'*78 + '/\n') return code_lines def get_prototype(self, routine): """Returns a string for the function prototype of the routine. If the routine has multiple result objects, an CodeGenError is raised. 
See: https://en.wikipedia.org/wiki/Function_prototype """ if len(routine.results) > 1: raise CodeGenError('C only supports a single or no return value.') elif len(routine.results) == 1: ctype = routine.results[0].get_datatype('C') else: ctype = 'void' type_args = [] for arg in routine.arguments: name = ccode(arg.name) if arg.dimensions or isinstance(arg, ResultBase): type_args.append((arg.get_datatype('C'), f'*{name}')) else: type_args.append((arg.get_datatype('C'), name)) arguments = ', '.join([ '%s %s' % t for t in type_args]) return f'{ctype} {routine.name}({arguments})' def _preprocessor_statements(self, prefix): code_lines = [] code_lines.append(f'#include "{os.path.basename(prefix)}.h\"\n') code_lines.extend(self.preprocessor_statements) return code_lines def _get_routine_opening(self, routine): prototype = self.get_prototype(routine) return ['%s {\n' % prototype] def _declare_arguments(self, routine): # arguments are declared in prototype return [] def _declare_globals(self, routine): # global variables are not explicitly declared within C functions return [] def _declare_locals(self, routine): # Compose a list of symbols to be dereferenced in the function # body. These are the arguments that were passed by a reference # pointer, excluding arrays. dereference = [] for arg in routine.arguments: if isinstance(arg, ResultBase) and not arg.dimensions: dereference.append(arg.name) code_lines = [] for result in routine.local_vars: # local variables that are simple symbols such as those used as indices into # for loops are defined declared elsewhere. if not isinstance(result, Result): continue assign_to = result.name t = result.get_datatype('c') prefix = f'const {t} ' constants, not_c, c_expr = self._printer_method_with_settings( 'doprint', {'human': False, 'dereference': dereference}, result.expr, assign_to=assign_to) code_lines.append(f'{prefix}{c_expr}\n') return code_lines def _call_printer(self, routine): code_lines = [] # Compose a list of symbols to be dereferenced in the function # body. These are the arguments that were passed by a reference # pointer, excluding arrays. dereference = [] for arg in routine.arguments: if isinstance(arg, ResultBase) and not arg.dimensions: dereference.append(arg.name) return_val = None for result in routine.result_variables: if isinstance(result, Result): assign_to = routine.name + '_result' t = result.get_datatype('c') code_lines.append(f'{t} {assign_to!s};\n') return_val = assign_to else: assign_to = result.result_var constants, not_c, c_expr = ccode(result.expr, human=False, assign_to=assign_to, dereference=dereference) for name, value in sorted(constants, key=str): code_lines.append(f'double const {name} = {value};\n') code_lines.append(f'{c_expr}\n') if return_val: code_lines.append(f' return {return_val};\n') return code_lines def _indent_code(self, codelines): p = CCodePrinter() return p.indent_code(codelines) def _get_routine_ending(self, routine): return ['}\n'] def dump_c(self, routines, f, prefix, header=True, empty=True): self.dump_code(routines, f, prefix, header, empty) dump_c.extension = code_extension # type: ignore[attr-defined] dump_c.__doc__ = CodeGen.dump_code.__doc__ def dump_h(self, routines, f, prefix, header=True, empty=True): """Writes the C header file. This file contains all the function declarations. Parameters ========== routines : list A list of Routine instances. f : file-like Where to write the file. prefix : string The filename prefix, used to construct the include guards. Only the basename of the prefix is used. 
header : bool, optional When True, a header comment is included on top of each source file. [default : True] empty : bool, optional When True, empty lines are included to structure the source files. [default : True] """ if header: print(''.join(self._get_header()), file=f) guard_name = '%s__%s__H' % (self.project.replace( ' ', '_').upper(), prefix.replace('/', '_').upper()) # include guards if empty: print(file=f) print(f'#ifndef {guard_name}', file=f) print(f'#define {guard_name}', file=f) if empty: print(file=f) # declaration of the function prototypes for routine in routines: prototype = self.get_prototype(routine) print(f'{prototype};', file=f) # end if include guards if empty: print(file=f) print('#endif', file=f) if empty: print(file=f) dump_h.extension = interface_extension # type: ignore[attr-defined] # This list of dump functions is used by CodeGen.write to know which dump # functions it has to call. dump_fns = [dump_c, dump_h] class FCodeGen(CodeGen): """Generator for Fortran 95 code The .write() method inherited from CodeGen will output a code file and an interface file, <prefix>.f90 and <prefix>.h respectively. """ code_extension = 'f90' interface_extension = 'h' def __init__(self, project='project'): CodeGen.__init__(self, project) def _get_symbol(self, s): """Returns the symbol as fcode prints it.""" return fcode(s).strip() def _get_header(self): """Writes a common header for the generated files.""" code_lines = [] code_lines.append('!' + '*'*78 + '\n') tmp = header_comment % {'version': diofant_version, 'project': self.project} for line in tmp.splitlines(): code_lines.append('!*%s*\n' % line.center(76)) code_lines.append('!' + '*'*78 + '\n') return code_lines def _preprocessor_statements(self, prefix): return [] def _get_routine_opening(self, routine): """Returns the opening statements of the fortran routine.""" code_list = [] if len(routine.results) > 1: raise CodeGenError( 'Fortran only supports a single or no return value.') elif len(routine.results) == 1: result = routine.results[0] code_list.append(result.get_datatype('fortran')) code_list.append('function') else: code_list.append('subroutine') args = ', '.join('%s' % self._get_symbol(arg.name) for arg in routine.arguments) call_sig = f'{routine.name}({args})\n' # Fortran 95 requires all lines be less than 132 characters, so wrap # this line before appending. 
call_sig = ' &\n'.join(textwrap.wrap(call_sig, width=60, break_long_words=False)) + '\n' code_list.append(call_sig) code_list = [' '.join(code_list)] code_list.append('implicit none\n') return code_list def _declare_arguments(self, routine): # argument type declarations code_list = [] array_list = [] scalar_list = [] for arg in routine.arguments: if isinstance(arg, InputArgument): typeinfo = '%s, intent(in)' % arg.get_datatype('fortran') elif isinstance(arg, InOutArgument): typeinfo = '%s, intent(inout)' % arg.get_datatype('fortran') else: typeinfo = '%s, intent(out)' % arg.get_datatype('fortran') fprint = self._get_symbol if arg.dimensions: # fortran arrays start at 1 dimstr = ', '.join(['%s:%s' % ( fprint(dim[0] + 1), fprint(dim[1] + 1)) for dim in arg.dimensions]) typeinfo += f', dimension({dimstr})' array_list.append(f'{typeinfo} :: {fprint(arg.name)}\n') else: scalar_list.append(f'{typeinfo} :: {fprint(arg.name)}\n') # scalars first, because they can be used in array declarations code_list.extend(scalar_list) code_list.extend(array_list) return code_list def _declare_globals(self, routine): # Global variables not explicitly declared within Fortran 90 functions. # Note: a future F77 mode may need to generate "common" blocks. return [] def _declare_locals(self, routine): code_list = [] for var in sorted(routine.local_vars, key=str): typeinfo = get_default_datatype(var) code_list.append(f'{typeinfo.fname} :: {self._get_symbol(var)}\n') return code_list def _get_routine_ending(self, routine): """Returns the closing statements of the fortran routine.""" if len(routine.results) == 1: return ['end function\n'] else: return ['end subroutine\n'] def get_interface(self, routine): """Returns a string for the function interface. The routine should have a single result object, which can be None. If the routine has multiple result objects, a CodeGenError is raised. See: https://en.wikipedia.org/wiki/Function_prototype """ prototype = [ 'interface\n' ] prototype.extend(self._get_routine_opening(routine)) prototype.extend(self._declare_arguments(routine)) prototype.extend(self._get_routine_ending(routine)) prototype.append('end interface\n') return ''.join(prototype) def _call_printer(self, routine): declarations = [] code_lines = [] for result in routine.result_variables: if isinstance(result, Result): assign_to = routine.name else: assign_to = result.result_var constants, not_fortran, f_expr = fcode(result.expr, assign_to=assign_to, source_format='free', human=False) for obj, v in sorted(constants, key=str): t = get_default_datatype(obj) declarations.append( f'{t.fname}, parameter :: {obj} = {v}\n') for obj in sorted(not_fortran, key=str): t = get_default_datatype(obj) if isinstance(obj, Function): name = obj.func else: name = obj declarations.append(f'{t.fname} :: {name}\n') code_lines.append(f'{f_expr}\n') return declarations + code_lines def _indent_code(self, codelines): p = FCodePrinter({'source_format': 'free', 'human': False}) return p.indent_code(codelines) def dump_f95(self, routines, f, prefix, header=True, empty=True): # check that symbols are unique with ignorecase for r in routines: lowercase = {str(x).lower() for x in r.variables} orig_case = {str(x) for x in r.variables} if len(lowercase) < len(orig_case): raise CodeGenError('Fortran ignores case. 
Got symbols: %s' % (', '.join([str(var) for var in r.variables]))) self.dump_code(routines, f, prefix, header, empty) dump_f95.extension = code_extension # type: ignore[attr-defined] dump_f95.__doc__ = CodeGen.dump_code.__doc__ def dump_h(self, routines, f, prefix, header=True, empty=True): """Writes the interface to a header file. This file contains all the function declarations. Parameters ========== routines : list A list of Routine instances. f : file-like Where to write the file. prefix : string The filename prefix. header : bool, optional When True, a header comment is included on top of each source file. [default : True] empty : bool, optional When True, empty lines are included to structure the source files. [default : True] """ if header: print(''.join(self._get_header()), file=f) if empty: print(file=f) # declaration of the function prototypes for routine in routines: prototype = self.get_interface(routine) f.write(prototype) if empty: print(file=f) dump_h.extension = interface_extension # type: ignore[attr-defined] # This list of dump functions is used by CodeGen.write to know which dump # functions it has to call. dump_fns = [dump_f95, dump_h] class OctaveCodeGen(CodeGen): """Generator for Octave code. The .write() method inherited from CodeGen will output a code file <prefix>.m. Octave .m files usually contain one function. That function name should match the filename (``prefix``). If you pass multiple ``name_expr`` pairs, the latter ones are presumed to be private functions accessed by the primary function. You should only pass inputs to ``argument_sequence``: outputs are ordered according to their order in ``name_expr``. """ code_extension = 'm' def routine(self, name, expr, argument_sequence, global_vars): """Specialized Routine creation for Octave.""" # FIXME: this is probably general enough for other high-level # languages, perhaps its the C/Fortran one that is specialized! if is_sequence(expr) and not isinstance(expr, (MatrixBase, MatrixExpr)): if not expr: raise ValueError('No expression given') expressions = Tuple(*expr) else: expressions = Tuple(expr) # local variables local_vars = {i.label for i in expressions.atoms(Idx)} # global variables global_vars = set() if global_vars is None else set(global_vars) # symbols that should be arguments symbols = expressions.free_symbols - local_vars - global_vars # Octave supports multiple return values return_vals = [] for (i, expr) in enumerate(expressions): if isinstance(expr, Equality): out_arg = expr.lhs expr = expr.rhs symbol = out_arg if isinstance(out_arg, Indexed): symbol = out_arg.base.label if not isinstance(out_arg, (Indexed, Symbol, MatrixSymbol)): raise CodeGenError('Only Indexed, Symbol, or MatrixSymbol ' 'can define output arguments.') return_vals.append(Result(expr, name=symbol, result_var=out_arg)) if not expr.has(symbol): # this is a pure output: remove from the symbols list, so # it doesn't become an input. 
symbols.remove(symbol) else: # we have no name for this output return_vals.append(Result(expr, name='out%d' % (i+1))) # setup input argument list arg_list = [] array_symbols = {} for array in expressions.atoms(Indexed): array_symbols[array.base.label] = array for array in expressions.atoms(MatrixSymbol): array_symbols[array] = array for symbol in sorted(symbols, key=str): arg_list.append(InputArgument(symbol)) if argument_sequence is not None: # if the user has supplied IndexedBase instances, we'll accept that new_sequence = [] for arg in argument_sequence: if isinstance(arg, IndexedBase): new_sequence.append(arg.label) else: new_sequence.append(arg) argument_sequence = new_sequence missing = [x for x in arg_list if x.name not in argument_sequence] if missing: msg = "Argument list didn't specify: {0} " msg = msg.format(', '.join([str(m.name) for m in missing])) raise CodeGenArgumentListError(msg, missing) # create redundant arguments to produce the requested sequence name_arg_dict = {x.name: x for x in arg_list} new_args = [] for symbol in argument_sequence: try: new_args.append(name_arg_dict[symbol]) except KeyError: new_args.append(InputArgument(symbol)) arg_list = new_args return Routine(name, arg_list, return_vals, local_vars, global_vars) def _get_symbol(self, s): """Print the symbol appropriately.""" return octave_code(s).strip() def _get_header(self): """Writes a common header for the generated files.""" code_lines = [] tmp = header_comment % {'version': diofant_version, 'project': self.project} for line in tmp.splitlines(): if line == '': code_lines.append('%\n') else: code_lines.append(f'% {line}\n') return code_lines def _preprocessor_statements(self, prefix): return [] def _get_routine_opening(self, routine): """Returns the opening statements of the routine.""" code_list = [] code_list.append('function ') # Outputs outs = [] for i, result in enumerate(routine.results): # Note: name not result_var; want `y` not `y(i)` for Indexed s = self._get_symbol(result.name) outs.append(s) if len(outs) > 1: code_list.append('[' + (', '.join(outs)) + ']') else: code_list.append(''.join(outs)) code_list.append(' = ') # Inputs args = [] for i, arg in enumerate(routine.arguments): if isinstance(arg, (OutputArgument, InOutArgument)): raise CodeGenError('Octave: invalid argument of type %s' % str(type(arg))) else: args.append('%s' % self._get_symbol(arg.name)) args = ', '.join(args) code_list.append(f'{routine.name}({args})\n') code_list = [ ''.join(code_list) ] return code_list def _declare_arguments(self, routine): return [] def _declare_globals(self, routine): if not routine.global_vars: return [] s = ' '.join(sorted(self._get_symbol(g) for g in routine.global_vars)) return ['global ' + s + '\n'] def _declare_locals(self, routine): return [] def _get_routine_ending(self, routine): return ['end\n'] def _call_printer(self, routine): declarations = [] code_lines = [] for i, result in enumerate(routine.results): assign_to = result.result_var constants, not_supported, oct_expr = octave_code(result.expr, assign_to=assign_to, human=False, inline=False) for obj, v in sorted(constants, key=str): declarations.append( f' {obj} = {v}; % constant\n') for obj in sorted(not_supported, key=str): if isinstance(obj, Function): name = obj.func else: name = obj declarations.append( f' % unsupported: {name}\n') code_lines.append(f'{oct_expr}\n') return declarations + code_lines def _indent_code(self, codelines): # Note that indenting seems to happen twice, first # statement-by-statement by OctavePrinter then again 
here. p = OctaveCodePrinter({'human': False}) return p.indent_code(codelines) def dump_m(self, routines, f, prefix, header=True, empty=True, inline=True): # Note used to call self.dump_code() but we need more control for header code_lines = self._preprocessor_statements(prefix) for i, routine in enumerate(routines): if i > 0: if empty: code_lines.append('\n') code_lines.extend(self._get_routine_opening(routine)) if i == 0: if routine.name != prefix: raise ValueError('Octave function name should match prefix') if header: code_lines.append('%' + prefix.upper() + ' Autogenerated by diofant\n') code_lines.append(''.join(self._get_header())) code_lines.extend(self._declare_arguments(routine)) code_lines.extend(self._declare_globals(routine)) code_lines.extend(self._declare_locals(routine)) if empty: code_lines.append('\n') code_lines.extend(self._call_printer(routine)) if empty: code_lines.append('\n') code_lines.extend(self._get_routine_ending(routine)) code_lines = self._indent_code(''.join(code_lines)) if code_lines: f.write(code_lines) dump_m.extension = code_extension # type: ignore[attr-defined] dump_m.__doc__ = CodeGen.dump_code.__doc__ # This list of dump functions is used by CodeGen.write to know which dump # functions it has to call. dump_fns = [dump_m] def get_code_generator(language, project): CodeGenClass = {'C': CCodeGen, 'F95': FCodeGen, 'OCTAVE': OctaveCodeGen}.get(language.upper()) if CodeGenClass is None: raise ValueError(f"Language '{language}' is not supported.") return CodeGenClass(project) # # Friendly functions # def codegen(name_expr, language, prefix=None, project='project', to_files=False, header=True, empty=True, argument_sequence=None, global_vars=None): """Generate source code for expressions in a given language. Parameters ========== name_expr : tuple, or list of tuples A single (name, expression) tuple or a list of (name, expression) tuples. Each tuple corresponds to a routine. If the expression is an equality (an instance of class Equality) the left hand side is considered an output argument. If expression is an iterable, then the routine will have multiple outputs. language : string A string that indicates the source code language. This is case insensitive. Currently, 'C', 'F95' and 'Octave' are supported. 'Octave' generates code compatible with both Octave and Matlab. prefix : string, optional A prefix for the names of the files that contain the source code. Language-dependent suffixes will be appended. If omitted, the name of the first name_expr tuple is used. project : string, optional A project name, used for making unique preprocessor instructions. [default: "project"] to_files : bool, optional When True, the code will be written to one or more files with the given prefix, otherwise strings with the names and contents of these files are returned. [default: False] header : bool, optional When True, a header is written on top of each source file. [default: True] empty : bool, optional When True, empty lines are used to structure the code. [default: True] argument_sequence : iterable, optional Sequence of arguments for the routine in a preferred order. A CodeGenError is raised if required arguments are missing. Redundant arguments are used without warning. If omitted, arguments will be ordered alphabetically, but with all input aguments first, and then output or in-out arguments. global_vars : iterable, optional Sequence of global variables used by the routine. Variables listed here will not show up as function arguments. 
Examples ======== >>> [(c_name, c_code), (h_name, c_header)] = codegen( ... ('f', x+y*z), 'C', 'test', header=False, empty=False) >>> print(c_name) test.c >>> print(c_code) #include "test.h" #include <math.h> double f(double x, double y, double z) { double f_result; f_result = x + y*z; return f_result; } >>> print(h_name) test.h >>> print(c_header) #ifndef PROJECT__TEST__H #define PROJECT__TEST__H double f(double x, double y, double z); #endif Another example using Equality objects to give named outputs. Here the filename (prefix) is taken from the first (name, expr) pair. >>> from diofant.abc import f, g >>> [(c_name, c_code), ... (h_name, c_header)] = codegen([('myfcn', x + y), ... ('fcn2', [Eq(f, 2*x), Eq(g, y)])], ... 'C', header=False, empty=False) >>> print(c_name) myfcn.c >>> print(c_code) #include "myfcn.h" #include <math.h> double myfcn(double x, double y) { double myfcn_result; myfcn_result = x + y; return myfcn_result; } void fcn2(double x, double y, double *f, double *g) { (*f) = 2*x; (*g) = y; } If the generated function(s) will be part of a larger project where various global variables have been defined, the 'global_vars' option can be used to remove the specified variables from the function signature >>> [(f_name, f_code), header] = codegen( ... ('f', x+y*z), 'F95', header=False, empty=False, ... argument_sequence=(x, y), global_vars=(z,)) >>> print(f_code) REAL*8 function f(x, y) implicit none REAL*8, intent(in) :: x REAL*8, intent(in) :: y f = x + y*z end function """ # Initialize the code generator. code_gen = get_code_generator(language, project) if isinstance(name_expr[0], str): # single tuple is given, turn it into a singleton list with a tuple. name_expr = [name_expr] if prefix is None: prefix = name_expr[0][0] # Construct Routines appropriate for this code_gen from (name, expr) pairs. routines = [] for name, expr in name_expr: routines.append(code_gen.routine(name, expr, argument_sequence, global_vars)) # Write the code. return code_gen.write(routines, prefix, to_files, header, empty) def make_routine(name, expr, argument_sequence=None, global_vars=None, language='F95'): """A factory that makes an appropriate Routine from an expression. Parameters ========== name : string The name of this routine in the generated code. expr : expression or list/tuple of expressions A Diofant expression that the Routine instance will represent. If given a list or tuple of expressions, the routine will be considered to have multiple return values and/or output arguments. argument_sequence : list or tuple, optional List arguments for the routine in a preferred order. If omitted, the results are language dependent, for example, alphabetical order or in the same order as the given expressions. global_vars : iterable, optional Sequence of global variables used by the routine. Variables listed here will not show up as function arguments. language : string, optional Specify a target language. The Routine itself should be language-agnostic but the precise way one is created, error checking, etc depend on the language. [default: "F95"]. A decision about whether to use output arguments or return values is made depending on both the language and the particular mathematical expressions. For an expression of type Equality, the left hand side is typically made into an OutputArgument (or perhaps an InOutArgument if appropriate). Otherwise, typically, the calculated expression is made a return values of the routine. 
Examples ======== >>> from diofant.abc import f, g >>> r = make_routine('test', [Eq(f, 2*x), Eq(g, x + y)]) >>> [arg.result_var for arg in r.results] [] >>> [arg.name for arg in r.arguments] [x, y, f, g] >>> [arg.name for arg in r.result_variables] [f, g] >>> r.local_vars set() Another more complicated example with a mixture of specified and automatically-assigned names. Also has Matrix output. >>> r = make_routine('fcn', [x*y, Eq(f, 1), Eq(g, x + g), Matrix([[x, 2]])]) >>> [arg.result_var for arg in r.results] [result_...] >>> [arg.expr for arg in r.results] [x*y] >>> [arg.name for arg in r.arguments] [x, y, f, g, out_...] We can examine the various arguments more closely: >>> [a.name for a in r.arguments if isinstance(a, InputArgument)] [x, y] >>> [a.name for a in r.arguments if isinstance(a, OutputArgument)] [f, out_...] >>> [a.expr for a in r.arguments if isinstance(a, OutputArgument)] [1, Matrix([[x, 2]])] >>> [a.name for a in r.arguments if isinstance(a, InOutArgument)] [g] >>> [a.expr for a in r.arguments if isinstance(a, InOutArgument)] [g + x] """ # initialize a new code generator code_gen = get_code_generator(language, 'nothingElseMatters') return code_gen.routine(name, expr, argument_sequence, global_vars)
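
# ---------------------------------------------------------------------------
# Illustrative usage (editor's sketch, not part of the original module).
# The codegen() docstring above shows only 'C' and 'F95' output; a minimal
# Octave example through the same friendly interface would look like this
# (the expression and names are assumptions chosen for illustration):
#
#     >>> from diofant.abc import x, y
#     >>> [(m_name, m_code)] = codegen(('f', x + y**2), 'Octave',
#     ...                              header=False, empty=False)
#     >>> m_name
#     'f.m'
#
# Octave output is a single file per call: the prefix defaults to the first
# routine name ('f') and dump_m appends the language's '.m' suffix.
# ---------------------------------------------------------------------------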
import pytest from diofant import (DiracDelta, Eq, Heaviside, I, Piecewise, Rational, Symbol, adjoint, conjugate, nan, pi, sign, sqrt, symbols, transpose) from diofant.abc import x, y, z from diofant.core.function import ArgumentIndexError __all__ = () def test_DiracDelta(): i = Symbol('i', nonzero=True) j = Symbol('j', positive=True) k = Symbol('k', negative=True) assert DiracDelta(1) == 0 assert DiracDelta(5.1) == 0 assert DiracDelta(-pi) == 0 assert DiracDelta(5, 7) == 0 assert DiracDelta(i) == 0 assert DiracDelta(j) == 0 assert DiracDelta(k) == 0 assert DiracDelta(nan) == nan assert isinstance(DiracDelta(0), DiracDelta) assert isinstance(DiracDelta(x), DiracDelta) assert adjoint(DiracDelta(x)) == DiracDelta(x) assert adjoint(DiracDelta(x - y)) == DiracDelta(x - y) assert conjugate(DiracDelta(x)) == DiracDelta(x) assert conjugate(DiracDelta(x - y)) == DiracDelta(x - y) assert transpose(DiracDelta(x)) == DiracDelta(x) assert transpose(DiracDelta(x - y)) == DiracDelta(x - y) assert DiracDelta(x).diff(x) == DiracDelta(x, 1) assert DiracDelta(x, 1).diff(x) == DiracDelta(x, 2) assert DiracDelta(x).is_simple(x) is True assert DiracDelta(3*x).is_simple(x) is True assert DiracDelta(x**2).is_simple(x) is False assert DiracDelta(sqrt(x)).is_simple(x) is False assert DiracDelta(x).is_simple(y) is False assert DiracDelta(x*y).simplify(x) == DiracDelta(x)/abs(y) assert DiracDelta(x*y).simplify(y) == DiracDelta(y)/abs(x) assert DiracDelta(x**2*y).simplify(x) == DiracDelta(x**2*y) assert DiracDelta(y).simplify(x) == DiracDelta(y) assert DiracDelta((x - 1)*(x - 2)*(x - 3)).simplify(x) == \ DiracDelta(x - 3)/2 + DiracDelta(x - 2) + DiracDelta(x - 1)/2 pytest.raises(ArgumentIndexError, lambda: DiracDelta(x).fdiff(2)) pytest.raises(ValueError, lambda: DiracDelta(x, -1)) def test_heaviside(): x, y = symbols('x, y', extended_real=True) assert Heaviside(0) == 0.5 assert Heaviside(-5) == 0 assert Heaviside(1) == 1 assert Heaviside(nan) == nan assert Heaviside(x).is_real assert adjoint(Heaviside(x)) == Heaviside(x) assert adjoint(Heaviside(x - y)) == Heaviside(x - y) assert conjugate(Heaviside(x)) == Heaviside(x) assert conjugate(Heaviside(x - y)) == Heaviside(x - y) assert transpose(Heaviside(x)) == Heaviside(x) assert transpose(Heaviside(x - y)) == Heaviside(x - y) assert Heaviside(x).diff(x) == DiracDelta(x) assert Heaviside(z + I).is_Function is True assert Heaviside(I*z).is_Function is True pytest.raises(ArgumentIndexError, lambda: Heaviside(x).fdiff(2)) pytest.raises(ValueError, lambda: Heaviside(I)) pytest.raises(ValueError, lambda: Heaviside(2 + 3*I)) def test_rewrite(): x = Symbol('x', extended_real=True) assert Heaviside(x).rewrite(Piecewise) == \ Piecewise((1, x > 0), (Rational(1, 2), Eq(x, 0)), (0, True)) assert Heaviside(y).rewrite(Piecewise) == Heaviside(y) assert Heaviside(x).rewrite(sign) == (sign(x)+1)/2 assert Heaviside(y).rewrite(sign) == Heaviside(y)
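
# Editor's note: a minimal interactive sketch of the rescaling behaviour the
# assertions above exercise (DiracDelta(a*x) simplifies to DiracDelta(x)/|a|):
#
#     >>> from diofant.abc import x
#     >>> DiracDelta(2*x).simplify(x)
#     DiracDelta(x)/2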
diofant/diofant
diofant/tests/functions/test_delta_functions.py
diofant/utilities/codegen.py
"""User-friendly public interface to polynomial functions.""" import functools import math import operator import mpmath from ..core import (Add, Basic, E, Expr, Integer, Mul, Tuple, oo, preorder_traversal) from ..core.compatibility import iterable from ..core.decorators import _sympifyit from ..core.mul import _keep_coeff from ..core.relational import Relational from ..core.sympify import sympify from ..domains import FF, QQ, ZZ from ..domains.compositedomain import CompositeDomain from ..logic.boolalg import BooleanAtom from ..utilities import default_sort_key, group, sift from .constructor import construct_domain from .groebnertools import groebner as _groebner from .groebnertools import matrix_fglm from .monomials import Monomial from .orderings import monomial_key from .polyerrors import (CoercionFailed, ComputationFailed, DomainError, ExactQuotientFailed, GeneratorsError, GeneratorsNeeded, MultivariatePolynomialError, PolificationFailed, PolynomialError, UnificationFailed) from .polyoptions import Modulus, Options, Order, allowed_flags, build_options from .polyutils import _find_gens, _parallel_dict_from_expr, _sort_gens from .rationaltools import together from .rings import PolyElement __all__ = ('Poly', 'PurePoly', 'parallel_poly_from_expr', 'degree', 'LC', 'LM', 'LT', 'div', 'rem', 'quo', 'exquo', 'half_gcdex', 'gcdex', 'invert', 'subresultants', 'resultant', 'discriminant', 'cofactors', 'gcd', 'lcm', 'terms_gcd', 'trunc', 'monic', 'content', 'primitive', 'compose', 'decompose', 'sqf_norm', 'sqf_part', 'sqf_list', 'sqf', 'factor_list', 'factor', 'count_roots', 'real_roots', 'nroots', 'cancel', 'reduced', 'groebner', 'GroebnerBasis', 'poly') class Poly(Expr): """Generic class for representing polynomial expressions.""" is_commutative = True is_Poly = True _op_priority = 10.1 def __new__(cls, rep, *gens, **args): """Create a new polynomial instance out of something useful.""" opt = build_options(gens, args) if iterable(rep, exclude=str): if isinstance(rep, dict): return cls._from_dict(rep, opt) else: return cls._from_list(list(rep), opt) else: rep = sympify(rep) if rep.is_Poly: return cls._from_poly(rep, opt) else: return cls._from_expr(rep, opt) @classmethod def new(cls, rep, *gens): """Construct :class:`Poly` instance from raw representation.""" if not isinstance(rep, PolyElement): raise PolynomialError( f'invalid polynomial representation: {rep}') elif rep.ring.ngens != len(gens): raise PolynomialError(f'invalid arguments: {rep}, {gens}') obj = Expr.__new__(cls) obj.rep = rep obj.gens = gens return obj @classmethod def from_dict(cls, rep, *gens, **args): """Construct a polynomial from a :class:`dict`.""" opt = build_options(gens, args) return cls._from_dict(rep, opt) @classmethod def from_list(cls, rep, *gens, **args): """Construct a polynomial from a :class:`list`.""" opt = build_options(gens, args) return cls._from_list(rep, opt) @classmethod def from_poly(cls, rep, *gens, **args): """Construct a polynomial from a polynomial.""" opt = build_options(gens, args) return cls._from_poly(rep, opt) @classmethod def from_expr(cls, rep, *gens, **args): """Construct a polynomial from an expression.""" opt = build_options(gens, args) return cls._from_expr(rep, opt) @classmethod def _from_dict(cls, rep, opt): """Construct a polynomial from a :class:`dict`.""" gens = opt.gens if not gens: raise GeneratorsNeeded( "can't initialize from 'dict' without generators") domain = opt.domain if domain is None: domain, rep = construct_domain(rep, opt=opt) else: for monom, coeff in rep.items(): 
rep[monom] = domain.convert(coeff) ring = domain.poly_ring(*gens, order=opt.order) return cls.new(ring.from_dict(rep), *gens) @classmethod def _from_list(cls, rep, opt): """Construct a polynomial from a :class:`list`.""" gens = opt.gens if not gens: raise GeneratorsNeeded( "can't initialize from 'list' without generators") elif len(gens) != 1: raise MultivariatePolynomialError( "'list' representation not supported") domain = opt.domain if domain is None: domain, rep = construct_domain(rep, opt=opt) else: rep = list(map(domain.convert, rep)) ring = domain.poly_ring(*gens) return cls.new(ring.from_list(rep), *gens) @classmethod def _from_poly(cls, rep, opt): """Construct a polynomial from a polynomial.""" if cls != rep.__class__: rep = cls.new(rep.rep, *rep.gens) gens = opt.gens if opt.composite or (gens and set(rep.gens) != set(gens)): return cls._from_expr(rep.as_expr(), opt) if gens and rep.gens != gens: rep = rep.reorder(*gens) if opt.domain: rep = rep.set_domain(opt.domain) elif opt.field: rep = rep.to_field() return rep @classmethod def _from_expr(cls, rep, opt): """Construct a polynomial from an expression.""" (rep,), opt = _parallel_dict_from_expr([rep], opt) return cls._from_dict(rep, opt) def _hashable_content(self): """Allow Diofant to hash Poly instances.""" return self.rep, self.gens def __hash__(self): return super().__hash__() @property def free_symbols(self): """ Free symbols of a polynomial expression. Examples ======== >>> (x**2 + 1).as_poly().free_symbols {x} >>> (x**2 + y).as_poly().free_symbols {x, y} >>> (x**2 + y).as_poly(x).free_symbols {x, y} """ symbols = set() for gen in self.gens: symbols |= gen.free_symbols return symbols | self.free_symbols_in_domain @property def free_symbols_in_domain(self): """ Free symbols of the domain of ``self``. Examples ======== >>> (x**2 + 1).as_poly().free_symbols_in_domain set() >>> (x**2 + y).as_poly().free_symbols_in_domain set() >>> (x**2 + y).as_poly(x).free_symbols_in_domain {y} """ domain, symbols = self.domain, set() if isinstance(domain, CompositeDomain): for gen in domain.symbols: symbols |= gen.free_symbols elif domain.is_ExpressionDomain: for coeff in self.coeffs(): symbols |= coeff.free_symbols return symbols @property def args(self): """ Don't mess up with the core. Examples ======== >>> (x**2 + 1).as_poly().args (x**2 + 1, x) """ return (self.as_expr(),) + self.gens @property def is_number(self): return self.as_expr().is_number @property def gen(self): """ Return the principal generator. Examples ======== >>> (x**2 + 1).as_poly().gen x """ return self.gens[0] @property def domain(self): """Get the ground domain of ``self``.""" return self.rep.ring.domain def unify(self, other): """ Make ``self`` and ``other`` belong to the same domain. 
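
        Both polynomials are converted to a common polynomial ring, built
        by unifying their generators and their ground domains.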
Examples ======== >>> f, g = (x/2 + 1).as_poly(), (2*x + 1).as_poly() >>> f Poly(1/2*x + 1, x, domain='QQ') >>> g Poly(2*x + 1, x, domain='ZZ') >>> F, G = f.unify(g) >>> F Poly(1/2*x + 1, x, domain='QQ') >>> G Poly(2*x + 1, x, domain='QQ') """ _, per, F, G = self._unify(other) return per(F), per(G) def _unify(self, other): other = sympify(other) if not other.is_Poly: try: return (self.domain, self.per, self.rep, self.rep.ring(self.domain.convert(other))) except CoercionFailed: raise UnificationFailed(f"can't unify {self} with {other}") newring = self.rep.ring.unify(other.rep.ring) gens = newring.symbols F, G = self.rep.set_ring(newring), other.rep.set_ring(newring) cls = self.__class__ dom = newring.domain def per(rep, dom=dom, gens=gens, remove=None): if remove is not None: gens = gens[:remove] + gens[remove + 1:] if not gens: return dom.to_expr(rep) return cls.new(rep, *gens) return dom, per, F, G def per(self, rep, gens=None, remove=None): """ Create a Poly out of the given representation. Examples ======== >>> a = (x**2 + 1).as_poly() >>> R = ZZ.inject(x) >>> a.per(R.from_list([ZZ(1), ZZ(1)]), gens=[y]) Poly(y + 1, y, domain='ZZ') """ if gens is None: gens = self.gens if remove is not None: gens = gens[:remove] + gens[remove + 1:] if not gens: return self.domain.to_expr(rep) return self.__class__.new(rep, *gens) def set_domain(self, domain): """Set the ground domain of ``self``.""" opt = build_options(self.gens, {'domain': domain}) newrep = self.rep.set_domain(opt.domain) return self.per(newrep) def set_modulus(self, modulus): """ Set the modulus of ``self``. Examples ======== >>> (5*x**2 + 2*x - 1).as_poly().set_modulus(2) Poly(x**2 + 1, x, modulus=2) """ modulus = Modulus.preprocess(modulus) return self.set_domain(FF(modulus)) def get_modulus(self): """ Get the modulus of ``self``. Examples ======== >>> (x**2 + 1).as_poly(modulus=2).get_modulus() 2 """ domain = self.domain if domain.is_FiniteField: return Integer(domain.order) else: raise PolynomialError('not a polynomial over a Galois field') def _eval_subs(self, old, new): """Internal implementation of :func:`~diofant.core.basic.Basic.subs`.""" if old in self.gens: if new.is_number: return self.eval(old, new) else: try: return self.replace(old, new) except PolynomialError: pass return self.as_expr().subs({old: new}) def exclude(self): """ Remove unnecessary generators from ``self``. Examples ======== >>> (a + x).as_poly(a, b, c, d, x).exclude() Poly(a + x, a, x, domain='ZZ') """ rep = self.rep if rep.is_ground: return self for x in rep.ring.symbols: try: rep = rep.drop(x) except ValueError: pass return self.per(rep, gens=rep.ring.symbols) def replace(self, x, y=None): """ Replace ``x`` with ``y`` in generators list. Examples ======== >>> (x**2 + 1).as_poly().replace(x, y) Poly(y**2 + 1, y, domain='ZZ') """ if y is None: if self.is_univariate: x, y = self.gen, x else: raise PolynomialError( 'syntax supported only in univariate case') if x == y: return self if x in self.gens and y not in self.gens: dom = self.domain if not isinstance(dom, CompositeDomain) or y not in dom.symbols: gens = list(self.gens) gens[gens.index(x)] = y rep = dom.poly_ring(*gens).from_dict(dict(self.rep)) return self.per(rep, gens=gens) raise PolynomialError(f"can't replace {x} with {y} in {self}") def reorder(self, *gens, **args): """ Efficiently apply new order of generators. 
Examples ======== >>> (x**2 + x*y**2).as_poly().reorder(y, x) Poly(y**2*x + x**2, y, x, domain='ZZ') """ opt = Options((), args) if not gens: gens = _sort_gens(self.gens, opt=opt) elif set(self.gens) != set(gens): raise PolynomialError( 'generators list can differ only up to order of elements') rep = self.rep new_ring = rep.ring.clone(symbols=gens) rep = rep.set_ring(new_ring) return self.per(rep, gens=gens) def has_only_gens(self, *gens): """ Return ``True`` if ``Poly(f, *gens)`` retains ground domain. Examples ======== >>> (x*y + 1).as_poly(x, y, z).has_only_gens(x, y) True >>> (x*y + z).as_poly(x, y, z).has_only_gens(x, y) False """ indices = set() for gen in gens: try: index = self.gens.index(gen) except ValueError: raise GeneratorsError( f"{self} doesn't have {gen} as generator") else: indices.add(index) for monom in self.monoms(): for i, elt in enumerate(monom): if i not in indices and elt: return False return True def to_ring(self): """ Make the ground domain a ring. Examples ======== >>> (x**2 + 1).as_poly(field=True).to_ring() Poly(x**2 + 1, x, domain='ZZ') """ return self.set_domain(self.domain.ring) def to_field(self): """ Make the ground domain a field. Examples ======== >>> (x**2 + 1).as_poly().to_field() Poly(x**2 + 1, x, domain='QQ') """ return self.set_domain(self.domain.field) def to_exact(self): """ Make the ground domain exact. Examples ======== >>> (x**2 + 1.0).as_poly().to_exact() Poly(x**2 + 1, x, domain='QQ') """ return self.set_domain(self.domain.get_exact()) def retract(self, field=None): """ Recalculate the ground domain of a polynomial. Examples ======== >>> f = (x**2 + 1).as_poly(domain=QQ.inject(y)) >>> f Poly(x**2 + 1, x, domain='QQ[y]') >>> f.retract() Poly(x**2 + 1, x, domain='ZZ') >>> f.retract(field=True) Poly(x**2 + 1, x, domain='QQ') """ dom, rep = construct_domain(self.as_dict(), field=field, composite=isinstance(self.domain, CompositeDomain) or None, extension=False if self.domain.is_ExpressionDomain else True) return self.from_dict(rep, *self.gens, domain=dom) def slice(self, x, m, n=None): """Take a continuous subsequence of terms of ``self``.""" if n is None: j, m, n = 0, x, m else: j = self._gen_to_level(x) m, n = int(m), int(n) result = self.rep.slice(m, n, j) return self.per(result) def coeffs(self, order=None): """ Returns all non-zero coefficients from ``self`` in lex order. Examples ======== >>> (x**3 + 2*x + 3).as_poly().coeffs() [1, 2, 3] See Also ======== all_coeffs coeff_monomial """ return [coeff for _, coeff in self.terms(order)] def monoms(self, order=None): """ Returns all non-zero monomials from ``self`` in lex order. Examples ======== >>> (x**2 + 2*x*y**2 + x*y + 3*y).as_poly().monoms() [(2, 0), (1, 2), (1, 1), (0, 1)] """ return [monom for monom, _ in self.terms(order)] def terms(self, order=None): """ Returns all non-zero terms from ``self`` in lex order. Examples ======== >>> (x**2 + 2*x*y**2 + x*y + 3*y).as_poly().terms() [((2, 0), 1), ((1, 2), 2), ((1, 1), 1), ((0, 1), 3)] """ rep = self.rep if order is None: order = rep.ring.order else: order = Order.preprocess(order) return [(m, self.domain.to_expr(c)) for m, c in sorted(rep.items(), key=lambda monom: order(monom[0]), reverse=True)] def all_coeffs(self): """ Returns all coefficients from a univariate polynomial ``self``. Examples ======== >>> (x**3 + 2*x - 1).as_poly().all_coeffs() [-1, 2, 0, 1] """ return [self.domain.to_expr(c) for c in self.rep.all_coeffs()] def termwise(self, func, *gens, **args): """ Apply a function to all terms of ``self``. 
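
        The function ``func`` is called with each ``(monom, coeff)`` pair
        and should return either a new coefficient or a new
        ``(monom, coeff)`` tuple; terms whose resulting coefficient is
        zero are dropped.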
        Examples
        ========

        >>> def func(k, coeff):
        ...     k = k[0]
        ...     return coeff//10**(2-k)

        >>> (x**2 + 20*x + 400).as_poly().termwise(func)
        Poly(x**2 + 2*x + 4, x, domain='ZZ')

        """
        terms = {}

        for monom, coeff in self.terms():
            result = func(monom, coeff)

            if isinstance(result, tuple):
                monom, coeff = result
            else:
                coeff = result

            if coeff:
                if monom not in terms:
                    terms[monom] = coeff
                else:
                    raise PolynomialError(f'{monom} monomial was generated twice')

        return self.from_dict(terms, *(gens or self.gens), **args)

    def length(self):
        """
        Returns the number of non-zero terms in ``self``.

        Examples
        ========

        >>> (x**2 + 2*x - 1).as_poly().length()
        3

        """
        return len(self.as_dict())

    def as_dict(self, native=False):
        """
        Switch to a :class:`dict` representation.

        Examples
        ========

        >>> (x**2 + 2*x*y**2 - y).as_poly().as_dict()
        {(0, 1): -1, (1, 2): 2, (2, 0): 1}

        """
        if native:
            return dict(self.rep)
        else:
            return {k: self.domain.to_expr(v) for k, v in self.rep.items()}

    def as_expr(self, *gens):
        """
        Convert a Poly instance to an Expr instance.

        Examples
        ========

        >>> f = (x**2 + 2*x*y**2 - y).as_poly()

        >>> f.as_expr()
        x**2 + 2*x*y**2 - y
        >>> f.as_expr({x: 5})
        10*y**2 - y + 25
        >>> f.as_expr(5, 6)
        379

        """
        if not gens:
            gens = self.gens
        elif len(gens) == 1 and isinstance(gens[0], dict):
            mapping = gens[0]
            gens = list(self.gens)

            for gen, value in mapping.items():
                try:
                    index = gens.index(gen)
                except ValueError:
                    raise GeneratorsError(
                        f"{self} doesn't have {gen} as generator")
                else:
                    gens[index] = value

        rep = self.rep
        return rep.ring.to_expr(rep).subs(zip(self.gens, gens))

    def inject(self, front=False):
        """
        Inject ground domain generators into ``self``.

        Examples
        ========

        >>> f = (x**2*y + x*y**3 + x*y + 1).as_poly(x)

        >>> f.inject()
        Poly(x**2*y + x*y**3 + x*y + 1, x, y, domain='ZZ')
        >>> f.inject(front=True)
        Poly(y**3*x + y*x**2 + y*x + 1, y, x, domain='ZZ')

        """
        result = self.rep.inject(front=front)
        return self.new(result, *result.ring.symbols)

    def eject(self, *gens):
        """
        Eject selected generators into the ground domain.

        Examples
        ========

        >>> f = (x**2*y + x*y**3 + x*y + 1).as_poly()

        >>> f.eject(x)
        Poly(x*y**3 + (x**2 + x)*y + 1, y, domain='ZZ[x]')
        >>> f.eject(y)
        Poly(y*x**2 + (y**3 + y)*x + 1, x, domain='ZZ[y]')

        """
        dom = self.domain

        if not dom.is_Numerical:
            raise DomainError(f"can't eject generators over {dom}")

        result = self.rep.copy()
        result = result.eject(*gens)
        return self.new(result, *result.ring.symbols)

    def terms_gcd(self):
        """
        Remove GCD of terms from the polynomial ``self``.

        Examples
        ========

        >>> (x**6*y**2 + x**3*y).as_poly().terms_gcd()
        ((3, 1), Poly(x**3*y + 1, x, y, domain='ZZ'))

        """
        J, result = self.rep.terms_gcd()
        return J, self.per(result)

    def quo_ground(self, coeff):
        """
        Quotient of ``self`` by an element of the ground domain.

        Examples
        ========

        >>> (2*x + 4).as_poly().quo_ground(2)
        Poly(x + 2, x, domain='ZZ')

        >>> (2*x + 3).as_poly().quo_ground(2)
        Poly(x + 1, x, domain='ZZ')

        """
        result = self.rep.quo_ground(coeff)
        return self.per(result)

    def exquo_ground(self, coeff):
        """
        Exact quotient of ``self`` by an element of the ground domain.

        Examples
        ========

        >>> (2*x + 4).as_poly().exquo_ground(2)
        Poly(x + 2, x, domain='ZZ')

        >>> (2*x + 3).as_poly().exquo_ground(2)
        Traceback (most recent call last):
        ...
        ExactQuotientFailed: 2 does not divide 3 in ZZ

        """
        result = self.rep.exquo_ground(coeff)
        return self.per(result)

    def div(self, other, auto=True):
        """
        Polynomial division with remainder of ``self`` by ``other``.

        When ``auto`` is true (the default) and the ground domain is a
        ring, the division is carried out over the associated field and
        the result is converted back to the ring where possible.
Examples ======== >>> (x**2 + 1).as_poly().div((2*x - 4).as_poly()) (Poly(1/2*x + 1, x, domain='QQ'), Poly(5, x, domain='QQ')) >>> (x**2 + 1).as_poly().div((2*x - 4).as_poly(), auto=False) (Poly(0, x, domain='ZZ'), Poly(x**2 + 1, x, domain='ZZ')) """ dom, per, F, G = self._unify(other) retract = False if auto and dom.is_Ring and not dom.is_Field: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) retract = True q, r = divmod(F, G) if retract: try: Q, R = q.set_domain(q.ring.domain.ring), r.set_domain(r.ring.domain.ring) except CoercionFailed: pass else: q, r = Q, R return per(q), per(r) def rem(self, other, auto=True): """ Computes the polynomial remainder of ``self`` by ``other``. Examples ======== >>> (x**2 + 1).as_poly().rem((2*x - 4).as_poly()) Poly(5, x, domain='ZZ') >>> (x**2 + 1).as_poly().rem((2*x - 4).as_poly(), auto=False) Poly(x**2 + 1, x, domain='ZZ') """ dom, per, F, G = self._unify(other) retract = False if auto and dom.is_Ring and not dom.is_Field: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) retract = True r = F % G if retract: try: r = r.set_domain(r.ring.domain.ring) except CoercionFailed: pass return per(r) def quo(self, other, auto=True): """ Computes polynomial quotient of ``self`` by ``other``. Examples ======== >>> (x**2 + 1).as_poly().quo((2*x - 4).as_poly()) Poly(1/2*x + 1, x, domain='QQ') >>> (x**2 - 1).as_poly().quo((x - 1).as_poly()) Poly(x + 1, x, domain='ZZ') """ dom, per, F, G = self._unify(other) retract = False if auto and dom.is_Ring and not dom.is_Field: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) retract = True q = F // G if retract: try: q = q.set_domain(q.ring.domain.ring) except CoercionFailed: pass return per(q) def exquo(self, other, auto=True): """ Computes polynomial exact quotient of ``self`` by ``other``. Examples ======== >>> (x**2 - 1).as_poly().exquo((x - 1).as_poly()) Poly(x + 1, x, domain='ZZ') >>> (x**2 + 1).as_poly().exquo((2*x - 4).as_poly()) Traceback (most recent call last): ... ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1 """ dom, per, F, G = self._unify(other) retract = False if auto and dom.is_Ring and not dom.is_Field: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) retract = True try: q = F.exquo(G) except ExactQuotientFailed as exc: raise exc.new(self.as_expr(), other.as_expr()) if retract: try: q = q.set_domain(q.ring.domain.ring) except CoercionFailed: pass return per(q) def _gen_to_level(self, gen): """Returns level associated with the given generator.""" try: return self.rep.ring.index(gen) except ValueError: raise PolynomialError(f'a valid generator expected, got {gen}') def degree(self, gen=0): """ Returns degree of ``self`` in ``x_j``. The degree of 0 is negative floating-point infinity. Examples ======== >>> (x**2 + y*x + 1).as_poly().degree() 2 >>> (x**2 + y*x + y).as_poly().degree(y) 1 >>> Integer(0).as_poly(x).degree() -inf """ j = self._gen_to_level(gen) return self.rep.degree(j) def total_degree(self): """ Returns the total degree of ``self``. Examples ======== >>> (x**2 + y*x + 1).as_poly().total_degree() 2 >>> (x + y**5).as_poly().total_degree() 5 """ return self.rep.total_degree() def LC(self, order=None): """ Returns the leading coefficient of ``self``. Examples ======== >>> (4*x**3 + 2*x**2 + 3*x).as_poly().LC() 4 """ if order is not None: return self.coeffs(order)[0] result = self.rep.LC return self.domain.to_expr(result) def TC(self): """ Returns the trailing coefficient of ``self``. 
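
        The trailing coefficient is the coefficient of the constant
        monomial, so it may be zero even for a non-zero polynomial.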
        Examples
        ========

        >>> (x**3 + 2*x**2 + 3*x).as_poly().TC()
        0

        """
        result = self.rep[1]
        return self.domain.to_expr(result)

    def EC(self, order=None):
        """
        Returns the last non-zero coefficient of ``self``.

        Examples
        ========

        >>> (x**3 + 2*x**2 + 3*x).as_poly().EC()
        3

        """
        EM = self.EM(order)
        return self.coeff_monomial(tuple(EM))

    def coeff_monomial(self, monom):
        """
        Returns the coefficient of ``monom`` in ``self``, or zero if
        ``monom`` does not occur.

        Examples
        ========

        >>> p = (24*x*y*exp(8) + 23*x).as_poly(greedy=False)

        >>> p.coeff_monomial(x)
        23
        >>> p.coeff_monomial(y)
        0
        >>> p.coeff_monomial(x*y)
        24*E**8
        >>> p.coeff_monomial((1, 1))
        24*E**8

        Note that ``Expr.coeff()`` behaves differently, collecting terms
        if possible; the Poly must be converted to an Expr to use that
        method, however:

        >>> p.as_expr().coeff(x)
        24*E**8*y + 23
        >>> p.as_expr().coeff(y)
        24*E**8*x
        >>> p.as_expr().coeff(x*y)
        24*E**8

        """
        N = Monomial(monom, self.gens)
        if len(N) != len(self.gens):
            raise ValueError('exponent of each generator must be specified')
        result = self.rep[N]
        return self.domain.to_expr(result)

    def coeff(self, x, n=1, right=False):
        # the semantics of coeff_monomial and Expr.coeff are different;
        # if someone is working with a Poly, they should be aware of the
        # differences and choose the method best suited for the query.
        # Alternatively, a pure-polys method could be written here but
        # at this time the ``right`` keyword would be ignored because Poly
        # doesn't work with non-commutatives.
        raise NotImplementedError(
            'Either convert to Expr with `as_expr` method '
            "to use Expr's coeff method or else use the "
            '`coeff_monomial` method of Polys.')

    def LM(self, order=None):
        """
        Returns the leading monomial of ``self``.

        The leading monomial signifies the monomial having the highest
        power of the principal generator in the polynomial expression.

        Examples
        ========

        >>> (4*x**2 + 2*x*y**2 + x*y + 3*y).as_poly().LM()
        x**2*y**0

        """
        LM = (0,)*len(self.gens) if self.is_zero else self.monoms(order)[0]
        return Monomial(LM, self.gens)

    def EM(self, order=None):
        """
        Returns the last non-zero monomial of ``self``.

        Examples
        ========

        >>> (4*x**2 + 2*x*y**2 + x*y + 3*y).as_poly().EM()
        x**0*y**1

        """
        EM = (0,)*len(self.gens) if self.is_zero else self.monoms(order)[-1]
        return Monomial(EM, self.gens)

    def LT(self, order=None):
        """
        Returns the leading term of ``self``.

        The leading term signifies the term having the highest power of
        the principal generator in the polynomial expression.

        Examples
        ========

        >>> (4*x**2 + 2*x*y**2 + x*y + 3*y).as_poly().LT()
        (x**2*y**0, 4)

        """
        LM = self.LM(order)
        return LM, self.coeff_monomial(tuple(LM))

    def ET(self, order=None):
        """
        Returns the last non-zero term of ``self``.

        Examples
        ========

        >>> (4*x**2 + 2*x*y**2 + x*y + 3*y).as_poly().ET()
        (x**0*y**1, 3)

        """
        EM = self.EM(order)
        return EM, self.coeff_monomial(tuple(EM))

    def clear_denoms(self, convert=False):
        """
        Clear denominators, but keep the ground domain.

        Examples
        ========

        >>> f = (x/2 + Rational(1, 3)).as_poly()

        >>> f.clear_denoms()
        (6, Poly(3*x + 2, x, domain='QQ'))
        >>> f.clear_denoms(convert=True)
        (6, Poly(3*x + 2, x, domain='ZZ'))

        """
        dom = self.domain
        if convert and dom.has_assoc_Ring:
            dom = self.domain.ring

        coeff, result = self.rep.clear_denoms(convert=convert)
        f = self.per(result)

        if convert:
            f = f.set_domain(dom)

        return dom.to_expr(coeff), f

    def rat_clear_denoms(self, other):
        """
        Clear denominators in a rational function ``self/other``.
Examples ======== >>> f = (x**2/y + 1).as_poly(x) >>> g = (x**3 + y).as_poly(x) >>> p, q = f.rat_clear_denoms(g) >>> p Poly(x**2 + y, x, domain='ZZ[y]') >>> q Poly(y*x**3 + y**2, x, domain='ZZ[y]') """ f, g = self, other dom, per, f, g = f._unify(g) f = per(f) g = per(g) if not (dom.is_Field and dom.has_assoc_Ring): return f, g a, f = f.clear_denoms(convert=True) b, g = g.clear_denoms(convert=True) f *= b g *= a return f, g def integrate(self, *specs, **args): """ Computes indefinite integral of ``self``. Examples ======== >>> (x**2 + 2*x + 1).as_poly().integrate() Poly(1/3*x**3 + x**2 + x, x, domain='QQ') >>> (x*y**2 + x).as_poly().integrate((0, 1), (1, 0)) Poly(1/2*x**2*y**2 + 1/2*x**2, x, y, domain='QQ') """ f = self if args.get('auto', True) and f.domain.is_Ring: f = f.to_field() if not specs: return f.per(f.rep.integrate(m=1)) rep = f.rep for spec in specs: if type(spec) is tuple: gen, m = spec else: gen, m = spec, 1 rep = rep.integrate(f._gen_to_level(gen), int(m)) return f.per(rep) def _eval_derivative(self, v): rep = self.rep v = self._gen_to_level(v) rep = rep.diff(v) return self.per(rep) def eval(self, x, a=None, auto=True): """ Evaluate ``self`` at ``a`` in the given variable. Examples ======== >>> (x**2 + 2*x + 3).as_poly().eval(2) 11 >>> (2*x*y + 3*x + y + 2).as_poly().eval(x, 2) Poly(5*y + 8, y, domain='ZZ') >>> f = (2*x*y + 3*x + y + 2*z).as_poly() >>> f.eval({x: 2}) Poly(5*y + 2*z + 6, y, z, domain='ZZ') >>> f.eval({x: 2, y: 5}) Poly(2*z + 31, z, domain='ZZ') >>> f.eval({x: 2, y: 5, z: 7}) 45 >>> f.eval((2, 5)) Poly(2*z + 31, z, domain='ZZ') >>> f(2, 5) Poly(2*z + 31, z, domain='ZZ') """ f = self if a is None: if isinstance(x, dict): mapping = x for gen, value in mapping.items(): f = f.eval(gen, value) return f elif isinstance(x, (tuple, list)): values = x if len(values) > len(f.gens): raise ValueError('too many values provided') for gen, value in zip(f.gens, values): f = f.eval(gen, value) return f else: j, a = 0, x else: j = f._gen_to_level(x) try: result = f.rep.eval(j, a) except CoercionFailed: if not auto: raise DomainError(f"can't evaluate at {a} in {f.domain}") else: a_domain, [a] = construct_domain([a]) new_domain = f.domain.unify(a_domain, f.gens) f = f.set_domain(new_domain) a = new_domain.convert(a, a_domain) result = f.rep.eval(j, a) return f.per(result, remove=j) def __call__(self, *values): """ Evaluate ``self`` at the give values. Examples ======== >>> f = (2*x*y + 3*x + y + 2*z).as_poly() >>> f(2) Poly(5*y + 2*z + 6, y, z, domain='ZZ') >>> f(2, 5) Poly(2*z + 31, z, domain='ZZ') >>> f(2, 5, 7) 45 """ return self.eval(values) def half_gcdex(self, other, auto=True): """ Half extended Euclidean algorithm of ``self`` and ``other``. Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``. Examples ======== >>> f = (x**4 - 2*x**3 - 6*x**2 + 12*x + 15).as_poly() >>> g = (x**3 + x**2 - 4*x - 4).as_poly() >>> f.half_gcdex(g) (Poly(-1/5*x + 3/5, x, domain='QQ'), Poly(x + 1, x, domain='QQ')) """ dom, per, F, G = self._unify(other) if auto and dom.is_Ring: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) s, h = F.half_gcdex(G) return per(s), per(h) def gcdex(self, other, auto=True): """ Extended Euclidean algorithm of ``self`` and ``other``. Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``. 
Examples ======== >>> f = (x**4 - 2*x**3 - 6*x**2 + 12*x + 15).as_poly() >>> g = (x**3 + x**2 - 4*x - 4).as_poly() >>> f.gcdex(g) (Poly(-1/5*x + 3/5, x, domain='QQ'), Poly(1/5*x**2 - 6/5*x + 2, x, domain='QQ'), Poly(x + 1, x, domain='QQ')) """ dom, per, F, G = self._unify(other) if auto and dom.is_Ring: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) s, t, h = F.gcdex(G) return per(s), per(t), per(h) def invert(self, other, auto=True): """ Invert ``self`` modulo ``other`` when possible. Examples ======== >>> (x**2 - 1).as_poly().invert((2*x - 1).as_poly()) Poly(-4/3, x, domain='QQ') >>> (x**2 - 1).as_poly().invert((x - 1).as_poly()) Traceback (most recent call last): ... NotInvertible: zero divisor """ dom, per, F, G = self._unify(other) if auto and dom.is_Ring: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) result = F.ring.invert(F, G) return per(result) def subresultants(self, other): """ Computes the subresultant PRS of ``self`` and ``other``. Examples ======== >>> (x**2 + 1).as_poly().subresultants((x**2 - 1).as_poly()) [Poly(x**2 + 1, x, domain='ZZ'), Poly(x**2 - 1, x, domain='ZZ'), Poly(-2, x, domain='ZZ')] """ _, per, F, G = self._unify(other) result = F.subresultants(G) return list(map(per, result)) def resultant(self, other, includePRS=False): """ Computes the resultant of ``self`` and ``other`` via PRS. If includePRS=True, it includes the subresultant PRS in the result. Because the PRS is used to calculate the resultant, this is more efficient than calling :func:`subresultants` separately. Examples ======== >>> f = (x**2 + 1).as_poly() >>> f.resultant((x**2 - 1).as_poly()) 4 >>> f.resultant((x**2 - 1).as_poly(), includePRS=True) (4, [Poly(x**2 + 1, x, domain='ZZ'), Poly(x**2 - 1, x, domain='ZZ'), Poly(-2, x, domain='ZZ')]) """ _, per, F, G = self._unify(other) if includePRS: result, R = F.resultant(G, includePRS=includePRS) return per(result, remove=0), list(map(per, R)) else: result = F.resultant(G) return per(result, remove=0) def discriminant(self): """ Computes the discriminant of ``self``. Examples ======== >>> (x**2 + 2*x + 3).as_poly().discriminant() -8 """ result = self.rep.discriminant() return self.per(result, remove=0) def dispersionset(self, other=None): r"""Compute the *dispersion set* of two polynomials. Examples ======== >>> ((x - 3)*(x + 3)).as_poly().dispersionset() {0, 6} """ f = self.rep ring = f.ring g = other.rep if other is not None else other return {ZZ.to_expr(i) for i in ring.dispersionset(f, g)} def cofactors(self, other): """ Returns the GCD of ``self`` and ``other`` and their cofactors. For two polynomials ``f`` and ``g`` it returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and ``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are, so called, cofactors of ``f`` and ``g``. Examples ======== >>> (x**2 - 1).as_poly().cofactors((x**2 - 3*x + 2).as_poly()) (Poly(x - 1, x, domain='ZZ'), Poly(x + 1, x, domain='ZZ'), Poly(x - 2, x, domain='ZZ')) """ _, per, F, G = self._unify(other) h, cff, cfg = F.cofactors(G) return per(h), per(cff), per(cfg) def gcd(self, other): """ Returns the polynomial GCD of ``self`` and ``other``. Examples ======== >>> (x**2 - 1).as_poly().gcd((x**2 - 3*x + 2).as_poly()) Poly(x - 1, x, domain='ZZ') """ _, per, F, G = self._unify(other) result = F.gcd(G) return per(result) def lcm(self, other): """ Returns polynomial LCM of ``self`` and ``other``. 
Examples ======== >>> (x**2 - 1).as_poly().lcm((x**2 - 3*x + 2).as_poly()) Poly(x**3 - 2*x**2 - x + 2, x, domain='ZZ') """ _, per, F, G = self._unify(other) result = F.lcm(G) return per(result) def trunc(self, p): """ Reduce ``self`` modulo a constant ``p``. Examples ======== >>> (2*x**3 + 3*x**2 + 5*x + 7).as_poly().trunc(3) Poly(-x**3 - x + 1, x, domain='ZZ') """ p = self.domain.convert(p) result = self.rep.trunc_ground(p) return self.per(result) def monic(self, auto=True): """ Divides all coefficients by ``LC(f)``. Examples ======== >>> (3*x**2 + 6*x + 9).as_poly().monic() Poly(x**2 + 2*x + 3, x, domain='QQ') >>> (3*x**2 + 4*x + 2).as_poly().monic() Poly(x**2 + 4/3*x + 2/3, x, domain='QQ') """ f = self if auto and f.domain.is_Ring: f = f.to_field() result = f.rep.monic() return f.per(result) def content(self): """ Returns the GCD of polynomial coefficients. Examples ======== >>> (6*x**2 + 8*x + 12).as_poly().content() 2 """ result = self.rep.content() return self.domain.to_expr(result) def primitive(self): """ Returns the content and a primitive form of ``self``. Examples ======== >>> (2*x**2 + 8*x + 12).as_poly().primitive() (2, Poly(x**2 + 4*x + 6, x, domain='ZZ')) """ cont, result = self.rep.primitive() return self.domain.to_expr(cont), self.per(result) def compose(self, other): """ Computes the functional composition of ``self`` and ``other``. Examples ======== >>> (x**2 + x).as_poly().compose((x - 1).as_poly()) Poly(x**2 - x, x, domain='ZZ') """ _, per, F, G = self._unify(other) result = F.compose(G.ring.gens[0], G) return per(result) def decompose(self): """ Computes a functional decomposition of ``self``. Examples ======== >>> (x**4 + 2*x**3 - x - 1).as_poly().decompose() [Poly(x**2 - x - 1, x, domain='ZZ'), Poly(x**2 + x, x, domain='ZZ')] """ result = self.rep.decompose() return list(map(self.per, result)) def shift(self, a): """ Efficiently compute Taylor shift ``f(x + a)``. Examples ======== >>> (x**2 - 2*x + 1).as_poly().shift(2) Poly(x**2 + 2*x + 1, x, domain='ZZ') """ result = self.rep.shift(a) return self.per(result) def sqf_norm(self): """ Computes square-free norm of ``self``. Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x-sa)`` and ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``, where ``a`` is the algebraic extension of the ground domain. Examples ======== >>> s, f, r = (x**2 + 1).as_poly(extension=[sqrt(3)]).sqf_norm() >>> s 1 >>> f Poly(x**2 - 2*sqrt(3)*x + 4, x, domain='QQ<sqrt(3)>') >>> r Poly(x**4 - 4*x**2 + 16, x, domain='QQ') """ s, g, r = self.rep.sqf_norm() return s, self.per(g), self.per(r) def sqf_part(self): """ Computes square-free part of ``self``. Examples ======== >>> (x**3 - 3*x - 2).as_poly().sqf_part() Poly(x**2 - x - 2, x, domain='ZZ') """ result = self.rep.sqf_part() return self.per(result) def sqf_list(self): """ Returns a list of square-free factors of ``self``. Examples ======== >>> f = (2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16).as_poly() >>> f.sqf_list() (2, [(Poly(x + 1, x, domain='ZZ'), 2), (Poly(x + 2, x, domain='ZZ'), 3)]) """ coeff, factors = self.rep.sqf_list() return (self.domain.to_expr(coeff), [(self.per(g), k) for g, k in factors]) def factor_list(self): """ Returns a list of irreducible factors of ``self``. 
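
        If factorization over the ground domain is not supported, the
        polynomial itself is returned as its only factor.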
Examples ======== >>> f = (2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y).as_poly() >>> f.factor_list() (2, [(Poly(x + y, x, y, domain='ZZ'), 1), (Poly(x**2 + 1, x, y, domain='ZZ'), 2)]) """ try: coeff, factors = self.rep.factor_list() except DomainError: return Integer(1), [(self, 1)] return (self.domain.to_expr(coeff), [(self.per(g), k) for g, k in factors]) def count_roots(self, inf=None, sup=None): """ Return the number of roots of ``self`` in ``[inf, sup]`` interval. Examples ======== >>> (x**4 - 4).as_poly().count_roots(-3, 3) 2 >>> (x**4 - 4).as_poly().count_roots(0, 1 + 3*I) 1 """ inf_real, sup_real = True, True if inf is not None: inf = sympify(inf) if inf == -oo: inf = None else: re, im = inf.as_real_imag() if not im: inf = QQ.convert(inf) else: inf, inf_real = tuple(map(QQ.convert, (re, im))), False if sup is not None: sup = sympify(sup) if sup is oo: sup = None else: re, im = sup.as_real_imag() if not im: sup = QQ.convert(sup) else: sup, sup_real = tuple(map(QQ.convert, (re, im))), False if inf_real and sup_real: count = self.rep.ring._count_real_roots(self.rep, inf=inf, sup=sup) else: if inf_real and inf is not None: inf = (inf, QQ.zero) if sup_real and sup is not None: sup = (sup, QQ.zero) count = self.rep.ring._count_complex_roots(self.rep, inf=inf, sup=sup) return Integer(count) def root(self, index, radicals=True): """ Get an indexed root of a polynomial. Examples ======== >>> f = (2*x**3 - 7*x**2 + 4*x + 4).as_poly() >>> f.root(0) -1/2 >>> f.root(1) 2 >>> f.root(2) 2 >>> f.root(3) Traceback (most recent call last): ... IndexError: root index out of [-3, 2] range, got 3 >>> (x**5 + x + 1).as_poly().root(0) RootOf(x**3 - x**2 + 1, 0) """ from .rootoftools import RootOf return RootOf(self, index, radicals=radicals) def real_roots(self, multiple=True, radicals=True): """ Return a list of real roots with multiplicities. Examples ======== >>> (2*x**3 - 7*x**2 + 4*x + 4).as_poly().real_roots() [-1/2, 2, 2] >>> (x**3 + x + 1).as_poly().real_roots() [RootOf(x**3 + x + 1, 0)] """ from .rootoftools import RootOf reals = RootOf.real_roots(self, radicals=radicals) if multiple: return reals else: return group(reals, multiple=False) def all_roots(self, multiple=True, radicals=True): """ Return a list of real and complex roots with multiplicities. Examples ======== >>> (2*x**3 - 7*x**2 + 4*x + 4).as_poly().all_roots() [-1/2, 2, 2] >>> (x**3 + x + 1).as_poly().all_roots() [RootOf(x**3 + x + 1, 0), RootOf(x**3 + x + 1, 1), RootOf(x**3 + x + 1, 2)] """ from .rootoftools import RootOf roots = RootOf.all_roots(self, radicals=radicals) if multiple: return roots else: return group(roots, multiple=False) def nroots(self, n=15, maxsteps=50, cleanup=True): """ Compute numerical approximations of roots of ``self``. Parameters ========== n ... the number of digits to calculate maxsteps ... the maximum number of iterations to do If the accuracy `n` cannot be reached in `maxsteps`, it will raise an exception. You need to rerun with higher maxsteps. Examples ======== >>> (x**2 - 3).as_poly().nroots(n=15) [-1.73205080756888, 1.73205080756888] >>> (x**2 - 3).as_poly().nroots(n=30) [-1.73205080756887729352744634151, 1.73205080756887729352744634151] """ if self.is_multivariate: raise MultivariatePolynomialError( f"can't compute numerical roots of {self}") if self.degree() <= 0: return [] # For integer and rational coefficients, convert them to integers only # (for accuracy). Otherwise just try to convert the coefficients to # mpmath.mpc and raise an exception if the conversion fails. 
if self.domain is ZZ: coeffs = [int(coeff) for coeff in self.all_coeffs()] elif self.domain is QQ: denoms = [coeff.denominator for coeff in self.all_coeffs()] fac = math.lcm(*denoms) coeffs = [int(coeff*fac) for coeff in self.all_coeffs()] else: coeffs = [coeff.evalf(n, strict=False).as_real_imag() for coeff in self.all_coeffs()] try: coeffs = [mpmath.mpc(*coeff) for coeff in coeffs] except TypeError: raise DomainError(f'Numerical domain expected, got {self.domain}') dps = mpmath.mp.dps mpmath.mp.dps = n try: # We need to add extra precision to guard against losing accuracy. # 10 times the degree of the polynomial seems to work well. roots = mpmath.polyroots(list(reversed(coeffs)), maxsteps=maxsteps, cleanup=cleanup, error=False, extraprec=self.degree()*10) # Mpmath puts real roots first, then complex ones (as does all_roots) # so we make sure this convention holds here, too. roots = list(map(sympify, sorted(roots, key=lambda r: (1 if r.imag else 0, r.real, r.imag)))) except mpmath.libmp.NoConvergence: raise mpmath.libmp.NoConvergence( f'convergence to root failed; try n < {n} or maxsteps > {maxsteps}') finally: mpmath.mp.dps = dps return roots def cancel(self, other, include=False): """ Cancel common factors in a rational function ``self/other``. Examples ======== >>> (2*x**2 - 2).as_poly().cancel((x**2 - 2*x + 1).as_poly()) (1, Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ')) >>> (2*x**2 - 2).as_poly().cancel((x**2 - 2*x + 1).as_poly(), include=True) (Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ')) """ dom, per, F, G = self._unify(other) result = F.cancel(G, include=include) if not include: if dom.has_assoc_Ring: dom = dom.ring cp, cq, p, q = result cp = dom.to_expr(cp) cq = dom.to_expr(cq) return cp/cq, per(p), per(q) else: return tuple(map(per, result)) @property def is_zero(self): """ Returns ``True`` if ``self`` is a zero polynomial. Examples ======== >>> Integer(0).as_poly(x).is_zero True >>> Integer(1).as_poly(x).is_zero False """ return not self.rep @property def is_one(self): """ Returns ``True`` if ``self`` is a unit polynomial. Examples ======== >>> Integer(0).as_poly(x).is_one False >>> Integer(1).as_poly(x).is_one True """ return self.rep == 1 @property def is_squarefree(self): """ Returns ``True`` if ``self`` is a square-free polynomial. Examples ======== >>> (x**2 - 2*x + 1).as_poly().is_squarefree False >>> (x**2 - 1).as_poly().is_squarefree True """ return self.rep.is_squarefree @property def is_ground(self): """ Returns ``True`` if ``self`` is an element of the ground domain. Examples ======== >>> x.as_poly().is_ground False >>> Integer(2).as_poly(x).is_ground True >>> y.as_poly(x).is_ground True """ return self.rep.is_ground @property def is_linear(self): """ Returns ``True`` if ``self`` is linear in all its variables. Examples ======== >>> (x + y + 2).as_poly().is_linear True >>> (x*y + 2).as_poly().is_linear False """ return self.rep.is_linear @property def is_quadratic(self): """ Returns ``True`` if ``self`` is quadratic in all its variables. Examples ======== >>> (x*y + 2).as_poly().is_quadratic True >>> (x*y**2 + 2).as_poly().is_quadratic False """ return self.rep.is_quadratic @property def is_term(self): """ Returns ``True`` if ``self`` is zero or has only one term. Examples ======== >>> (3*x**2).as_poly().is_term True >>> (3*x**2 + 1).as_poly().is_term False """ return self.rep.is_term @property def is_homogeneous(self): """ Returns ``True`` if ``self`` is a homogeneous polynomial. 
        A homogeneous polynomial is a polynomial all of whose monomials
        with non-zero coefficients have the same total degree.

        Examples
        ========

        >>> (x**2 + x*y).as_poly().is_homogeneous
        True
        >>> (x**3 + x*y).as_poly().is_homogeneous
        False

        """
        return self.rep.is_homogeneous

    @property
    def is_irreducible(self):
        """
        Returns ``True`` if ``self`` has no factors over its domain.

        Examples
        ========

        >>> (x**2 + x + 1).as_poly(modulus=2).is_irreducible
        True
        >>> (x**2 + 1).as_poly(modulus=2).is_irreducible
        False

        """
        return self.rep.is_irreducible

    @property
    def is_univariate(self):
        """
        Returns ``True`` if ``self`` is a univariate polynomial.

        Examples
        ========

        >>> (x**2 + x + 1).as_poly().is_univariate
        True
        >>> (x*y**2 + x*y + 1).as_poly().is_univariate
        False
        >>> (x*y**2 + x*y + 1).as_poly(x).is_univariate
        True
        >>> (x**2 + x + 1).as_poly(x, y).is_univariate
        False

        """
        return len(self.gens) == 1

    @property
    def is_multivariate(self):
        """
        Returns ``True`` if ``self`` is a multivariate polynomial.

        Examples
        ========

        >>> (x**2 + x + 1).as_poly().is_multivariate
        False
        >>> (x*y**2 + x*y + 1).as_poly().is_multivariate
        True
        >>> (x*y**2 + x*y + 1).as_poly(x).is_multivariate
        False
        >>> (x**2 + x + 1).as_poly(x, y).is_multivariate
        True

        """
        return len(self.gens) != 1

    @property
    def is_cyclotomic(self):
        """
        Returns ``True`` if ``self`` is a cyclotomic polynomial.

        Examples
        ========

        >>> f = (x**16 + x**14 - x**10 + x**8 - x**6 + x**2 + 1).as_poly()
        >>> f.is_cyclotomic
        False

        >>> g = (x**16 + x**14 - x**10 - x**8 - x**6 + x**2 + 1).as_poly()
        >>> g.is_cyclotomic
        True

        """
        return self.rep.is_cyclotomic

    def __abs__(self):
        """
        Make all coefficients in ``self`` positive.

        Examples
        ========

        >>> abs((x**2 - 1).as_poly())
        Poly(x**2 + 1, x, domain='ZZ')

        """
        result = abs(self.rep)
        return self.per(result)

    def __neg__(self):
        """
        Negate all coefficients in ``self``.
Examples ======== >>> -(x**2 - 1).as_poly() Poly(-x**2 + 1, x, domain='ZZ') """ result = -self.rep return self.per(result) @_sympifyit('other', NotImplemented) def __add__(self, other): if not other.is_Poly: try: other = self.__class__(other, *self.gens) except PolynomialError: return self.as_expr() + other _, per, F, G = self._unify(other) result = F + G return per(result) @_sympifyit('other', NotImplemented) def __radd__(self, other): try: other = self.__class__(other, *self.gens) except PolynomialError: return other + self.as_expr() return other + self @_sympifyit('other', NotImplemented) def __sub__(self, other): if not other.is_Poly: try: other = self.__class__(other, *self.gens) except PolynomialError: return self.as_expr() - other _, per, F, G = self._unify(other) result = F - G return per(result) @_sympifyit('other', NotImplemented) def __rsub__(self, other): try: other = self.__class__(other, *self.gens) except PolynomialError: return other - self.as_expr() return other - self @_sympifyit('other', NotImplemented) def __mul__(self, other): if not other.is_Poly: try: other = self.__class__(other, *self.gens) except PolynomialError: return self.as_expr()*other _, per, F, G = self._unify(other) result = F * G return per(result) @_sympifyit('other', NotImplemented) def __rmul__(self, other): try: other = self.__class__(other, *self.gens) except PolynomialError: return other*self.as_expr() return other*self def __pow__(self, n, mod=None): if mod: mod = sympify(mod, strict=True) n = sympify(n) if n.is_Integer and n >= 0: n = int(n) result = pow(self.rep, n, mod.rep if mod else mod) return self.per(result) else: r = self.as_expr()**n return r % mod if mod else r @_sympifyit('other', NotImplemented) def __divmod__(self, other): if not other.is_Poly: other = self.__class__(other, *self.gens) return self.div(other) @_sympifyit('other', NotImplemented) def __rdivmod__(self, other): other = self.__class__(other, *self.gens) return other.div(self) @_sympifyit('other', NotImplemented) def __mod__(self, other): if not other.is_Poly: other = self.__class__(other, *self.gens) return self.rem(other) @_sympifyit('other', NotImplemented) def __rmod__(self, other): other = self.__class__(other, *self.gens) return other.rem(self) @_sympifyit('other', NotImplemented) def __floordiv__(self, other): if not other.is_Poly: other = self.__class__(other, *self.gens) return self.quo(other) @_sympifyit('other', NotImplemented) def __rfloordiv__(self, other): other = self.__class__(other, *self.gens) return other.quo(self) @_sympifyit('other', NotImplemented) def __truediv__(self, other): return self.as_expr()/other.as_expr() @_sympifyit('other', NotImplemented) def __eq__(self, other): f, g = self, other if not g.is_Poly: try: g = f.__class__(g, *f.gens, domain=f.domain) except (PolynomialError, DomainError, CoercionFailed): return False if f.gens != g.gens: return False if f.domain != g.domain: try: dom = f.domain.unify(g.domain, f.gens) except UnificationFailed: # pragma: no cover return NotImplemented f = f.set_domain(dom) g = g.set_domain(dom) return f.rep == g.rep def __bool__(self): return not self.is_zero class PurePoly(Poly): """Class for representing pure polynomials.""" def _hashable_content(self): """Allow Diofant to hash Poly instances.""" return self.domain, frozenset(self.rep.items()) def __hash__(self): return super().__hash__() @property def free_symbols(self): """ Free symbols of a polynomial. 
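
        Only symbols from the ground domain are reported; the generators
        of a :class:`PurePoly` never count as free symbols.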
Examples ======== >>> PurePoly(x**2 + 1).free_symbols set() >>> PurePoly(x**2 + y).free_symbols set() >>> PurePoly(x**2 + y, x).free_symbols {y} """ return self.free_symbols_in_domain @_sympifyit('other', NotImplemented) def __eq__(self, other): f, g = self, other if not g.is_Poly: try: g = f.__class__(g, *f.gens, domain=f.domain) except (PolynomialError, DomainError, CoercionFailed): return False if len(f.gens) != len(g.gens): return False if f.domain != g.domain: try: dom = f.domain.unify(g.domain, f.gens) except UnificationFailed: # pragma: no cover return NotImplemented f = f.set_domain(dom) g = g.set_domain(dom) return f.rep.items() == g.rep.items() def _unify(self, other): other = sympify(other) if not other.is_Poly: try: return (self.domain, self.per, self.rep, self.rep.ring(self.domain.convert(other))) except CoercionFailed: raise UnificationFailed(f"can't unify {self} with {other}") if len(self.gens) != len(other.gens): raise UnificationFailed(f"can't unify {self} with {other}") newring = self.rep.ring.unify(other.rep.ring) gens = newring.symbols F, G = self.rep.set_ring(newring), other.rep.set_ring(newring) cls = self.__class__ dom = newring.domain def per(rep, dom=dom, gens=gens, remove=None): if remove is not None: gens = gens[:remove] + gens[remove + 1:] if not gens: return dom.to_expr(rep) return cls.new(rep, *gens) return dom, per, F, G def parallel_poly_from_expr(exprs, *gens, **args): """Construct polynomials from expressions.""" from ..functions import Piecewise opt = build_options(gens, args) origs, exprs = list(exprs), [] _exprs, _polys, _failed = [], [], [] if not origs and not opt.gens: raise PolificationFailed(opt, origs, exprs, True) for i, expr in enumerate(origs): expr = sympify(expr) if isinstance(expr, Basic): if expr.is_Poly: _polys.append(i) expr = expr.__class__._from_poly(expr, opt) else: if opt.expand: expr = expr.expand() try: expr = Poly._from_expr(expr, opt) _exprs.append(i) except GeneratorsNeeded: _failed.append(i) else: raise PolificationFailed(opt, origs, exprs, True) exprs.append(expr) if opt.polys is None: opt.polys = bool(_polys) _exprs += _polys for i, j in zip(_exprs, _exprs[1:]): exprs[i], exprs[j] = exprs[i].unify(exprs[j]) if _exprs: i = _exprs[-1] opt.gens = exprs[i].gens for i in _failed: try: exprs[i] = Poly._from_expr(exprs[i], opt) except GeneratorsNeeded: raise PolificationFailed(opt, origs, exprs, True) if opt.domain is None: opt.domain = ZZ _exprs += _failed if _exprs: for i, j in zip(_exprs, _exprs[1:]): exprs[i], exprs[j] = exprs[i].unify(exprs[j]) i = _exprs[-1] opt.domain = exprs[i].domain cls = exprs[i].func for i, expr in enumerate(exprs): exprs[i] = cls._from_poly(expr, opt) for k in opt.gens: if isinstance(k, Piecewise) and len(exprs) > 1: raise PolynomialError('Piecewise generators do not make sense') return exprs, opt def degree(f, *gens, **args): """ Return the degree of ``f`` in the given variable. The degree of 0 is negative infinity. Examples ======== >>> degree(x**2 + y*x + 1, gen=x) 2 >>> degree(x**2 + y*x + 1, gen=y) 1 >>> degree(0, x) -inf """ allowed_flags(args, ['gen', 'polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('degree', 1, exc) return sympify(F.degree(opt.gen)) def LC(f, *gens, **args): """ Return the leading coefficient of ``f``. 
Examples ======== >>> LC(4*x**2 + 2*x*y**2 + x*y + 3*y) 4 """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('LC', 1, exc) return F.LC(order=opt.order) def LM(f, *gens, **args): """ Return the leading monomial of ``f``. Examples ======== >>> LM(4*x**2 + 2*x*y**2 + x*y + 3*y) x**2 """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('LM', 1, exc) monom = F.LM(order=opt.order) return monom.as_expr() def LT(f, *gens, **args): """ Return the leading term of ``f``. Examples ======== >>> LT(4*x**2 + 2*x*y**2 + x*y + 3*y) 4*x**2 """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('LT', 1, exc) monom, coeff = F.LT(order=opt.order) return coeff*monom.as_expr() def div(f, g, *gens, **args): """ Compute polynomial division of ``f`` and ``g``. Examples ======== >>> div(x**2 + 1, 2*x - 4, field=False) (0, x**2 + 1) >>> div(x**2 + 1, 2*x - 4) (x/2 + 1, 5) """ allowed_flags(args, ['auto', 'polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('div', 2, exc) q, r = F.div(G, auto=opt.auto) if not opt.polys: return q.as_expr(), r.as_expr() else: return q, r def rem(f, g, *gens, **args): """ Compute polynomial remainder of ``f`` and ``g``. Examples ======== >>> rem(x**2 + 1, 2*x - 4, field=False) x**2 + 1 >>> rem(x**2 + 1, 2*x - 4) 5 """ allowed_flags(args, ['auto', 'polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('rem', 2, exc) r = F.rem(G, auto=opt.auto) if not opt.polys: return r.as_expr() else: return r def quo(f, g, *gens, **args): """ Compute polynomial quotient of ``f`` and ``g``. Examples ======== >>> quo(x**2 + 1, 2*x - 4) x/2 + 1 >>> quo(x**2 - 1, x - 1) x + 1 """ allowed_flags(args, ['auto', 'polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('quo', 2, exc) q = F.quo(G, auto=opt.auto) if not opt.polys: return q.as_expr() else: return q def exquo(f, g, *gens, **args): """ Compute polynomial exact quotient of ``f`` and ``g``. Examples ======== >>> exquo(x**2 - 1, x - 1) x + 1 >>> exquo(x**2 + 1, 2*x - 4) Traceback (most recent call last): ... ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1 """ allowed_flags(args, ['auto', 'polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('exquo', 2, exc) q = F.exquo(G, auto=opt.auto) if not opt.polys: return q.as_expr() else: return q def half_gcdex(f, g, *gens, **args): """ Half extended Euclidean algorithm of ``f`` and ``g``. Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``. 
Examples ======== >>> half_gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4) (-x/5 + 3/5, x + 1) """ allowed_flags(args, ['auto', 'polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: domain, (a, b) = construct_domain(exc.exprs) s, h = domain.half_gcdex(a, b) return domain.to_expr(s), domain.to_expr(h) s, h = F.half_gcdex(G, auto=opt.auto) if not opt.polys: return s.as_expr(), h.as_expr() else: return s, h def gcdex(f, g, *gens, **args): """ Extended Euclidean algorithm of ``f`` and ``g``. Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``. Examples ======== >>> gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4) (-x/5 + 3/5, x**2/5 - 6*x/5 + 2, x + 1) """ allowed_flags(args, ['auto', 'polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: domain, (a, b) = construct_domain(exc.exprs) s, t, h = domain.gcdex(a, b) return domain.to_expr(s), domain.to_expr(t), domain.to_expr(h) s, t, h = F.gcdex(G, auto=opt.auto) if not opt.polys: return s.as_expr(), t.as_expr(), h.as_expr() else: return s, t, h def invert(f, g, *gens, **args): """ Invert ``f`` modulo ``g`` when possible. Examples ======== >>> invert(x**2 - 1, 2*x - 1) -4/3 >>> invert(x**2 - 1, x - 1) Traceback (most recent call last): ... NotInvertible: zero divisor For more efficient inversion of Rationals, use the ``mod_inverse`` function: >>> mod_inverse(3, 5) 2 >>> (Integer(2)/5).invert(Integer(7)/3) 5/2 See Also ======== diofant.core.numbers.mod_inverse """ allowed_flags(args, ['auto', 'polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: domain, (a, b) = construct_domain(exc.exprs) return domain.to_expr(domain.invert(a, b)) h = F.invert(G, auto=opt.auto) if not opt.polys: return h.as_expr() else: return h def subresultants(f, g, *gens, **args): """ Compute subresultant PRS of ``f`` and ``g``. Examples ======== >>> subresultants(x**2 + 1, x**2 - 1) [x**2 + 1, x**2 - 1, -2] """ allowed_flags(args, ['polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('subresultants', 2, exc) result = F.subresultants(G) if not opt.polys: return [r.as_expr() for r in result] else: return result def resultant(f, g, *gens, **args): """ Compute resultant of ``f`` and ``g``. Examples ======== >>> resultant(x**2 + 1, x**2 - 1) 4 """ includePRS = args.pop('includePRS', False) allowed_flags(args, ['polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('resultant', 2, exc) if includePRS: result, R = F.resultant(G, includePRS=includePRS) else: result = F.resultant(G) if not opt.polys: if includePRS: return result.as_expr(), [r.as_expr() for r in R] return result.as_expr() else: if includePRS: return result, R return result def discriminant(f, *gens, **args): """ Compute discriminant of ``f``. Examples ======== >>> discriminant(x**2 + 2*x + 3) -8 """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('discriminant', 1, exc) result = F.discriminant() if not opt.polys: return result.as_expr() else: return result def cofactors(f, g, *gens, **args): """ Compute GCD and cofactors of ``f`` and ``g``. 
Returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and ``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are, so called, cofactors of ``f`` and ``g``. Examples ======== >>> cofactors(x**2 - 1, x**2 - 3*x + 2) (x - 1, x + 1, x - 2) """ allowed_flags(args, ['polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: domain, (a, b) = construct_domain(exc.exprs) h, cff, cfg = domain.cofactors(a, b) return tuple(map(domain.to_expr, (h, cff, cfg))) h, cff, cfg = F.cofactors(G) if not opt.polys: return h.as_expr(), cff.as_expr(), cfg.as_expr() else: return h, cff, cfg def gcd(f, g, *gens, **args): """ Compute GCD of ``f`` and ``g``. Examples ======== >>> gcd(x**2 - 1, x**2 - 3*x + 2) x - 1 """ allowed_flags(args, ['polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: domain, (a, b) = construct_domain(exc.exprs) return domain.to_expr(domain.gcd(a, b)) result = F.gcd(G) if not opt.polys: return result.as_expr() else: return result def lcm(f, g, *gens, **args): """ Compute LCM of ``f`` and ``g``. Examples ======== >>> lcm(x**2 - 1, x**2 - 3*x + 2) x**3 - 2*x**2 - x + 2 """ allowed_flags(args, ['polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: domain, (a, b) = construct_domain(exc.exprs) return domain.to_expr(domain.lcm(a, b)) result = F.lcm(G) if not opt.polys: return result.as_expr() else: return result def terms_gcd(f, *gens, **args): """ Remove GCD of terms from ``f``. If the ``deep`` flag is True, then the arguments of ``f`` will have terms_gcd applied to them. If a fraction is factored out of ``f`` and ``f`` is an Add, then an unevaluated Mul will be returned so that automatic simplification does not redistribute it. The hint ``clear``, when set to False, can be used to prevent such factoring when all coefficients are not fractions. Examples ======== >>> terms_gcd(x**6*y**2 + x**3*y) x**3*y*(x**3*y + 1) The default action of polys routines is to expand the expression given to them. terms_gcd follows this behavior: >>> terms_gcd((3+3*x)*(x+x*y)) 3*x*(x*y + x + y + 1) If this is not desired then the hint ``expand`` can be set to False. 
In this case the expression will be treated as though it were comprised of one or more terms: >>> terms_gcd((3+3*x)*(x+x*y), expand=False) (3*x + 3)*(x*y + x) In order to traverse factors of a Mul or the arguments of other functions, the ``deep`` hint can be used: >>> terms_gcd((3 + 3*x)*(x + x*y), expand=False, deep=True) 3*x*(x + 1)*(y + 1) >>> terms_gcd(cos(x + x*y), deep=True) cos(x*(y + 1)) Rationals are factored out by default: >>> terms_gcd(x + y/2) (2*x + y)/2 Only the y-term had a coefficient that was a fraction; if one does not want to factor out the 1/2 in cases like this, the flag ``clear`` can be set to False: >>> terms_gcd(x + y/2, clear=False) x + y/2 >>> terms_gcd(x*y/2 + y**2, clear=False) y*(x/2 + y) The ``clear`` flag is ignored if all coefficients are fractions: >>> terms_gcd(x/3 + y/2, clear=False) (2*x + 3*y)/6 See Also ======== diofant.core.exprtools.gcd_terms, diofant.core.exprtools.factor_terms """ from ..core import Equality orig = sympify(f) if not isinstance(f, Expr) or f.is_Atom: return orig if args.get('deep', False): new = f.func(*[terms_gcd(a, *gens, **args) for a in f.args]) args.pop('deep') args['expand'] = False return terms_gcd(new, *gens, **args) if isinstance(f, Equality): return f clear = args.pop('clear', True) allowed_flags(args, ['polys']) (F,), opt = parallel_poly_from_expr((f,), *gens, **args) J, f = F.terms_gcd() if opt.domain.is_Field: denom, f = f.clear_denoms(convert=True) coeff, f = f.primitive() if opt.domain.is_Field: coeff /= denom term = Mul(*[x**j for x, j in zip(f.gens, J)]) if coeff == 1: coeff = Integer(1) if term == 1: return orig if clear: return _keep_coeff(coeff, term*f.as_expr()) # base the clearing on the form of the original expression, not # the (perhaps) Mul that we have now coeff, f = _keep_coeff(coeff, f.as_expr(), clear=False).as_coeff_Mul() return _keep_coeff(coeff, term*f, clear=False) def trunc(f, p, *gens, **args): """ Reduce ``f`` modulo a constant ``p``. Examples ======== >>> trunc(2*x**3 + 3*x**2 + 5*x + 7, 3) -x**3 - x + 1 """ allowed_flags(args, ['auto', 'polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('trunc', 1, exc) result = F.trunc(sympify(p)) if not opt.polys: return result.as_expr() else: return result def monic(f, *gens, **args): """ Divide all coefficients of ``f`` by ``LC(f)``. Examples ======== >>> monic(3*x**2 + 4*x + 2) x**2 + 4*x/3 + 2/3 """ allowed_flags(args, ['auto', 'polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('monic', 1, exc) result = F.monic(auto=opt.auto) if not opt.polys: return result.as_expr() else: return result def content(f, *gens, **args): """ Compute GCD of coefficients of ``f``. Examples ======== >>> content(6*x**2 + 8*x + 12) 2 """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('content', 1, exc) return F.content() def primitive(f, *gens, **args): """ Compute content and the primitive form of ``f``. Examples ======== >>> primitive(6*x**2 + 8*x + 12) (2, 3*x**2 + 4*x + 6) >>> eq = (2 + 2*x)*x + 2 Expansion is performed by default: >>> primitive(eq) (2, x**2 + x + 1) Set ``expand`` to False to shut this off. Note that the extraction will not be recursive; use the as_content_primitive method for recursive, non-destructive Rational extraction. 
>>> primitive(eq, expand=False) (1, x*(2*x + 2) + 2) >>> eq.as_content_primitive() (2, x*(x + 1) + 1) """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('primitive', 1, exc) cont, result = F.primitive() if not opt.polys: return cont, result.as_expr() else: return cont, result def compose(f, g, *gens, **args): """ Compute functional composition ``f(g)``. Examples ======== >>> compose(x**2 + x, x - 1) x**2 - x """ allowed_flags(args, ['polys']) try: (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('compose', 2, exc) result = F.compose(G) if not opt.polys: return result.as_expr() else: return result def decompose(f, *gens, **args): """ Compute functional decomposition of ``f``. Examples ======== >>> decompose(x**4 + 2*x**3 - x - 1) [x**2 - x - 1, x**2 + x] """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('decompose', 1, exc) result = F.decompose() if not opt.polys: return [r.as_expr() for r in result] else: return result def sqf_norm(f, *gens, **args): """ Compute square-free norm of ``f``. Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x-sa)`` and ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``, where ``a`` is the algebraic extension of the ground domain. Examples ======== >>> sqf_norm(x**2 + 1, extension=[sqrt(3)]) (1, x**2 - 2*sqrt(3)*x + 4, x**4 - 4*x**2 + 16) """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('sqf_norm', 1, exc) s, g, r = F.sqf_norm() if not opt.polys: return Integer(s), g.as_expr(), r.as_expr() else: return Integer(s), g, r def sqf_part(f, *gens, **args): """ Compute square-free part of ``f``. 
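The square-free part is the product of the distinct irreducible factors of ``f``; for instance (an illustrative case, consistent with the example below), ``(x - 1)**2*(x + 2)`` has square-free part ``(x - 1)*(x + 2)``.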
Examples ======== >>> sqf_part(x**3 - 3*x - 2) x**2 - x - 2 """ allowed_flags(args, ['polys']) try: (F,), opt = parallel_poly_from_expr((f,), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('sqf_part', 1, exc) result = F.sqf_part() if not opt.polys: return result.as_expr() else: return result def _sorted_factors(factors, method): """Sort a list of ``(expr, exp)`` pairs.""" if method == 'sqf': def key(obj): poly, exp = obj rep = poly.rep return exp, len(rep), len(poly.gens), default_sort_key(rep) else: def key(obj): poly, exp = obj rep = poly.rep return len(rep), len(poly.gens), exp, default_sort_key(rep) return sorted(factors, key=key) def _factors_product(factors): """Multiply a list of ``(expr, exp)`` pairs.""" return Mul(*[f.as_expr()**k for f, k in factors]) def _symbolic_factor_list(expr, opt, method): """Helper function for :func:`_symbolic_factor`.""" coeff, factors = Integer(1), [] args = [i._eval_factor() if hasattr(i, '_eval_factor') else i for i in Mul.make_args(expr)] for arg in args: if arg.is_Number: coeff *= arg continue elif arg.is_Pow and arg.base is not E: base, exp = arg.base, arg.exp if base.is_Number: factors.append((base, exp)) continue else: base, exp = arg, Integer(1) try: if base.is_Poly: cls = base.func else: cls = Poly if opt.expand: base = base.expand() if opt.polys is None: opt.polys = base.is_Poly poly = cls._from_expr(base.as_expr(), opt) except GeneratorsNeeded: factors.append((base, exp)) else: func = getattr(poly, method + '_list') _coeff, _factors = func() if _coeff != 1: if exp.is_Integer: coeff *= _coeff**exp elif _coeff.is_positive: factors.append((_coeff, exp)) else: _factors.append((_coeff, Integer(1))) if exp == 1: factors.extend(_factors) elif exp.is_integer: factors.extend([(f, k*exp) for f, k in _factors]) else: other = [] for f, k in _factors: if f.as_expr().is_positive: factors.append((f, k*exp)) else: other.append((f, k)) factors.append((_factors_product(other), exp)) if method == 'sqf': factors = [(functools.reduce(operator.mul, (f for f, _ in factors if _ == k)), k) for k in set(dict(factors).values())] return coeff, factors def _symbolic_factor(expr, opt, method): """Helper function for :func:`_factor`.""" if isinstance(expr, Expr) and not expr.is_Relational: if hasattr(expr, '_eval_factor'): return expr._eval_factor() coeff, factors = _symbolic_factor_list(together(expr), opt, method) return _keep_coeff(coeff, _factors_product(factors)) elif hasattr(expr, 'args'): return expr.func(*[_symbolic_factor(arg, opt, method) for arg in expr.args]) elif hasattr(expr, '__iter__'): return expr.__class__([_symbolic_factor(arg, opt, method) for arg in expr]) else: raise NotImplementedError def _generic_factor_list(expr, gens, args, method): """Helper function for :func:`sqf_list` and :func:`factor_list`.""" allowed_flags(args, ['frac', 'polys']) opt = build_options(gens, args) expr = sympify(expr) if isinstance(expr, Expr) and not expr.is_Relational: numer, denom = together(expr).as_numer_denom() cp, fp = _symbolic_factor_list(numer, opt, method) cq, fq = _symbolic_factor_list(denom, opt, method) if fq and not opt.frac: raise PolynomialError(f'a polynomial expected, got {expr}') _opt = opt.clone({'expand': True}) if not _opt.get('gens'): _opt['gens'] = set().union(*[set(f.gens) for f, _ in fp + fq if f.is_Poly]) for factors in (fp, fq): for i, (f, k) in enumerate(factors): if not f.is_Poly: f = Poly._from_expr(f, _opt) factors[i] = (f, k) fp = _sorted_factors(fp, method) fq = _sorted_factors(fq, method) if not opt.polys: fp = 
[(f.as_expr(), k) for f, k in fp]
            fq = [(f.as_expr(), k) for f, k in fq]

        coeff = cp/cq

        if not opt.frac:
            return coeff, fp
        else:
            return coeff, fp, fq
    else:
        raise PolynomialError(f'a polynomial expected, got {expr}')


def _generic_factor(expr, gens, args, method):
    """Helper function for :func:`sqf` and :func:`factor`."""
    allowed_flags(args, [])
    opt = build_options(gens, args)
    return _symbolic_factor(sympify(expr), opt, method)


def to_rational_coeffs(f):
    """
    Try to transform a polynomial to have rational coefficients.

    Try to find a transformation ``x = alpha*y``

    ``f(x) = lc*alpha**n * g(y)``

    where ``g`` is a polynomial with rational coefficients and ``lc`` is
    the leading coefficient.  If this fails, try ``x = y + beta``

    ``f(x) = g(y)``

    Returns ``None`` if ``g`` is not found;
    ``(lc, alpha, None, g)`` in case of rescaling,
    ``(None, None, beta, g)`` in case of translation.

    Notes
    =====

    Currently it transforms only polynomials without roots larger than 2.

    Examples
    ========

    >>> p = (((x**2-1)*(x-2)).subs({x: x*(1 + sqrt(2))})).as_poly(x, domain=EX)
    >>> lc, r, _, g = to_rational_coeffs(p)
    >>> lc, r
    (7 + 5*sqrt(2), -2*sqrt(2) + 2)
    >>> g
    Poly(x**3 + x**2 - 1/4*x - 1/4, x, domain='QQ')
    >>> r1 = simplify(1/r)
    >>> (lc*r**3*(g.as_expr()).subs({x: x*r1})).as_poly(x, domain=EX) == p
    True
    """
    from ..simplify import simplify

    def _try_rescale(f, f1=None):
        """
        Try rescaling ``x -> alpha*x`` to convert f to a polynomial
        with rational coefficients.

        Returns ``lc, alpha, f``; if the rescaling is successful, ``lc``
        is the leading coefficient, ``alpha`` is the rescaling factor and
        ``f`` is the rescaled polynomial; otherwise ``None`` is returned.
        """
        from ..core import Add

        if f.is_multivariate or not (f.gens[0]).is_Atom:
            return
        n = f.degree()
        lc = f.LC()
        f1 = f1 or f.monic()  # fall back to the monic form of ``f``
        coeffs = f1.all_coeffs()[:-1]
        coeffs = [simplify(coeffx) for coeffx in coeffs]
        if coeffs[1]:
            rescale1_x = simplify(coeffs[1]/coeffs[0])
            coeffs1 = []
            for i in range(len(coeffs)):
                coeffx = simplify(coeffs[n - i - 1]*rescale1_x**(i + 1))
                if not coeffx.is_rational:
                    break
                coeffs1.append(coeffx)
            else:
                rescale_x = simplify(1/rescale1_x)
                x = f.gens[0]
                v = [x**n]
                for i in range(1, n + 1):
                    v.append(coeffs1[i - 1]*x**(n - i))
                f = Add(*v)
                f = Poly(f)
                return lc, rescale_x, f

    def _try_translate(f, f1=None):
        """
        Try translating ``x -> x + alpha`` to convert f to a polynomial
        with rational coefficients.

        Returns ``alpha, f``; if the translation is successful, ``alpha``
        is the translating factor and ``f`` is the shifted polynomial;
        otherwise ``None`` is returned.
""" from ..core import Add if f.is_multivariate or not (f.gens[0]).is_Atom: return n = f.degree() f1 = f1 or f1.monic() coeffs = f1.all_coeffs()[:-1] c = simplify(coeffs[-1]) if c and not c.is_rational: func = Add if c.is_Add: args = c.args func = c.func else: args = [c] sifted = sift(args, lambda z: z.is_rational) c2 = sifted[False] alpha = -func(*c2)/n f2 = f1.shift(alpha) return alpha, f2 def _has_square_roots(p): """Return True if ``f`` is a sum with square roots but no other root.""" from ..core.exprtools import Factors coeffs = p.coeffs() has_sq = False for y in coeffs: for x in Add.make_args(y): f = Factors(x).factors r = [wx.denominator for b, wx in f.items() if b.is_number and wx.is_Rational and wx.denominator >= 2] if not r: continue if min(r) == 2: has_sq = True if max(r) > 2: return False return has_sq if f.domain.is_ExpressionDomain and _has_square_roots(f): f1 = f.monic() r = _try_rescale(f, f1) if r: return r[0], r[1], None, r[2] else: r = _try_translate(f, f1) if r: return None, None, r[0], r[1] def sqf_list(f, *gens, **args): """ Compute a list of square-free factors of ``f``. Examples ======== >>> sqf_list(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16) (2, [(x + 1, 2), (x + 2, 3)]) """ return _generic_factor_list(f, gens, args, method='sqf') def sqf(f, *gens, **args): """ Compute square-free factorization of ``f``. Examples ======== >>> sqf(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16) 2*(x + 1)**2*(x + 2)**3 """ return _generic_factor(f, gens, args, method='sqf') def factor_list(f, *gens, **args): """ Compute a list of irreducible factors of ``f``. Examples ======== >>> factor_list(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y) (2, [(x + y, 1), (x**2 + 1, 2)]) """ return _generic_factor_list(f, gens, args, method='factor') def factor(f, *gens, **args): """ Compute the factorization of expression, ``f``, into irreducibles. (To factor an integer into primes, use ``factorint``.) There two modes implemented: symbolic and formal. If ``f`` is not an instance of :class:`Poly` and generators are not specified, then the former mode is used. Otherwise, the formal mode is used. In symbolic mode, :func:`factor` will traverse the expression tree and factor its components without any prior expansion, unless an instance of :class:`~diofant.core.add.Add` is encountered (in this case formal factorization is used). This way :func:`factor` can handle large or symbolic exponents. By default, the factorization is computed over the rationals. To factor over other domain, e.g. an algebraic or finite field, use appropriate options: ``extension``, ``modulus`` or ``domain``. 
Examples ======== >>> factor(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y) 2*(x + y)*(x**2 + 1)**2 >>> factor(x**2 + 1) x**2 + 1 >>> factor(x**2 + 1, modulus=2) (x + 1)**2 >>> factor(x**2 + 1, gaussian=True) (x - I)*(x + I) >>> factor(x**2 - 2, extension=sqrt(2)) (x - sqrt(2))*(x + sqrt(2)) >>> factor((x**2 - 1)/(x**2 + 4*x + 4)) (x - 1)*(x + 1)/(x + 2)**2 >>> factor((x**2 + 4*x + 4)**10000000*(x**2 + 1)) (x + 2)**20000000*(x**2 + 1) By default, factor deals with an expression as a whole: >>> eq = 2**(x**2 + 2*x + 1) >>> factor(eq) 2**(x**2 + 2*x + 1) If the ``deep`` flag is True then subexpressions will be factored: >>> factor(eq, deep=True) 2**((x + 1)**2) See Also ======== diofant.ntheory.factor_.factorint """ f = sympify(f) if args.pop('deep', False): partials = {} muladd = f.atoms(Mul, Add) for p in muladd: fac = factor(p, *gens, **args) if (fac.is_Mul or fac.is_Pow) and fac != p: partials[p] = fac return f.xreplace(partials) try: return _generic_factor(f, gens, args, method='factor') except PolynomialError as msg: if not f.is_commutative: from ..core.exprtools import factor_nc return factor_nc(f) else: raise PolynomialError(msg) def count_roots(f, inf=None, sup=None): """ Return the number of roots of ``f`` in ``[inf, sup]`` interval. If one of ``inf`` or ``sup`` is complex, it will return the number of roots in the complex rectangle with corners at ``inf`` and ``sup``. Examples ======== >>> count_roots(x**4 - 4, -3, 3) 2 >>> count_roots(x**4 - 4, 0, 1 + 3*I) 1 """ try: F = Poly(f, greedy=False) except GeneratorsNeeded: raise PolynomialError(f"can't count roots of {f}, not a polynomial") return F.count_roots(inf=inf, sup=sup) def real_roots(f, multiple=True): """ Return a list of real roots with multiplicities of ``f``. Examples ======== >>> real_roots(2*x**3 - 7*x**2 + 4*x + 4) [-1/2, 2, 2] """ try: F = Poly(f, greedy=False) except GeneratorsNeeded: raise PolynomialError(f"can't compute real roots of {f}, " 'not a polynomial') return F.real_roots(multiple=multiple) def nroots(f, n=15, maxsteps=50, cleanup=True): """ Compute numerical approximations of roots of ``f``. Examples ======== >>> nroots(x**2 - 3, n=15) [-1.73205080756888, 1.73205080756888] >>> nroots(x**2 - 3, n=30) [-1.73205080756887729352744634151, 1.73205080756887729352744634151] """ try: F = Poly(f, greedy=False) except GeneratorsNeeded: raise PolynomialError( f"can't compute numerical roots of {f}, not a polynomial") return F.nroots(n=n, maxsteps=maxsteps, cleanup=cleanup) def cancel(f, *gens, **args): """ Cancel common factors in a rational function ``f``. 
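Concretely, the greatest common divisor of the numerator and denominator is removed, so ``(x**2 - 1)/(x - 1)`` cancels to ``x + 1`` (an illustrative case; the examples below cover the general behaviour).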
Examples ======== >>> A = Symbol('A', commutative=False) >>> cancel((2*x**2 - 2)/(x**2 - 2*x + 1)) (2*x + 2)/(x - 1) >>> cancel((sqrt(3) + sqrt(15)*A)/(sqrt(2) + sqrt(10)*A)) sqrt(6)/2 """ from ..core.exprtools import factor_terms from ..functions import Piecewise allowed_flags(args, ['polys']) f = sympify(f) if not isinstance(f, Tuple): if f.is_Atom or isinstance(f, Relational) or not isinstance(f, Expr): return f f = factor_terms(f, radical=True) p, q = f.as_numer_denom() elif len(f) == 2: p, q = f else: return factor_terms(f) try: (F, G), opt = parallel_poly_from_expr((p, q), *gens, **args) except PolificationFailed: if not isinstance(f, (tuple, Tuple)): return f else: return Integer(1), p, q except PolynomialError: assert not f.is_commutative or f.has(Piecewise) # Handling of noncommutative and/or piecewise expressions if f.is_Add or f.is_Mul: sifted = sift(f.args, lambda x: x.is_commutative is True and not x.has(Piecewise)) c, nc = sifted[True], sifted[False] nc = [cancel(i) for i in nc] return f.func(cancel(f.func._from_args(c)), *nc) else: reps = [] pot = preorder_traversal(f) next(pot) for e in pot: # XXX: This should really skip anything that's not Expr. if isinstance(e, (tuple, Tuple, BooleanAtom)): continue reps.append((e, cancel(e))) pot.skip() # this was handled successfully return f.xreplace(dict(reps)) c, P, Q = F.cancel(G) if not isinstance(f, (tuple, Tuple)): return c*(P.as_expr()/Q.as_expr()) else: if not opt.polys: return c, P.as_expr(), Q.as_expr() else: return c, P, Q def reduced(f, G, *gens, **args): """ Reduces a polynomial ``f`` modulo a set of polynomials ``G``. Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``, computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r`` such that ``f = q_1*g_1 + ... + q_n*g_n + r``, where ``r`` vanishes or ``r`` is a completely reduced polynomial with respect to ``G``. Examples ======== >>> reduced(2*x**4 + y**2 - x**2 + y**3, [x**3 - x, y**3 - y]) ([2*x, 1], x**2 + y**2 + y) """ allowed_flags(args, ['polys', 'auto']) try: polys, opt = parallel_poly_from_expr([f] + list(G), *gens, **args) except PolificationFailed as exc: raise ComputationFailed('reduced', 0, exc) domain = opt.domain retract = False if opt.auto and domain.is_Ring and not domain.is_Field: opt = opt.clone({'domain': domain.field}) retract = True _ring = opt.domain.poly_ring(*opt.gens, order=opt.order) for i, poly in enumerate(polys): poly = dict(poly.set_domain(opt.domain).rep) polys[i] = _ring.from_dict(poly) Q, r = polys[0].div(polys[1:]) Q = [Poly._from_dict(dict(q), opt) for q in Q] r = Poly._from_dict(dict(r), opt) if retract: try: _Q, _r = [q.to_ring() for q in Q], r.to_ring() except CoercionFailed: pass else: Q, r = _Q, _r if not opt.polys: return [q.as_expr() for q in Q], r.as_expr() else: return Q, r def groebner(F, *gens, **args): r""" Computes the reduced Gröbner basis for a set of polynomials. Parameters ========== F : list a set of polynomials \*gens : tuple polynomial generators \**args : dict a dictionary of parameters, namely order : str, optional Monomial order, defaults to ``lex``. method : {'buchberger', 'f5b'}, optional Set algorithm to compute Gröbner basis. By default, an improved implementation of the Buchberger algorithm is used. field : bool, optional Force coefficients domain to be a field. Defaults to False. 
Examples ======== >>> F = [x*y - 2*x, 2*x**2 - y**2] >>> groebner(F) GroebnerBasis([2*x**2 - y**2, x*y - 2*x, y**3 - 2*y**2], x, y, domain='ZZ', order='lex') >>> groebner(F, order=grevlex) GroebnerBasis([y**3 - 2*y**2, 2*x**2 - y**2, x*y - 2*x], x, y, domain='ZZ', order='grevlex') >>> groebner(F, field=True) GroebnerBasis([x**2 - y**2/2, x*y - 2*x, y**3 - 2*y**2], x, y, domain='QQ', order='lex') References ========== * :cite:`Buchberger2001systems` * :cite:`Cox2015ideals` See Also ======== diofant.solvers.polysys.solve_poly_system """ return GroebnerBasis(F, *gens, **args) class GroebnerBasis(Basic): """Represents a reduced Gröbner basis.""" def __new__(cls, F, *gens, **args): """Compute a reduced Gröbner basis for a system of polynomials.""" allowed_flags(args, ['polys', 'method']) try: polys, opt = parallel_poly_from_expr(F, *gens, **args) except PolificationFailed as exc: raise ComputationFailed('groebner', len(F), exc) ring = opt.domain.poly_ring(*opt.gens, order=opt.order) if not ring.domain.is_Exact: raise ValueError(f'Domain must be exact, got {ring.domain}') polys = [ring.from_dict(dict(_.rep)) for _ in polys if not _.is_zero] G = _groebner(polys, ring, method=opt.method) G = [Poly._from_dict(dict(g), opt) for g in G] return cls._new(G, opt) @classmethod def _new(cls, basis, options): obj = Basic.__new__(cls) obj._basis = tuple(basis) obj._options = options return obj @property def args(self): return (Tuple(*self.exprs),) + self.gens @property def exprs(self): return [poly.as_expr() for poly in self._basis] @property def polys(self): return list(self._basis) @property def gens(self): return self._options.gens @property def domain(self): return self._options.domain @property def order(self): return self._options.order def __len__(self): return len(self._basis) def __iter__(self): if self._options.polys: return iter(self.polys) else: return iter(self.exprs) def __getitem__(self, item): if self._options.polys: basis = self.polys else: basis = self.exprs return basis[item] def __hash__(self): return hash((self._basis, tuple(sorted(self._options.items())))) def __eq__(self, other): if isinstance(other, self.__class__): return self._basis == other._basis and self._options == other._options elif iterable(other): return self.polys == list(other) or self.exprs == list(other) else: return False @property def dimension(self): """Dimension of the ideal, generated by a Gröbner basis.""" sets = self.independent_sets if sets is not None: return max(len(s) for s in sets) @property def independent_sets(self): """Compute independent sets for ideal, generated by a Gröbner basis. References ========== * :cite:`Kredel1988indep` """ if self.contains(Integer(1)): return HTG = [_.LM(order=self.order) for _ in self.polys] def dimrec(S, U, M): U1 = U.copy() while U1: x = U1.pop(0) S1 = S + [x] t = Monomial(Mul(*S1), self.gens) for ht in HTG: if all(a and b or not a for a, b in zip(ht, t)): break else: M = dimrec(S1, U1, M) if any(all(_ in m for _ in S) for m in M): return M else: return [S] + M return dimrec([], list(self.gens), []) def set_order(self, order): """ Convert a Gröbner basis from one ordering to another. Notes ===== The FGLM algorithm :cite:`Faugere1993groebner` used to convert reduced Gröbner bases of zero-dimensional ideals from one ordering to another. Sometimes it is infeasible to compute a Gröbner basis with respect to a particular ordering directly. 
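In such cases one computes the basis in a cheaper ordering first and then converts it, as the example below does.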
Examples ======== >>> F = [x**2 - 3*y - x + 1, y**2 - 2*x + y - 1] >>> G = groebner(F, order='grlex') >>> G.set_order('lex') == groebner(F, order='lex') True """ src_order = self.order dst_order = monomial_key(order) if src_order == dst_order: return self if self.dimension != 0: raise NotImplementedError("can't convert Gröbner bases of " 'ideals with positive dimension') polys = self.polys domain = self.domain opt = self._options.clone({'domain': domain.field, 'order': dst_order}) _ring = opt.domain.poly_ring(*opt.gens, order=src_order) for i, poly in enumerate(polys): poly = dict(poly.set_domain(opt.domain).rep) polys[i] = _ring.from_dict(poly) G = matrix_fglm(polys, _ring, dst_order) G = [Poly._from_dict(dict(g), opt) for g in G] if not domain.is_Field: G = [g.clear_denoms(convert=True)[1] for g in G] opt.domain = domain return self._new(G, opt) def reduce(self, expr, auto=True): """ Reduces a polynomial modulo a Gröbner basis. Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``, computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r`` such that ``f = q_1*f_1 + ... + q_n*f_n + r``, where ``r`` vanishes or ``r`` is a completely reduced polynomial with respect to ``G``. Examples ======== >>> f = 2*x**4 - x**2 + y**3 + y**2 >>> G = groebner([x**3 - x, y**3 - y]) >>> G.reduce(f) ([2*x, 1], x**2 + y**2 + y) >>> Q, r = _ >>> expand(sum(q*g for q, g in zip(Q, G)) + r) 2*x**4 - x**2 + y**3 + y**2 >>> _ == f True """ poly = Poly._from_expr(expr, self._options) polys = [poly] + list(self._basis) opt = self._options domain = self.domain retract = False if auto and domain.is_Ring and not domain.is_Field: opt = self._options.clone({'domain': domain.field}) retract = True _ring = opt.domain.poly_ring(*opt.gens, order=opt.order) for i, poly in enumerate(polys): poly = dict(poly.set_domain(opt.domain).rep) polys[i] = _ring.from_dict(poly) Q, r = polys[0].div(polys[1:]) Q = [Poly._from_dict(dict(q), opt) for q in Q] r = Poly._from_dict(dict(r), opt) if retract: try: _Q, _r = [q.to_ring() for q in Q], r.to_ring() except CoercionFailed: pass else: Q, r = _Q, _r if not opt.polys: return [q.as_expr() for q in Q], r.as_expr() else: return Q, r def contains(self, poly): """ Check if ``poly`` belongs the ideal generated by ``self``. Examples ======== >>> f = 2*x**3 + y**3 + 3*y >>> G = groebner([x**2 + y**2 - 1, x*y - 2]) >>> G.contains(f) True >>> G.contains(f + 1) False """ return self.reduce(poly)[1] == 0 def poly(expr, *gens, **args): """ Efficiently transform an expression into a polynomial. 
Examples ======== >>> poly(x*(x**2 + x - 1)**2) Poly(x**5 + 2*x**4 - x**3 - 2*x**2 + x, x, domain='ZZ') """ allowed_flags(args, []) def _poly(expr, opt): terms, poly_terms = [], [] for term in Add.make_args(expr): factors, poly_factors = [], [] for factor in Mul.make_args(term): if factor.is_Add: poly_factors.append(_poly(factor, opt)) elif (factor.is_Pow and factor.base.is_Add and factor.exp.is_Integer and factor.exp >= 0): poly_factors.append(_poly(factor.base, opt)**factor.exp) else: factors.append(factor) if not poly_factors: terms.append(term) else: product = poly_factors[0] for factor in poly_factors[1:]: product *= factor if factors: factor = Mul(*factors) if factor.is_Number: product *= factor else: product *= Poly._from_expr(factor, opt) poly_terms.append(product) if not poly_terms: result = Poly._from_expr(expr, opt) else: result = poly_terms[0] for term in poly_terms[1:]: result += term if terms: term = Add(*terms) if term.is_Number: result += term else: result += Poly._from_expr(term, opt) return result.reorder(*opt.get('gens', ()), **args) expr = sympify(expr) if expr.is_Poly: return Poly(expr, *gens, **args) opt = build_options(gens, args) no_gens = not opt.gens if no_gens: gens = _find_gens([expr], opt) opt = opt.clone({'gens': gens}) if 'expand' not in args: opt = opt.clone({'expand': False}) res = _poly(expr, opt) if no_gens: res = res.exclude() return res
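# ---------------------------------------------------------------------------
# Editor's illustrative sketch (not part of the module): a minimal tour of
# the public API documented above.  Every expected value is taken verbatim
# from the docstring examples earlier in this file, and the imports assume
# the usual diofant top-level namespace.
#
#     from diofant import cancel, div, factor, groebner, sqrt
#     from diofant.abc import x, y
#
#     div(x**2 + 1, 2*x - 4)                 # -> (x/2 + 1, 5)
#     factor(x**2 - 2, extension=sqrt(2))    # -> (x - sqrt(2))*(x + sqrt(2))
#     cancel((2*x**2 - 2)/(x**2 - 2*x + 1))  # -> (2*x + 2)/(x - 1)
#     groebner([x*y - 2*x, 2*x**2 - y**2])   # -> GroebnerBasis([2*x**2 - y**2,
#                                            #    x*y - 2*x, y**3 - 2*y**2], ...)
# ---------------------------------------------------------------------------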
import pytest from diofant import (DiracDelta, Eq, Heaviside, I, Piecewise, Rational, Symbol, adjoint, conjugate, nan, pi, sign, sqrt, symbols, transpose) from diofant.abc import x, y, z from diofant.core.function import ArgumentIndexError __all__ = () def test_DiracDelta(): i = Symbol('i', nonzero=True) j = Symbol('j', positive=True) k = Symbol('k', negative=True) assert DiracDelta(1) == 0 assert DiracDelta(5.1) == 0 assert DiracDelta(-pi) == 0 assert DiracDelta(5, 7) == 0 assert DiracDelta(i) == 0 assert DiracDelta(j) == 0 assert DiracDelta(k) == 0 assert DiracDelta(nan) == nan assert isinstance(DiracDelta(0), DiracDelta) assert isinstance(DiracDelta(x), DiracDelta) assert adjoint(DiracDelta(x)) == DiracDelta(x) assert adjoint(DiracDelta(x - y)) == DiracDelta(x - y) assert conjugate(DiracDelta(x)) == DiracDelta(x) assert conjugate(DiracDelta(x - y)) == DiracDelta(x - y) assert transpose(DiracDelta(x)) == DiracDelta(x) assert transpose(DiracDelta(x - y)) == DiracDelta(x - y) assert DiracDelta(x).diff(x) == DiracDelta(x, 1) assert DiracDelta(x, 1).diff(x) == DiracDelta(x, 2) assert DiracDelta(x).is_simple(x) is True assert DiracDelta(3*x).is_simple(x) is True assert DiracDelta(x**2).is_simple(x) is False assert DiracDelta(sqrt(x)).is_simple(x) is False assert DiracDelta(x).is_simple(y) is False assert DiracDelta(x*y).simplify(x) == DiracDelta(x)/abs(y) assert DiracDelta(x*y).simplify(y) == DiracDelta(y)/abs(x) assert DiracDelta(x**2*y).simplify(x) == DiracDelta(x**2*y) assert DiracDelta(y).simplify(x) == DiracDelta(y) assert DiracDelta((x - 1)*(x - 2)*(x - 3)).simplify(x) == \ DiracDelta(x - 3)/2 + DiracDelta(x - 2) + DiracDelta(x - 1)/2 pytest.raises(ArgumentIndexError, lambda: DiracDelta(x).fdiff(2)) pytest.raises(ValueError, lambda: DiracDelta(x, -1)) def test_heaviside(): x, y = symbols('x, y', extended_real=True) assert Heaviside(0) == 0.5 assert Heaviside(-5) == 0 assert Heaviside(1) == 1 assert Heaviside(nan) == nan assert Heaviside(x).is_real assert adjoint(Heaviside(x)) == Heaviside(x) assert adjoint(Heaviside(x - y)) == Heaviside(x - y) assert conjugate(Heaviside(x)) == Heaviside(x) assert conjugate(Heaviside(x - y)) == Heaviside(x - y) assert transpose(Heaviside(x)) == Heaviside(x) assert transpose(Heaviside(x - y)) == Heaviside(x - y) assert Heaviside(x).diff(x) == DiracDelta(x) assert Heaviside(z + I).is_Function is True assert Heaviside(I*z).is_Function is True pytest.raises(ArgumentIndexError, lambda: Heaviside(x).fdiff(2)) pytest.raises(ValueError, lambda: Heaviside(I)) pytest.raises(ValueError, lambda: Heaviside(2 + 3*I)) def test_rewrite(): x = Symbol('x', extended_real=True) assert Heaviside(x).rewrite(Piecewise) == \ Piecewise((1, x > 0), (Rational(1, 2), Eq(x, 0)), (0, True)) assert Heaviside(y).rewrite(Piecewise) == Heaviside(y) assert Heaviside(x).rewrite(sign) == (sign(x)+1)/2 assert Heaviside(y).rewrite(sign) == Heaviside(y)
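# Editor's hedged addition: one more check in the style of the tests above.
# It only restates identities this file already asserts (the derivative of
# Heaviside and its Piecewise rewrite for an extended-real symbol), so it
# should hold without further assumptions.
def test_heaviside_delta_roundtrip():
    t = Symbol('t', extended_real=True)
    assert Heaviside(t).diff(t) == DiracDelta(t)
    assert Heaviside(t).rewrite(Piecewise).subs({t: 1}) == 1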
diofant/diofant
diofant/tests/functions/test_delta_functions.py
diofant/polys/polytools.py
""" DataFrame --------- An efficient 2D container for potentially mixed-type time series or other labeled data series. Similar to its R counterpart, data.frame, except providing automatic data alignment and a host of useful data manipulation methods having to do with the labeling information """ import collections from collections import OrderedDict, abc import functools from io import StringIO import itertools import sys import warnings from textwrap import dedent from typing import FrozenSet, List, Optional, Set, Type, Union import numpy as np import numpy.ma as ma from pandas._config import get_option from pandas._libs import lib, algos as libalgos from pandas.util._decorators import (Appender, Substitution, rewrite_axis_style_signature, deprecate_kwarg) from pandas.util._validators import (validate_bool_kwarg, validate_axis_style_args) from pandas.compat import PY36, raise_with_traceback from pandas.compat.numpy import function as nv from pandas.core.arrays.sparse import SparseFrameAccessor from pandas.core.dtypes.cast import ( maybe_upcast, cast_scalar_to_array, infer_dtype_from_scalar, maybe_cast_to_datetime, maybe_infer_to_datetimelike, maybe_convert_platform, maybe_downcast_to_dtype, invalidate_string_dtypes, coerce_to_dtypes, maybe_upcast_putmask, find_common_type) from pandas.core.dtypes.common import ( is_dict_like, is_datetime64tz_dtype, is_object_dtype, is_extension_type, is_extension_array_dtype, is_datetime64_any_dtype, is_bool_dtype, is_integer_dtype, is_float_dtype, is_integer, is_scalar, is_dtype_equal, needs_i8_conversion, infer_dtype_from_object, ensure_float64, ensure_int64, ensure_platform_int, is_list_like, is_nested_list_like, is_iterator, is_sequence, is_named_tuple) from pandas.core.dtypes.generic import ( ABCSeries, ABCDataFrame, ABCIndexClass, ABCMultiIndex) from pandas.core.dtypes.missing import isna, notna from pandas.core import algorithms from pandas.core import common as com from pandas.core import nanops from pandas.core import ops from pandas.core.accessor import CachedAccessor from pandas.core.arrays import Categorical, ExtensionArray from pandas.core.arrays.datetimelike import ( DatetimeLikeArrayMixin as DatetimeLikeArray ) from pandas.core.generic import NDFrame, _shared_docs from pandas.core.index import (Index, MultiIndex, ensure_index, ensure_index_from_sequences) from pandas.core.indexes import base as ibase from pandas.core.indexes.datetimes import DatetimeIndex from pandas.core.indexes.period import PeriodIndex from pandas.core.indexing import (maybe_droplevels, convert_to_index_sliceable, check_bool_indexer) from pandas.core.internals import BlockManager from pandas.core.internals.construction import ( masked_rec_array_to_mgr, get_names_from_index, to_arrays, reorder_arrays, init_ndarray, init_dict, arrays_to_mgr, sanitize_index) from pandas.core.series import Series from pandas.io.formats import console from pandas.io.formats import format as fmt from pandas.io.formats.printing import pprint_thing import pandas.plotting # --------------------------------------------------------------------- # Docstring templates _shared_doc_kwargs = dict( axes='index, columns', klass='DataFrame', axes_single_arg="{0 or 'index', 1 or 'columns'}", axis="""axis : {0 or 'index', 1 or 'columns'}, default 0 If 0 or 'index': apply function to each column. If 1 or 'columns': apply function to each row.""", optional_by=""" by : str or list of str Name or list of names to sort by. 
- if `axis` is 0 or `'index'` then `by` may contain index levels and/or column labels - if `axis` is 1 or `'columns'` then `by` may contain column levels and/or index labels .. versionchanged:: 0.23.0 Allow specifying index or column level names.""", versionadded_to_excel='', optional_labels="""labels : array-like, optional New labels / index to conform the axis specified by 'axis' to.""", optional_axis="""axis : int or str, optional Axis to target. Can be either the axis name ('index', 'columns') or number (0, 1).""", ) _numeric_only_doc = """numeric_only : boolean, default None Include only float, int, boolean data. If None, will attempt to use everything, then use only numeric data """ _merge_doc = """ Merge DataFrame or named Series objects with a database-style join. The join is done on columns or indexes. If joining columns on columns, the DataFrame indexes *will be ignored*. Otherwise if joining indexes on indexes or indexes on a column or columns, the index will be passed on. Parameters ----------%s right : DataFrame or named Series Object to merge with. how : {'left', 'right', 'outer', 'inner'}, default 'inner' Type of merge to be performed. * left: use only keys from left frame, similar to a SQL left outer join; preserve key order. * right: use only keys from right frame, similar to a SQL right outer join; preserve key order. * outer: use union of keys from both frames, similar to a SQL full outer join; sort keys lexicographically. * inner: use intersection of keys from both frames, similar to a SQL inner join; preserve the order of the left keys. on : label or list Column or index level names to join on. These must be found in both DataFrames. If `on` is None and not merging on indexes then this defaults to the intersection of the columns in both DataFrames. left_on : label or list, or array-like Column or index level names to join on in the left DataFrame. Can also be an array or list of arrays of the length of the left DataFrame. These arrays are treated as if they are columns. right_on : label or list, or array-like Column or index level names to join on in the right DataFrame. Can also be an array or list of arrays of the length of the right DataFrame. These arrays are treated as if they are columns. left_index : bool, default False Use the index from the left DataFrame as the join key(s). If it is a MultiIndex, the number of keys in the other DataFrame (either the index or a number of columns) must match the number of levels. right_index : bool, default False Use the index from the right DataFrame as the join key. Same caveats as left_index. sort : bool, default False Sort the join keys lexicographically in the result DataFrame. If False, the order of the join keys depends on the join type (how keyword). suffixes : tuple of (str, str), default ('_x', '_y') Suffix to apply to overlapping column names in the left and right side, respectively. To raise an exception on overlapping columns use (False, False). copy : bool, default True If False, avoid copy if possible. indicator : bool or str, default False If True, adds a column to output DataFrame called "_merge" with information on the source of each row. If string, column with information on source of each row will be added to output DataFrame, and column will be named value of string. 
Information column is Categorical-type and takes on a value of "left_only" for observations whose merge key only appears in 'left' DataFrame, "right_only" for observations whose merge key only appears in 'right' DataFrame, and "both" if the observation's merge key is found in both. validate : str, optional If specified, checks if merge is of specified type. * "one_to_one" or "1:1": check if merge keys are unique in both left and right datasets. * "one_to_many" or "1:m": check if merge keys are unique in left dataset. * "many_to_one" or "m:1": check if merge keys are unique in right dataset. * "many_to_many" or "m:m": allowed, but does not result in checks. .. versionadded:: 0.21.0 Returns ------- DataFrame A DataFrame of the two merged objects. See Also -------- merge_ordered : Merge with optional filling/interpolation. merge_asof : Merge on nearest keys. DataFrame.join : Similar method using indices. Notes ----- Support for specifying index levels as the `on`, `left_on`, and `right_on` parameters was added in version 0.23.0 Support for merging named Series objects was added in version 0.24.0 Examples -------- >>> df1 = pd.DataFrame({'lkey': ['foo', 'bar', 'baz', 'foo'], ... 'value': [1, 2, 3, 5]}) >>> df2 = pd.DataFrame({'rkey': ['foo', 'bar', 'baz', 'foo'], ... 'value': [5, 6, 7, 8]}) >>> df1 lkey value 0 foo 1 1 bar 2 2 baz 3 3 foo 5 >>> df2 rkey value 0 foo 5 1 bar 6 2 baz 7 3 foo 8 Merge df1 and df2 on the lkey and rkey columns. The value columns have the default suffixes, _x and _y, appended. >>> df1.merge(df2, left_on='lkey', right_on='rkey') lkey value_x rkey value_y 0 foo 1 foo 5 1 foo 1 foo 8 2 foo 5 foo 5 3 foo 5 foo 8 4 bar 2 bar 6 5 baz 3 baz 7 Merge DataFrames df1 and df2 with specified left and right suffixes appended to any overlapping columns. >>> df1.merge(df2, left_on='lkey', right_on='rkey', ... suffixes=('_left', '_right')) lkey value_left rkey value_right 0 foo 1 foo 5 1 foo 1 foo 8 2 foo 5 foo 5 3 foo 5 foo 8 4 bar 2 bar 6 5 baz 3 baz 7 Merge DataFrames df1 and df2, but raise an exception if the DataFrames have any overlapping columns. >>> df1.merge(df2, left_on='lkey', right_on='rkey', suffixes=(False, False)) Traceback (most recent call last): ... ValueError: columns overlap but no suffix specified: Index(['value'], dtype='object') """ # ----------------------------------------------------------------------- # DataFrame class class DataFrame(NDFrame): """ Two-dimensional size-mutable, potentially heterogeneous tabular data structure with labeled axes (rows and columns). Arithmetic operations align on both row and column labels. Can be thought of as a dict-like container for Series objects. The primary pandas data structure. Parameters ---------- data : ndarray (structured or homogeneous), Iterable, dict, or DataFrame Dict can contain Series, arrays, constants, or list-like objects .. versionchanged :: 0.23.0 If data is a dict, argument order is maintained for Python 3.6 and later. index : Index or array-like Index to use for resulting frame. Will default to RangeIndex if no indexing information part of input data and no index provided columns : Index or array-like Column labels to use for resulting frame. Will default to RangeIndex (0, 1, 2, ..., n) if no column labels are provided dtype : dtype, default None Data type to force. Only a single dtype is allowed. If None, infer copy : boolean, default False Copy data from inputs. Only affects DataFrame / 2d ndarray input See Also -------- DataFrame.from_records : Constructor from tuples, also record arrays. 
DataFrame.from_dict : From dicts of Series, arrays, or dicts. DataFrame.from_items : From sequence of (key, value) pairs read_csv, pandas.read_table, pandas.read_clipboard. Examples -------- Constructing DataFrame from a dictionary. >>> d = {'col1': [1, 2], 'col2': [3, 4]} >>> df = pd.DataFrame(data=d) >>> df col1 col2 0 1 3 1 2 4 Notice that the inferred dtype is int64. >>> df.dtypes col1 int64 col2 int64 dtype: object To enforce a single dtype: >>> df = pd.DataFrame(data=d, dtype=np.int8) >>> df.dtypes col1 int8 col2 int8 dtype: object Constructing DataFrame from numpy ndarray: >>> df2 = pd.DataFrame(np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]), ... columns=['a', 'b', 'c']) >>> df2 a b c 0 1 2 3 1 4 5 6 2 7 8 9 """ @property def _constructor(self): return DataFrame _constructor_sliced = Series # type: Type[Series] _deprecations = NDFrame._deprecations | frozenset([ 'get_value', 'set_value', 'from_csv', 'from_items' ]) # type: FrozenSet[str] _accessors = set() # type: Set[str] @property def _constructor_expanddim(self): raise NotImplementedError("Not supported for DataFrames!") # ---------------------------------------------------------------------- # Constructors def __init__(self, data=None, index=None, columns=None, dtype=None, copy=False): if data is None: data = {} if dtype is not None: dtype = self._validate_dtype(dtype) if isinstance(data, DataFrame): data = data._data if isinstance(data, BlockManager): mgr = self._init_mgr(data, axes=dict(index=index, columns=columns), dtype=dtype, copy=copy) elif isinstance(data, dict): mgr = init_dict(data, index, columns, dtype=dtype) elif isinstance(data, ma.MaskedArray): import numpy.ma.mrecords as mrecords # masked recarray if isinstance(data, mrecords.MaskedRecords): mgr = masked_rec_array_to_mgr(data, index, columns, dtype, copy) # a masked array else: mask = ma.getmaskarray(data) if mask.any(): data, fill_value = maybe_upcast(data, copy=True) data.soften_mask() # set hardmask False if it was True data[mask] = fill_value else: data = data.copy() mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) elif isinstance(data, (np.ndarray, Series, Index)): if data.dtype.names: data_columns = list(data.dtype.names) data = {k: data[k] for k in data_columns} if columns is None: columns = data_columns mgr = init_dict(data, index, columns, dtype=dtype) elif getattr(data, 'name', None) is not None: mgr = init_dict({data.name: data}, index, columns, dtype=dtype) else: mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) # For data is list-like, or Iterable (will consume into list) elif (isinstance(data, abc.Iterable) and not isinstance(data, (str, bytes))): if not isinstance(data, abc.Sequence): data = list(data) if len(data) > 0: if is_list_like(data[0]) and getattr(data[0], 'ndim', 1) == 1: if is_named_tuple(data[0]) and columns is None: columns = data[0]._fields arrays, columns = to_arrays(data, columns, dtype=dtype) columns = ensure_index(columns) # set the index if index is None: if isinstance(data[0], Series): index = get_names_from_index(data) elif isinstance(data[0], Categorical): index = ibase.default_index(len(data[0])) else: index = ibase.default_index(len(data)) mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) else: mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) else: mgr = init_dict({}, index, columns, dtype=dtype) else: try: arr = np.array(data, dtype=dtype, copy=copy) except (ValueError, TypeError) as e: exc = TypeError('DataFrame constructor called with ' 'incompatible data and 
dtype: {e}'.format(e=e)) raise_with_traceback(exc) if arr.ndim == 0 and index is not None and columns is not None: values = cast_scalar_to_array((len(index), len(columns)), data, dtype=dtype) mgr = init_ndarray(values, index, columns, dtype=values.dtype, copy=False) else: raise ValueError('DataFrame constructor not properly called!') NDFrame.__init__(self, mgr, fastpath=True) # ---------------------------------------------------------------------- @property def axes(self): """ Return a list representing the axes of the DataFrame. It has the row axis labels and column axis labels as the only members. They are returned in that order. Examples -------- >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]}) >>> df.axes [RangeIndex(start=0, stop=2, step=1), Index(['col1', 'col2'], dtype='object')] """ return [self.index, self.columns] @property def shape(self): """ Return a tuple representing the dimensionality of the DataFrame. See Also -------- ndarray.shape Examples -------- >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]}) >>> df.shape (2, 2) >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4], ... 'col3': [5, 6]}) >>> df.shape (2, 3) """ return len(self.index), len(self.columns) @property def _is_homogeneous_type(self): """ Whether all the columns in a DataFrame have the same type. Returns ------- bool Examples -------- >>> DataFrame({"A": [1, 2], "B": [3, 4]})._is_homogeneous_type True >>> DataFrame({"A": [1, 2], "B": [3.0, 4.0]})._is_homogeneous_type False Items with the same type but different sizes are considered different types. >>> DataFrame({ ... "A": np.array([1, 2], dtype=np.int32), ... "B": np.array([1, 2], dtype=np.int64)})._is_homogeneous_type False """ if self._data.any_extension_types: return len({block.dtype for block in self._data.blocks}) == 1 else: return not self._data.is_mixed_type # ---------------------------------------------------------------------- # Rendering Methods def _repr_fits_vertical_(self): """ Check length against max_rows. """ max_rows = get_option("display.max_rows") return len(self) <= max_rows def _repr_fits_horizontal_(self, ignore_width=False): """ Check if full repr fits in horizontal boundaries imposed by the display options width and max_columns. In case off non-interactive session, no boundaries apply. `ignore_width` is here so ipnb+HTML output can behave the way users expect. display.max_columns remains in effect. 
GH3541, GH3573 """ width, height = console.get_console_size() max_columns = get_option("display.max_columns") nb_columns = len(self.columns) # exceed max columns if ((max_columns and nb_columns > max_columns) or ((not ignore_width) and width and nb_columns > (width // 2))): return False # used by repr_html under IPython notebook or scripts ignore terminal # dims if ignore_width or not console.in_interactive_session(): return True if (get_option('display.width') is not None or console.in_ipython_frontend()): # check at least the column row for excessive width max_rows = 1 else: max_rows = get_option("display.max_rows") # when auto-detecting, so width=None and not in ipython front end # check whether repr fits horizontal by actually checking # the width of the rendered repr buf = StringIO() # only care about the stuff we'll actually print out # and to_string on entire frame may be expensive d = self if not (max_rows is None): # unlimited rows # min of two, where one may be None d = d.iloc[:min(max_rows, len(d))] else: return True d.to_string(buf=buf) value = buf.getvalue() repr_width = max(len(l) for l in value.split('\n')) return repr_width < width def _info_repr(self): """ True if the repr should show the info view. """ info_repr_option = (get_option("display.large_repr") == "info") return info_repr_option and not (self._repr_fits_horizontal_() and self._repr_fits_vertical_()) def __repr__(self): """ Return a string representation for a particular DataFrame. """ buf = StringIO("") if self._info_repr(): self.info(buf=buf) return buf.getvalue() max_rows = get_option("display.max_rows") max_cols = get_option("display.max_columns") show_dimensions = get_option("display.show_dimensions") if get_option("display.expand_frame_repr"): width, _ = console.get_console_size() else: width = None self.to_string(buf=buf, max_rows=max_rows, max_cols=max_cols, line_width=width, show_dimensions=show_dimensions) return buf.getvalue() def _repr_html_(self): """ Return a html representation for a particular DataFrame. Mainly for IPython notebook. """ if self._info_repr(): buf = StringIO("") self.info(buf=buf) # need to escape the <class>, should be the first line. val = buf.getvalue().replace('<', r'&lt;', 1) val = val.replace('>', r'&gt;', 1) return '<pre>' + val + '</pre>' if get_option("display.notebook_repr_html"): max_rows = get_option("display.max_rows") max_cols = get_option("display.max_columns") show_dimensions = get_option("display.show_dimensions") return self.to_html(max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, notebook=True) else: return None @Substitution(header='Write out the column names. If a list of strings ' 'is given, it is assumed to be aliases for the ' 'column names', col_space_type='int', col_space='The minimum width of each column') @Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring) def to_string(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', line_width=None): """ Render a DataFrame to a console-friendly tabular output. %(shared_params)s line_width : int, optional Width to wrap a line in characters. %(returns)s See Also -------- to_html : Convert DataFrame to HTML. 
Examples -------- >>> d = {'col1': [1, 2, 3], 'col2': [4, 5, 6]} >>> df = pd.DataFrame(d) >>> print(df.to_string()) col1 col2 0 1 4 1 2 5 2 3 6 """ formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, line_width=line_width) formatter.to_string() if buf is None: result = formatter.buf.getvalue() return result # ---------------------------------------------------------------------- @property def style(self): """ Property returning a Styler object containing methods for building a styled HTML representation fo the DataFrame. See Also -------- io.formats.style.Styler """ from pandas.io.formats.style import Styler return Styler(self) def iteritems(self): r""" Iterator over (column name, Series) pairs. Iterates over the DataFrame columns, returning a tuple with the column name and the content as a Series. Yields ------ label : object The column names for the DataFrame being iterated over. content : Series The column entries belonging to each label, as a Series. See Also -------- DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs. DataFrame.itertuples : Iterate over DataFrame rows as namedtuples of the values. Examples -------- >>> df = pd.DataFrame({'species': ['bear', 'bear', 'marsupial'], ... 'population': [1864, 22000, 80000]}, ... index=['panda', 'polar', 'koala']) >>> df species population panda bear 1864 polar bear 22000 koala marsupial 80000 >>> for label, content in df.iteritems(): ... print('label:', label) ... print('content:', content, sep='\n') ... label: species content: panda bear polar bear koala marsupial Name: species, dtype: object label: population content: panda 1864 polar 22000 koala 80000 Name: population, dtype: int64 """ if self.columns.is_unique and hasattr(self, '_item_cache'): for k in self.columns: yield k, self._get_item_cache(k) else: for i, k in enumerate(self.columns): yield k, self._ixs(i, axis=1) def iterrows(self): """ Iterate over DataFrame rows as (index, Series) pairs. Yields ------ index : label or tuple of label The index of the row. A tuple for a `MultiIndex`. data : Series The data of the row as a Series. it : generator A generator that iterates over the rows of the frame. See Also -------- itertuples : Iterate over DataFrame rows as namedtuples of the values. iteritems : Iterate over (column name, Series) pairs. Notes ----- 1. Because ``iterrows`` returns a Series for each row, it does **not** preserve dtypes across the rows (dtypes are preserved across columns for DataFrames). For example, >>> df = pd.DataFrame([[1, 1.5]], columns=['int', 'float']) >>> row = next(df.iterrows())[1] >>> row int 1.0 float 1.5 Name: 0, dtype: float64 >>> print(row['int'].dtype) float64 >>> print(df['int'].dtype) int64 To preserve dtypes while iterating over the rows, it is better to use :meth:`itertuples` which returns namedtuples of the values and which is generally faster than ``iterrows``. 2. You should **never modify** something you are iterating over. This is not guaranteed to work in all cases. Depending on the data types, the iterator returns a copy and not a view, and writing to it will have no effect. 
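        For example (an illustrative case reusing the mixed-dtype frame
        above, where the yielded row is necessarily a copy):

        >>> for _, row in df.iterrows():
        ...     row['int'] = 99
        >>> df['int'][0]
        1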
""" columns = self.columns klass = self._constructor_sliced for k, v in zip(self.index, self.values): s = klass(v, index=columns, name=k) yield k, s def itertuples(self, index=True, name="Pandas"): """ Iterate over DataFrame rows as namedtuples. Parameters ---------- index : bool, default True If True, return the index as the first element of the tuple. name : str or None, default "Pandas" The name of the returned namedtuples or None to return regular tuples. Returns ------- iterator An object to iterate over namedtuples for each row in the DataFrame with the first field possibly being the index and following fields being the column values. See Also -------- DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs. DataFrame.iteritems : Iterate over (column name, Series) pairs. Notes ----- The column names will be renamed to positional names if they are invalid Python identifiers, repeated, or start with an underscore. With a large number of columns (>255), regular tuples are returned. Examples -------- >>> df = pd.DataFrame({'num_legs': [4, 2], 'num_wings': [0, 2]}, ... index=['dog', 'hawk']) >>> df num_legs num_wings dog 4 0 hawk 2 2 >>> for row in df.itertuples(): ... print(row) ... Pandas(Index='dog', num_legs=4, num_wings=0) Pandas(Index='hawk', num_legs=2, num_wings=2) By setting the `index` parameter to False we can remove the index as the first element of the tuple: >>> for row in df.itertuples(index=False): ... print(row) ... Pandas(num_legs=4, num_wings=0) Pandas(num_legs=2, num_wings=2) With the `name` parameter set we set a custom name for the yielded namedtuples: >>> for row in df.itertuples(name='Animal'): ... print(row) ... Animal(Index='dog', num_legs=4, num_wings=0) Animal(Index='hawk', num_legs=2, num_wings=2) """ arrays = [] fields = list(self.columns) if index: arrays.append(self.index) fields.insert(0, "Index") # use integer indexing because of possible duplicate column names arrays.extend(self.iloc[:, k] for k in range(len(self.columns))) # Python 3 supports at most 255 arguments to constructor if name is not None and len(self.columns) + index < 256: itertuple = collections.namedtuple(name, fields, rename=True) return map(itertuple._make, zip(*arrays)) # fallback to regular tuples return zip(*arrays) items = iteritems def __len__(self): """ Returns length of info axis, but here we use the index. """ return len(self.index) def dot(self, other): """ Compute the matrix multiplication between the DataFrame and other. This method computes the matrix product between the DataFrame and the values of an other Series, DataFrame or a numpy array. It can also be called using ``self @ other`` in Python >= 3.5. Parameters ---------- other : Series, DataFrame or array-like The other object to compute the matrix product with. Returns ------- Series or DataFrame If other is a Series, return the matrix product between self and other as a Serie. If other is a DataFrame or a numpy.array, return the matrix product of self and other in a DataFrame of a np.array. See Also -------- Series.dot: Similar method for Series. Notes ----- The dimensions of DataFrame and other must be compatible in order to compute the matrix multiplication. In addition, the column names of DataFrame and the index of other must contain the same values, as they will be aligned prior to the multiplication. The dot method for Series computes the inner product, instead of the matrix product here. Examples -------- Here we multiply a DataFrame with a Series. 
        >>> df = pd.DataFrame([[0, 1, -2, -1], [1, 1, 1, 1]])
        >>> s = pd.Series([1, 1, 2, 1])
        >>> df.dot(s)
        0    -4
        1     5
        dtype: int64

        Here we multiply a DataFrame with another DataFrame.

        >>> other = pd.DataFrame([[0, 1], [1, 2], [-1, -1], [2, 0]])
        >>> df.dot(other)
           0  1
        0  1  4
        1  2  2

        Note that the dot method gives the same result as ``@``

        >>> df @ other
           0  1
        0  1  4
        1  2  2

        The dot method also works if other is a np.array.

        >>> arr = np.array([[0, 1], [1, 2], [-1, -1], [2, 0]])
        >>> df.dot(arr)
           0  1
        0  1  4
        1  2  2

        Note how shuffling of the objects does not change the result.

        >>> s2 = s.reindex([1, 0, 2, 3])
        >>> df.dot(s2)
        0    -4
        1     5
        dtype: int64
        """
        if isinstance(other, (Series, DataFrame)):
            common = self.columns.union(other.index)
            if (len(common) > len(self.columns) or
                    len(common) > len(other.index)):
                raise ValueError('matrices are not aligned')

            left = self.reindex(columns=common, copy=False)
            right = other.reindex(index=common, copy=False)
            lvals = left.values
            rvals = right.values
        else:
            left = self
            lvals = self.values
            rvals = np.asarray(other)
            if lvals.shape[1] != rvals.shape[0]:
                raise ValueError('Dot product shape mismatch, '
                                 '{s} vs {r}'.format(s=lvals.shape,
                                                     r=rvals.shape))

        if isinstance(other, DataFrame):
            return self._constructor(np.dot(lvals, rvals), index=left.index,
                                     columns=other.columns)
        elif isinstance(other, Series):
            return Series(np.dot(lvals, rvals), index=left.index)
        elif isinstance(rvals, (np.ndarray, Index)):
            result = np.dot(lvals, rvals)
            if result.ndim == 2:
                return self._constructor(result, index=left.index)
            else:
                return Series(result, index=left.index)
        else:  # pragma: no cover
            raise TypeError('unsupported type: {oth}'.format(oth=type(other)))

    def __matmul__(self, other):
        """
        Matrix multiplication using binary `@` operator in Python>=3.5.
        """
        return self.dot(other)

    def __rmatmul__(self, other):
        """
        Matrix multiplication using binary `@` operator in Python>=3.5.
        """
        return self.T.dot(np.transpose(other)).T

    # ----------------------------------------------------------------------
    # IO methods (to / from other formats)

    @classmethod
    def from_dict(cls, data, orient='columns', dtype=None, columns=None):
        """
        Construct DataFrame from dict of array-like or dicts.

        Creates DataFrame object from dictionary by columns or by index
        allowing dtype specification.

        Parameters
        ----------
        data : dict
            Of the form {field : array-like} or {field : dict}.
        orient : {'columns', 'index'}, default 'columns'
            The "orientation" of the data. If the keys of the passed dict
            should be the columns of the resulting DataFrame, pass 'columns'
            (default). Otherwise if the keys should be rows, pass 'index'.
        dtype : dtype, default None
            Data type to force, otherwise infer.
        columns : list, default None
            Column labels to use when ``orient='index'``. Raises a ValueError
            if used with ``orient='columns'``.

            .. versionadded:: 0.23.0

        Returns
        -------
        DataFrame

        See Also
        --------
        DataFrame.from_records : DataFrame from ndarray (structured
            dtype), list of tuples, dict, or DataFrame.
        DataFrame : DataFrame object creation using constructor.
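
        Notes
        -----
        An illustrative note: when ``orient='index'`` and the dictionary
        values are themselves dictionaries (or Series), their inner keys
        become the columns of the result.

        >>> pd.DataFrame.from_dict({'row_1': {'a': 1, 'b': 2}},
        ...                        orient='index')
               a  b
        row_1  1  2
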
        Examples
        --------
        By default the keys of the dict become the DataFrame columns:

        >>> data = {'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']}
        >>> pd.DataFrame.from_dict(data)
           col_1 col_2
        0      3     a
        1      2     b
        2      1     c
        3      0     d

        Specify ``orient='index'`` to create the DataFrame using dictionary
        keys as rows:

        >>> data = {'row_1': [3, 2, 1, 0], 'row_2': ['a', 'b', 'c', 'd']}
        >>> pd.DataFrame.from_dict(data, orient='index')
               0  1  2  3
        row_1  3  2  1  0
        row_2  a  b  c  d

        When using the 'index' orientation, the column names can be
        specified manually:

        >>> pd.DataFrame.from_dict(data, orient='index',
        ...                        columns=['A', 'B', 'C', 'D'])
               A  B  C  D
        row_1  3  2  1  0
        row_2  a  b  c  d
        """
        index = None
        orient = orient.lower()
        if orient == 'index':
            if len(data) > 0:
                # TODO speed up Series case
                if isinstance(list(data.values())[0], (Series, dict)):
                    data = _from_nested_dict(data)
                else:
                    data, index = list(data.values()), list(data.keys())
        elif orient == 'columns':
            if columns is not None:
                raise ValueError("cannot use columns parameter with "
                                 "orient='columns'")
        else:  # pragma: no cover
            raise ValueError('only recognize index or columns for orient')

        return cls(data, index=index, columns=columns, dtype=dtype)

    def to_numpy(self, dtype=None, copy=False):
        """
        Convert the DataFrame to a NumPy array.

        .. versionadded:: 0.24.0

        By default, the dtype of the returned array will be the common NumPy
        dtype of all types in the DataFrame. For example, if the dtypes are
        ``float16`` and ``float32``, the resulting dtype will be ``float32``.
        This may require copying data and coercing values, which may be
        expensive.

        Parameters
        ----------
        dtype : str or numpy.dtype, optional
            The dtype to pass to :meth:`numpy.asarray`
        copy : bool, default False
            Whether to ensure that the returned value is not a view on
            another array. Note that ``copy=False`` does not *ensure* that
            ``to_numpy()`` is no-copy. Rather, ``copy=True`` ensures that
            a copy is made, even if not strictly necessary.

        Returns
        -------
        numpy.ndarray

        See Also
        --------
        Series.to_numpy : Similar method for Series.

        Examples
        --------
        >>> pd.DataFrame({"A": [1, 2], "B": [3, 4]}).to_numpy()
        array([[1, 3],
               [2, 4]])

        With heterogeneous data, the lowest common type will have to
        be used.

        >>> df = pd.DataFrame({"A": [1, 2], "B": [3.0, 4.5]})
        >>> df.to_numpy()
        array([[1. , 3. ],
               [2. , 4.5]])

        For a mix of numeric and non-numeric types, the output array will
        have object dtype.

        >>> df['C'] = pd.date_range('2000', periods=2)
        >>> df.to_numpy()
        array([[1, 3.0, Timestamp('2000-01-01 00:00:00')],
               [2, 4.5, Timestamp('2000-01-02 00:00:00')]], dtype=object)
        """
        result = np.array(self.values, dtype=dtype, copy=copy)
        return result

    def to_dict(self, orient='dict', into=dict):
        """
        Convert the DataFrame to a dictionary.

        The type of the key-value pairs can be customized with the parameters
        (see below).

        Parameters
        ----------
        orient : str {'dict', 'list', 'series', 'split', 'records', 'index'}
            Determines the type of the values of the dictionary.

            - 'dict' (default) : dict like {column -> {index -> value}}
            - 'list' : dict like {column -> [values]}
            - 'series' : dict like {column -> Series(values)}
            - 'split' : dict like
              {'index' -> [index], 'columns' -> [columns], 'data' -> [values]}
            - 'records' : list like
              [{column -> value}, ... , {column -> value}]
            - 'index' : dict like {index -> {column -> value}}

            Abbreviations are allowed. `s` indicates `series` and `sp`
            indicates `split`.

        into : class, default dict
            The collections.abc.Mapping subclass used for all Mappings
            in the return value.
Can be the actual class or an empty instance of the mapping type you want. If you want a collections.defaultdict, you must pass it initialized. .. versionadded:: 0.21.0 Returns ------- dict, list or collections.abc.Mapping Return a collections.abc.Mapping object representing the DataFrame. The resulting transformation depends on the `orient` parameter. See Also -------- DataFrame.from_dict: Create a DataFrame from a dictionary. DataFrame.to_json: Convert a DataFrame to JSON format. Examples -------- >>> df = pd.DataFrame({'col1': [1, 2], ... 'col2': [0.5, 0.75]}, ... index=['row1', 'row2']) >>> df col1 col2 row1 1 0.50 row2 2 0.75 >>> df.to_dict() {'col1': {'row1': 1, 'row2': 2}, 'col2': {'row1': 0.5, 'row2': 0.75}} You can specify the return orientation. >>> df.to_dict('series') {'col1': row1 1 row2 2 Name: col1, dtype: int64, 'col2': row1 0.50 row2 0.75 Name: col2, dtype: float64} >>> df.to_dict('split') {'index': ['row1', 'row2'], 'columns': ['col1', 'col2'], 'data': [[1, 0.5], [2, 0.75]]} >>> df.to_dict('records') [{'col1': 1, 'col2': 0.5}, {'col1': 2, 'col2': 0.75}] >>> df.to_dict('index') {'row1': {'col1': 1, 'col2': 0.5}, 'row2': {'col1': 2, 'col2': 0.75}} You can also specify the mapping type. >>> from collections import OrderedDict, defaultdict >>> df.to_dict(into=OrderedDict) OrderedDict([('col1', OrderedDict([('row1', 1), ('row2', 2)])), ('col2', OrderedDict([('row1', 0.5), ('row2', 0.75)]))]) If you want a `defaultdict`, you need to initialize it: >>> dd = defaultdict(list) >>> df.to_dict('records', into=dd) [defaultdict(<class 'list'>, {'col1': 1, 'col2': 0.5}), defaultdict(<class 'list'>, {'col1': 2, 'col2': 0.75})] """ if not self.columns.is_unique: warnings.warn("DataFrame columns are not unique, some " "columns will be omitted.", UserWarning, stacklevel=2) # GH16122 into_c = com.standardize_mapping(into) if orient.lower().startswith('d'): return into_c( (k, v.to_dict(into)) for k, v in self.items()) elif orient.lower().startswith('l'): return into_c((k, v.tolist()) for k, v in self.items()) elif orient.lower().startswith('sp'): return into_c((('index', self.index.tolist()), ('columns', self.columns.tolist()), ('data', [ list(map(com.maybe_box_datetimelike, t)) for t in self.itertuples(index=False, name=None) ]))) elif orient.lower().startswith('s'): return into_c((k, com.maybe_box_datetimelike(v)) for k, v in self.items()) elif orient.lower().startswith('r'): columns = self.columns.tolist() rows = (dict(zip(columns, row)) for row in self.itertuples(index=False, name=None)) return [ into_c((k, com.maybe_box_datetimelike(v)) for k, v in row.items()) for row in rows] elif orient.lower().startswith('i'): if not self.index.is_unique: raise ValueError( "DataFrame index must be unique for orient='index'." ) return into_c((t[0], dict(zip(self.columns, t[1:]))) for t in self.itertuples(name=None)) else: raise ValueError("orient '{o}' not understood".format(o=orient)) def to_gbq(self, destination_table, project_id=None, chunksize=None, reauth=False, if_exists='fail', auth_local_webserver=False, table_schema=None, location=None, progress_bar=True, credentials=None, verbose=None, private_key=None): """ Write a DataFrame to a Google BigQuery table. This function requires the `pandas-gbq package <https://pandas-gbq.readthedocs.io>`__. See the `How to authenticate with Google BigQuery <https://pandas-gbq.readthedocs.io/en/latest/howto/authentication.html>`__ guide for authentication instructions. 
Parameters ---------- destination_table : str Name of table to be written, in the form ``dataset.tablename``. project_id : str, optional Google BigQuery Account project ID. Optional when available from the environment. chunksize : int, optional Number of rows to be inserted in each chunk from the dataframe. Set to ``None`` to load the whole dataframe at once. reauth : bool, default False Force Google BigQuery to re-authenticate the user. This is useful if multiple accounts are used. if_exists : str, default 'fail' Behavior when the destination table exists. Value can be one of: ``'fail'`` If table exists, do nothing. ``'replace'`` If table exists, drop it, recreate it, and insert data. ``'append'`` If table exists, insert data. Create if does not exist. auth_local_webserver : bool, default False Use the `local webserver flow`_ instead of the `console flow`_ when getting user credentials. .. _local webserver flow: http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server .. _console flow: http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console *New in version 0.2.0 of pandas-gbq*. table_schema : list of dicts, optional List of BigQuery table fields to which according DataFrame columns conform to, e.g. ``[{'name': 'col1', 'type': 'STRING'},...]``. If schema is not provided, it will be generated according to dtypes of DataFrame columns. See BigQuery API documentation on available names of a field. *New in version 0.3.1 of pandas-gbq*. location : str, optional Location where the load job should run. See the `BigQuery locations documentation <https://cloud.google.com/bigquery/docs/dataset-locations>`__ for a list of available locations. The location must match that of the target dataset. *New in version 0.5.0 of pandas-gbq*. progress_bar : bool, default True Use the library `tqdm` to show the progress bar for the upload, chunk by chunk. *New in version 0.5.0 of pandas-gbq*. credentials : google.auth.credentials.Credentials, optional Credentials for accessing Google APIs. Use this parameter to override default credentials, such as to use Compute Engine :class:`google.auth.compute_engine.Credentials` or Service Account :class:`google.oauth2.service_account.Credentials` directly. *New in version 0.8.0 of pandas-gbq*. .. versionadded:: 0.24.0 verbose : bool, deprecated Deprecated in pandas-gbq version 0.4.0. Use the `logging module to adjust verbosity instead <https://pandas-gbq.readthedocs.io/en/latest/intro.html#logging>`__. private_key : str, deprecated Deprecated in pandas-gbq version 0.8.0. Use the ``credentials`` parameter and :func:`google.oauth2.service_account.Credentials.from_service_account_info` or :func:`google.oauth2.service_account.Credentials.from_service_account_file` instead. Service account private key in JSON format. Can be file path or string contents. This is useful for remote server authentication (eg. Jupyter/IPython notebook on remote host). See Also -------- pandas_gbq.to_gbq : This function in the pandas-gbq library. read_gbq : Read a DataFrame from Google BigQuery. 
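
        Examples
        --------
        A minimal, illustrative call; ``my_dataset.my_table`` and
        ``my-project`` are placeholder names, and the call is skipped
        under doctests because it requires BigQuery credentials.

        >>> df = pd.DataFrame({'my_string': list('abc'),
        ...                    'my_int64': list(range(1, 4))})
        >>> df.to_gbq('my_dataset.my_table',
        ...           project_id='my-project',
        ...           if_exists='fail')  # doctest: +SKIP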
""" from pandas.io import gbq gbq.to_gbq(self, destination_table, project_id=project_id, chunksize=chunksize, reauth=reauth, if_exists=if_exists, auth_local_webserver=auth_local_webserver, table_schema=table_schema, location=location, progress_bar=progress_bar, credentials=credentials, verbose=verbose, private_key=private_key) @classmethod def from_records(cls, data, index=None, exclude=None, columns=None, coerce_float=False, nrows=None): """ Convert structured or record ndarray to DataFrame. Parameters ---------- data : ndarray (structured dtype), list of tuples, dict, or DataFrame index : string, list of fields, array-like Field of array to use as the index, alternately a specific set of input labels to use exclude : sequence, default None Columns or fields to exclude columns : sequence, default None Column names to use. If the passed data do not have names associated with them, this argument provides names for the columns. Otherwise this argument indicates the order of the columns in the result (any names not found in the data will become all-NA columns) coerce_float : boolean, default False Attempt to convert values of non-string, non-numeric objects (like decimal.Decimal) to floating point, useful for SQL result sets nrows : int, default None Number of rows to read if data is an iterator Returns ------- DataFrame """ # Make a copy of the input columns so we can modify it if columns is not None: columns = ensure_index(columns) if is_iterator(data): if nrows == 0: return cls() try: first_row = next(data) except StopIteration: return cls(index=index, columns=columns) dtype = None if hasattr(first_row, 'dtype') and first_row.dtype.names: dtype = first_row.dtype values = [first_row] if nrows is None: values += data else: values.extend(itertools.islice(data, nrows - 1)) if dtype is not None: data = np.array(values, dtype=dtype) else: data = values if isinstance(data, dict): if columns is None: columns = arr_columns = ensure_index(sorted(data)) arrays = [data[k] for k in columns] else: arrays = [] arr_columns = [] for k, v in data.items(): if k in columns: arr_columns.append(k) arrays.append(v) arrays, arr_columns = reorder_arrays(arrays, arr_columns, columns) elif isinstance(data, (np.ndarray, DataFrame)): arrays, columns = to_arrays(data, columns) if columns is not None: columns = ensure_index(columns) arr_columns = columns else: arrays, arr_columns = to_arrays(data, columns, coerce_float=coerce_float) arr_columns = ensure_index(arr_columns) if columns is not None: columns = ensure_index(columns) else: columns = arr_columns if exclude is None: exclude = set() else: exclude = set(exclude) result_index = None if index is not None: if (isinstance(index, str) or not hasattr(index, "__iter__")): i = columns.get_loc(index) exclude.add(index) if len(arrays) > 0: result_index = Index(arrays[i], name=index) else: result_index = Index([], name=index) else: try: index_data = [arrays[arr_columns.get_loc(field)] for field in index] result_index = ensure_index_from_sequences(index_data, names=index) exclude.update(index) except Exception: result_index = index if any(exclude): arr_exclude = [x for x in exclude if x in arr_columns] to_remove = [arr_columns.get_loc(col) for col in arr_exclude] arrays = [v for i, v in enumerate(arrays) if i not in to_remove] arr_columns = arr_columns.drop(arr_exclude) columns = columns.drop(exclude) mgr = arrays_to_mgr(arrays, arr_columns, result_index, columns) return cls(mgr) def to_records(self, index=True, convert_datetime64=None, column_dtypes=None, index_dtypes=None): 
""" Convert DataFrame to a NumPy record array. Index will be included as the first field of the record array if requested. Parameters ---------- index : bool, default True Include index in resulting record array, stored in 'index' field or using the index label, if set. convert_datetime64 : bool, default None .. deprecated:: 0.23.0 Whether to convert the index to datetime.datetime if it is a DatetimeIndex. column_dtypes : str, type, dict, default None .. versionadded:: 0.24.0 If a string or type, the data type to store all columns. If a dictionary, a mapping of column names and indices (zero-indexed) to specific data types. index_dtypes : str, type, dict, default None .. versionadded:: 0.24.0 If a string or type, the data type to store all index levels. If a dictionary, a mapping of index level names and indices (zero-indexed) to specific data types. This mapping is applied only if `index=True`. Returns ------- numpy.recarray NumPy ndarray with the DataFrame labels as fields and each row of the DataFrame as entries. See Also -------- DataFrame.from_records: Convert structured or record ndarray to DataFrame. numpy.recarray: An ndarray that allows field access using attributes, analogous to typed columns in a spreadsheet. Examples -------- >>> df = pd.DataFrame({'A': [1, 2], 'B': [0.5, 0.75]}, ... index=['a', 'b']) >>> df A B a 1 0.50 b 2 0.75 >>> df.to_records() rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('index', 'O'), ('A', '<i8'), ('B', '<f8')]) If the DataFrame index has no label then the recarray field name is set to 'index'. If the index has a label then this is used as the field name: >>> df.index = df.index.rename("I") >>> df.to_records() rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('I', 'O'), ('A', '<i8'), ('B', '<f8')]) The index can be excluded from the record array: >>> df.to_records(index=False) rec.array([(1, 0.5 ), (2, 0.75)], dtype=[('A', '<i8'), ('B', '<f8')]) Data types can be specified for the columns: >>> df.to_records(column_dtypes={"A": "int32"}) rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('I', 'O'), ('A', '<i4'), ('B', '<f8')]) As well as for the index: >>> df.to_records(index_dtypes="<S2") rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)], dtype=[('I', 'S2'), ('A', '<i8'), ('B', '<f8')]) >>> index_dtypes = "<S{}".format(df.index.str.len().max()) >>> df.to_records(index_dtypes=index_dtypes) rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)], dtype=[('I', 'S1'), ('A', '<i8'), ('B', '<f8')]) """ if convert_datetime64 is not None: warnings.warn("The 'convert_datetime64' parameter is " "deprecated and will be removed in a future " "version", FutureWarning, stacklevel=2) if index: if is_datetime64_any_dtype(self.index) and convert_datetime64: ix_vals = [self.index.to_pydatetime()] else: if isinstance(self.index, MultiIndex): # array of tuples to numpy cols. 
copy copy copy ix_vals = list(map(np.array, zip(*self.index.values))) else: ix_vals = [self.index.values] arrays = ix_vals + [self[c].get_values() for c in self.columns] count = 0 index_names = list(self.index.names) if isinstance(self.index, MultiIndex): for i, n in enumerate(index_names): if n is None: index_names[i] = 'level_%d' % count count += 1 elif index_names[0] is None: index_names = ['index'] names = [str(name) for name in itertools.chain(index_names, self.columns)] else: arrays = [self[c].get_values() for c in self.columns] names = [str(c) for c in self.columns] index_names = [] index_len = len(index_names) formats = [] for i, v in enumerate(arrays): index = i # When the names and arrays are collected, we # first collect those in the DataFrame's index, # followed by those in its columns. # # Thus, the total length of the array is: # len(index_names) + len(DataFrame.columns). # # This check allows us to see whether we are # handling a name / array in the index or column. if index < index_len: dtype_mapping = index_dtypes name = index_names[index] else: index -= index_len dtype_mapping = column_dtypes name = self.columns[index] # We have a dictionary, so we get the data type # associated with the index or column (which can # be denoted by its name in the DataFrame or its # position in DataFrame's array of indices or # columns, whichever is applicable. if is_dict_like(dtype_mapping): if name in dtype_mapping: dtype_mapping = dtype_mapping[name] elif index in dtype_mapping: dtype_mapping = dtype_mapping[index] else: dtype_mapping = None # If no mapping can be found, use the array's # dtype attribute for formatting. # # A valid dtype must either be a type or # string naming a type. if dtype_mapping is None: formats.append(v.dtype) elif isinstance(dtype_mapping, (type, np.dtype, str)): formats.append(dtype_mapping) else: element = "row" if i < index_len else "column" msg = ("Invalid dtype {dtype} specified for " "{element} {name}").format(dtype=dtype_mapping, element=element, name=name) raise ValueError(msg) return np.rec.fromarrays( arrays, dtype={'names': names, 'formats': formats} ) @classmethod def from_items(cls, items, columns=None, orient='columns'): """ Construct a DataFrame from a list of tuples. .. deprecated:: 0.23.0 `from_items` is deprecated and will be removed in a future version. Use :meth:`DataFrame.from_dict(dict(items)) <DataFrame.from_dict>` instead. :meth:`DataFrame.from_dict(OrderedDict(items)) <DataFrame.from_dict>` may be used to preserve the key order. Convert (key, value) pairs to DataFrame. The keys will be the axis index (usually the columns, but depends on the specified orientation). The values should be arrays or Series. Parameters ---------- items : sequence of (key, value) pairs Values should be arrays or Series. columns : sequence of column labels, optional Must be passed if orient='index'. orient : {'columns', 'index'}, default 'columns' The "orientation" of the data. If the keys of the input correspond to column labels, pass 'columns' (default). Otherwise if the keys correspond to the index, pass 'index'. Returns ------- DataFrame """ warnings.warn("from_items is deprecated. Please use " "DataFrame.from_dict(dict(items), ...) instead. 
" "DataFrame.from_dict(OrderedDict(items)) may be used to " "preserve the key order.", FutureWarning, stacklevel=2) keys, values = zip(*items) if orient == 'columns': if columns is not None: columns = ensure_index(columns) idict = dict(items) if len(idict) < len(items): if not columns.equals(ensure_index(keys)): raise ValueError('With non-unique item names, passed ' 'columns must be identical') arrays = values else: arrays = [idict[k] for k in columns if k in idict] else: columns = ensure_index(keys) arrays = values # GH 17312 # Provide more informative error msg when scalar values passed try: return cls._from_arrays(arrays, columns, None) except ValueError: if not is_nested_list_like(values): raise ValueError('The value in each (key, value) pair ' 'must be an array, Series, or dict') elif orient == 'index': if columns is None: raise TypeError("Must pass columns with orient='index'") keys = ensure_index(keys) # GH 17312 # Provide more informative error msg when scalar values passed try: arr = np.array(values, dtype=object).T data = [lib.maybe_convert_objects(v) for v in arr] return cls._from_arrays(data, columns, keys) except TypeError: if not is_nested_list_like(values): raise ValueError('The value in each (key, value) pair ' 'must be an array, Series, or dict') else: # pragma: no cover raise ValueError("'orient' must be either 'columns' or 'index'") @classmethod def _from_arrays(cls, arrays, columns, index, dtype=None): mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) return cls(mgr) @classmethod def from_csv(cls, path, header=0, sep=',', index_col=0, parse_dates=True, encoding=None, tupleize_cols=None, infer_datetime_format=False): """ Read CSV file. .. deprecated:: 0.21.0 Use :func:`read_csv` instead. It is preferable to use the more powerful :func:`read_csv` for most general purposes, but ``from_csv`` makes for an easy roundtrip to and from a file (the exact counterpart of ``to_csv``), especially with a DataFrame of time series data. This method only differs from the preferred :func:`read_csv` in some defaults: - `index_col` is ``0`` instead of ``None`` (take first column as index by default) - `parse_dates` is ``True`` instead of ``False`` (try parsing the index as datetime by default) So a ``pd.DataFrame.from_csv(path)`` can be replaced by ``pd.read_csv(path, index_col=0, parse_dates=True)``. Parameters ---------- path : string file path or file handle / StringIO header : int, default 0 Row to use as header (skip prior rows) sep : string, default ',' Field delimiter index_col : int or sequence, default 0 Column to use for index. If a sequence is given, a MultiIndex is used. Different default from read_table parse_dates : boolean, default True Parse dates. Different default from read_table tupleize_cols : boolean, default False write multi_index columns as a list of tuples (if True) or new (expanded format) if False) infer_datetime_format : boolean, default False If True and `parse_dates` is True for a column, try to infer the datetime format based on the first datetime string. If the format can be inferred, there often will be a large parsing speed-up. Returns ------- DataFrame See Also -------- read_csv """ warnings.warn("from_csv is deprecated. Please use read_csv(...) " "instead. 
Note that some of the default arguments are "
                      "different, so please refer to the documentation "
                      "for from_csv when changing your function calls",
                      FutureWarning, stacklevel=2)

        from pandas.io.parsers import read_csv
        return read_csv(path, header=header, sep=sep,
                        parse_dates=parse_dates, index_col=index_col,
                        encoding=encoding, tupleize_cols=tupleize_cols,
                        infer_datetime_format=infer_datetime_format)

    def to_sparse(self, fill_value=None, kind='block'):
        """
        Convert to SparseDataFrame.

        .. deprecated:: 0.25.0

        Create the sparse version of the DataFrame, meaning that any data
        matching a specific value is omitted in the representation.
        The sparse DataFrame allows for a more efficient storage.

        Parameters
        ----------
        fill_value : float, default None
            The specific value that should be omitted in the representation.
        kind : {'block', 'integer'}, default 'block'
            The kind of the SparseIndex tracking where data is not equal to
            the fill value:

            - 'block' tracks only the locations and sizes of blocks of data.
            - 'integer' keeps an array with all the locations of the data.

            In most cases 'block' is recommended, since it's more memory
            efficient.

        Returns
        -------
        SparseDataFrame
            The sparse representation of the DataFrame.

        See Also
        --------
        DataFrame.to_dense :
            Converts the DataFrame back to its dense form.

        Examples
        --------
        >>> df = pd.DataFrame([(np.nan, np.nan),
        ...                    (1., np.nan),
        ...                    (np.nan, 1.)])
        >>> df
             0    1
        0  NaN  NaN
        1  1.0  NaN
        2  NaN  1.0
        >>> type(df)
        <class 'pandas.core.frame.DataFrame'>

        >>> sdf = df.to_sparse()  # doctest: +SKIP
        >>> sdf  # doctest: +SKIP
             0    1
        0  NaN  NaN
        1  1.0  NaN
        2  NaN  1.0
        >>> type(sdf)  # doctest: +SKIP
        <class 'pandas.core.sparse.frame.SparseDataFrame'>
        """
        warnings.warn("DataFrame.to_sparse is deprecated and will be removed "
                      "in a future version", FutureWarning, stacklevel=2)

        from pandas.core.sparse.api import SparseDataFrame
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", message="SparseDataFrame")
            return SparseDataFrame(self._series, index=self.index,
                                   columns=self.columns, default_kind=kind,
                                   default_fill_value=fill_value)

    @deprecate_kwarg(old_arg_name='encoding', new_arg_name=None)
    def to_stata(self, fname, convert_dates=None, write_index=True,
                 encoding="latin-1", byteorder=None, time_stamp=None,
                 data_label=None, variable_labels=None, version=114,
                 convert_strl=None):
        """
        Export DataFrame object to Stata dta format.

        Writes the DataFrame to a Stata dataset file.
        "dta" files contain a Stata dataset.

        Parameters
        ----------
        fname : str, buffer or path object
            String, path object (pathlib.Path or py._path.local.LocalPath) or
            object implementing a binary write() function. If using a buffer
            then the buffer will not be automatically closed after the file
            data has been written.
        convert_dates : dict
            Dictionary mapping columns containing datetime types to stata
            internal format to use when writing the dates. Options are 'tc',
            'td', 'tm', 'tw', 'th', 'tq', 'ty'. Column can be either an
            integer or a name. Datetime columns that do not have a conversion
            type specified will be converted to 'tc'. Raises
            NotImplementedError if a datetime column has timezone information.
        write_index : bool
            Write the index to Stata dataset.
        encoding : str
            Default is latin-1. Unicode is not supported.
        byteorder : str
            Can be ">", "<", "little", or "big". default is `sys.byteorder`.
        time_stamp : datetime
            A datetime to use as file creation date.  Default is the current
            time.
        data_label : str, optional
            A label for the data set.  Must be 80 characters or smaller.
        variable_labels : dict
            Dictionary containing columns as keys and variable labels as
            values. Each label must be 80 characters or smaller.

            .. versionadded:: 0.19.0

        version : {114, 117}, default 114
            Version to use in the output dta file. Version 114 can be read by
            Stata 10 and later. Version 117 can be read by Stata 13 or later.
            Version 114 limits string variables to 244 characters or fewer
            while 117 allows strings with lengths up to 2,000,000 characters.

            .. versionadded:: 0.23.0

        convert_strl : list, optional
            List of column names to convert to Stata StrL format. Only
            available if version is 117. Storing strings in the StrL format
            can produce smaller dta files if strings have more than
            8 characters and values are repeated.

            .. versionadded:: 0.23.0

        Raises
        ------
        NotImplementedError
            * If datetimes contain timezone information
            * Column dtype is not representable in Stata
        ValueError
            * Columns listed in convert_dates are neither datetime64[ns]
              nor datetime.datetime
            * Column listed in convert_dates is not in DataFrame
            * Categorical label contains more than 32,000 characters

            .. versionadded:: 0.19.0

        See Also
        --------
        read_stata : Import Stata data files.
        io.stata.StataWriter : Low-level writer for Stata data files.
        io.stata.StataWriter117 : Low-level writer for version 117 files.

        Examples
        --------
        >>> df = pd.DataFrame({'animal': ['falcon', 'parrot', 'falcon',
        ...                               'parrot'],
        ...                    'speed': [350, 18, 361, 15]})
        >>> df.to_stata('animals.dta')  # doctest: +SKIP
        """
        kwargs = {}
        if version not in (114, 117):
            raise ValueError('Only formats 114 and 117 supported.')
        if version == 114:
            if convert_strl is not None:
                raise ValueError('strl support is only available when using '
                                 'format 117')
            from pandas.io.stata import StataWriter as statawriter
        else:
            from pandas.io.stata import StataWriter117 as statawriter
            kwargs['convert_strl'] = convert_strl

        writer = statawriter(fname, self, convert_dates=convert_dates,
                             byteorder=byteorder, time_stamp=time_stamp,
                             data_label=data_label, write_index=write_index,
                             variable_labels=variable_labels, **kwargs)
        writer.write_file()

    def to_feather(self, fname):
        """
        Write out the binary feather-format for DataFrames.

        .. versionadded:: 0.20.0

        Parameters
        ----------
        fname : str
            string file path
        """
        from pandas.io.feather_format import to_feather
        to_feather(self, fname)

    def to_parquet(self, fname, engine='auto', compression='snappy',
                   index=None, partition_cols=None, **kwargs):
        """
        Write a DataFrame to the binary parquet format.

        .. versionadded:: 0.21.0

        This function writes the dataframe as a `parquet file
        <https://parquet.apache.org/>`_. You can choose different parquet
        backends, and have the option of compression. See
        :ref:`the user guide <io.parquet>` for more details.

        Parameters
        ----------
        fname : str
            File path or Root Directory path. Will be used as Root Directory
            path while writing a partitioned dataset.

            .. versionchanged:: 0.24.0

        engine : {'auto', 'pyarrow', 'fastparquet'}, default 'auto'
            Parquet library to use. If 'auto', then the option
            ``io.parquet.engine`` is used. The default ``io.parquet.engine``
            behavior is to try 'pyarrow', falling back to 'fastparquet' if
            'pyarrow' is unavailable.
        compression : {'snappy', 'gzip', 'brotli', None}, default 'snappy'
            Name of the compression to use. Use ``None`` for no compression.
        index : bool, default None
            If ``True``, include the dataframe's index(es) in the file output.
            If ``False``, they will not be written to the file. If ``None``,
            the behavior depends on the chosen engine.

            ..
versionadded:: 0.24.0 partition_cols : list, optional, default None Column names by which to partition the dataset Columns are partitioned in the order they are given .. versionadded:: 0.24.0 **kwargs Additional arguments passed to the parquet library. See :ref:`pandas io <io.parquet>` for more details. See Also -------- read_parquet : Read a parquet file. DataFrame.to_csv : Write a csv file. DataFrame.to_sql : Write to a sql table. DataFrame.to_hdf : Write to hdf. Notes ----- This function requires either the `fastparquet <https://pypi.org/project/fastparquet>`_ or `pyarrow <https://arrow.apache.org/docs/python/>`_ library. Examples -------- >>> df = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]}) >>> df.to_parquet('df.parquet.gzip', ... compression='gzip') # doctest: +SKIP >>> pd.read_parquet('df.parquet.gzip') # doctest: +SKIP col1 col2 0 1 3 1 2 4 """ from pandas.io.parquet import to_parquet to_parquet(self, fname, engine, compression=compression, index=index, partition_cols=partition_cols, **kwargs) @Substitution(header='Whether to print column labels, default True', col_space_type='str or int', col_space='The minimum width of each column in CSS length ' 'units. An int is assumed to be px units.\n\n' ' .. versionadded:: 0.25.0\n' ' Ability to use str') @Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring) def to_html(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', bold_rows=True, classes=None, escape=True, notebook=False, border=None, table_id=None, render_links=False): """ Render a DataFrame as an HTML table. %(shared_params)s bold_rows : bool, default True Make the row labels bold in the output. classes : str or list or tuple, default None CSS class(es) to apply to the resulting html table. escape : bool, default True Convert the characters <, >, and & to HTML-safe sequences. notebook : {True, False}, default False Whether the generated HTML is for IPython Notebook. border : int A ``border=border`` attribute is included in the opening `<table>` tag. Default ``pd.options.display.html.border``. .. versionadded:: 0.19.0 table_id : str, optional A css id is included in the opening `<table>` tag if specified. .. versionadded:: 0.23.0 render_links : bool, default False Convert URLs to HTML links. .. versionadded:: 0.24.0 %(returns)s See Also -------- to_string : Convert DataFrame to a string. """ if (justify is not None and justify not in fmt._VALID_JUSTIFY_PARAMETERS): raise ValueError("Invalid value for justify parameter") formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, bold_rows=bold_rows, escape=escape, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, table_id=table_id, render_links=render_links) # TODO: a generic formatter wld b in DataFrameFormatter formatter.to_html(classes=classes, notebook=notebook, border=border) if buf is None: return formatter.buf.getvalue() # ---------------------------------------------------------------------- def info(self, verbose=None, buf=None, max_cols=None, memory_usage=None, null_counts=None): """ Print a concise summary of a DataFrame. 

        This method prints information about a DataFrame including
        the index dtype and column dtypes, non-null values and memory usage.

        Parameters
        ----------
        verbose : bool, optional
            Whether to print the full summary. By default, the setting in
            ``pandas.options.display.max_info_columns`` is followed.
        buf : writable buffer, defaults to sys.stdout
            Where to send the output. By default, the output is printed to
            sys.stdout. Pass a writable buffer if you need to further process
            the output.
        max_cols : int, optional
            When to switch from the verbose to the truncated output. If the
            DataFrame has more than `max_cols` columns, the truncated output
            is used. By default, the setting in
            ``pandas.options.display.max_info_columns`` is used.
        memory_usage : bool, str, optional
            Specifies whether total memory usage of the DataFrame
            elements (including the index) should be displayed. By default,
            this follows the ``pandas.options.display.memory_usage`` setting.

            True always shows memory usage. False never shows memory usage.
            A value of 'deep' is equivalent to "True with deep introspection".
            Memory usage is shown in human-readable units (base-2
            representation). Without deep introspection a memory estimation is
            made based on column dtype and number of rows assuming values
            consume the same memory amount for corresponding dtypes. With deep
            memory introspection, a real memory usage calculation is performed
            at the cost of computational resources.
        null_counts : bool, optional
            Whether to show the non-null counts. By default, this is shown
            only if the frame is smaller than
            ``pandas.options.display.max_info_rows`` and
            ``pandas.options.display.max_info_columns``. A value of True
            always shows the counts, and False never shows the counts.

        Returns
        -------
        None
            This method prints a summary of a DataFrame and returns None.

        See Also
        --------
        DataFrame.describe: Generate descriptive statistics of DataFrame
            columns.
        DataFrame.memory_usage: Memory usage of DataFrame columns.

        Examples
        --------
        >>> int_values = [1, 2, 3, 4, 5]
        >>> text_values = ['alpha', 'beta', 'gamma', 'delta', 'epsilon']
        >>> float_values = [0.0, 0.25, 0.5, 0.75, 1.0]
        >>> df = pd.DataFrame({"int_col": int_values, "text_col": text_values,
        ...                   "float_col": float_values})
        >>> df
           int_col text_col  float_col
        0        1    alpha       0.00
        1        2     beta       0.25
        2        3    gamma       0.50
        3        4    delta       0.75
        4        5  epsilon       1.00

        Prints information of all columns:

        >>> df.info(verbose=True)
        <class 'pandas.core.frame.DataFrame'>
        RangeIndex: 5 entries, 0 to 4
        Data columns (total 3 columns):
        int_col      5 non-null int64
        text_col     5 non-null object
        float_col    5 non-null float64
        dtypes: float64(1), int64(1), object(1)
        memory usage: 248.0+ bytes

        Prints a summary of the column count and dtypes but no per-column
        information:

        >>> df.info(verbose=False)
        <class 'pandas.core.frame.DataFrame'>
        RangeIndex: 5 entries, 0 to 4
        Columns: 3 entries, int_col to float_col
        dtypes: float64(1), int64(1), object(1)
        memory usage: 248.0+ bytes

        Pipe output of DataFrame.info to a buffer instead of sys.stdout, get
        the buffer content and write it to a text file:

        >>> import io
        >>> buffer = io.StringIO()
        >>> df.info(buf=buffer)
        >>> s = buffer.getvalue()
        >>> with open("df_info.txt", "w",
        ...           encoding="utf-8") as f:  # doctest: +SKIP
        ...     f.write(s)
        260

        The `memory_usage` parameter allows deep introspection mode, which is
        especially useful for big DataFrames and for fine-tuning memory
        optimization:

        >>> random_strings_array = np.random.choice(['a', 'b', 'c'], 10 ** 6)
        >>> df = pd.DataFrame({
        ...     'column_1': np.random.choice(['a', 'b', 'c'], 10 ** 6),
        ...
'column_2': np.random.choice(['a', 'b', 'c'], 10 ** 6), ... 'column_3': np.random.choice(['a', 'b', 'c'], 10 ** 6) ... }) >>> df.info() <class 'pandas.core.frame.DataFrame'> RangeIndex: 1000000 entries, 0 to 999999 Data columns (total 3 columns): column_1 1000000 non-null object column_2 1000000 non-null object column_3 1000000 non-null object dtypes: object(3) memory usage: 22.9+ MB >>> df.info(memory_usage='deep') <class 'pandas.core.frame.DataFrame'> RangeIndex: 1000000 entries, 0 to 999999 Data columns (total 3 columns): column_1 1000000 non-null object column_2 1000000 non-null object column_3 1000000 non-null object dtypes: object(3) memory usage: 188.8 MB """ if buf is None: # pragma: no cover buf = sys.stdout lines = [] lines.append(str(type(self))) lines.append(self.index._summary()) if len(self.columns) == 0: lines.append('Empty {name}'.format(name=type(self).__name__)) fmt.buffer_put_lines(buf, lines) return cols = self.columns # hack if max_cols is None: max_cols = get_option('display.max_info_columns', len(self.columns) + 1) max_rows = get_option('display.max_info_rows', len(self) + 1) if null_counts is None: show_counts = ((len(self.columns) <= max_cols) and (len(self) < max_rows)) else: show_counts = null_counts exceeds_info_cols = len(self.columns) > max_cols def _verbose_repr(): lines.append('Data columns (total %d columns):' % len(self.columns)) space = max(len(pprint_thing(k)) for k in self.columns) + 4 counts = None tmpl = "{count}{dtype}" if show_counts: counts = self.count() if len(cols) != len(counts): # pragma: no cover raise AssertionError( 'Columns must equal counts ' '({cols:d} != {counts:d})'.format( cols=len(cols), counts=len(counts))) tmpl = "{count} non-null {dtype}" dtypes = self.dtypes for i, col in enumerate(self.columns): dtype = dtypes.iloc[i] col = pprint_thing(col) count = "" if show_counts: count = counts.iloc[i] lines.append(_put_str(col, space) + tmpl.format(count=count, dtype=dtype)) def _non_verbose_repr(): lines.append(self.columns._summary(name='Columns')) def _sizeof_fmt(num, size_qualifier): # returns size in human readable format for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: if num < 1024.0: return ("{num:3.1f}{size_q} " "{x}".format(num=num, size_q=size_qualifier, x=x)) num /= 1024.0 return "{num:3.1f}{size_q} {pb}".format(num=num, size_q=size_qualifier, pb='PB') if verbose: _verbose_repr() elif verbose is False: # specifically set to False, not nesc None _non_verbose_repr() else: if exceeds_info_cols: _non_verbose_repr() else: _verbose_repr() counts = self.get_dtype_counts() dtypes = ['{k}({kk:d})'.format(k=k[0], kk=k[1]) for k in sorted(counts.items())] lines.append('dtypes: {types}'.format(types=', '.join(dtypes))) if memory_usage is None: memory_usage = get_option('display.memory_usage') if memory_usage: # append memory usage of df to display size_qualifier = '' if memory_usage == 'deep': deep = True else: # size_qualifier is just a best effort; not guaranteed to catch # all cases (e.g., it misses categorical data even with object # categories) deep = False if ('object' in counts or self.index._is_memory_usage_qualified()): size_qualifier = '+' mem_usage = self.memory_usage(index=True, deep=deep).sum() lines.append("memory usage: {mem}\n".format( mem=_sizeof_fmt(mem_usage, size_qualifier))) fmt.buffer_put_lines(buf, lines) def memory_usage(self, index=True, deep=False): """ Return the memory usage of each column in bytes. The memory usage can optionally include the contribution of the index and elements of `object` dtype. 
This value is displayed in `DataFrame.info` by default. This can be suppressed by setting ``pandas.options.display.memory_usage`` to False. Parameters ---------- index : bool, default True Specifies whether to include the memory usage of the DataFrame's index in returned Series. If ``index=True``, the memory usage of the index is the first item in the output. deep : bool, default False If True, introspect the data deeply by interrogating `object` dtypes for system-level memory consumption, and include it in the returned values. Returns ------- Series A Series whose index is the original column names and whose values is the memory usage of each column in bytes. See Also -------- numpy.ndarray.nbytes : Total bytes consumed by the elements of an ndarray. Series.memory_usage : Bytes consumed by a Series. Categorical : Memory-efficient array for string values with many repeated values. DataFrame.info : Concise summary of a DataFrame. Examples -------- >>> dtypes = ['int64', 'float64', 'complex128', 'object', 'bool'] >>> data = dict([(t, np.ones(shape=5000).astype(t)) ... for t in dtypes]) >>> df = pd.DataFrame(data) >>> df.head() int64 float64 complex128 object bool 0 1 1.0 1.0+0.0j 1 True 1 1 1.0 1.0+0.0j 1 True 2 1 1.0 1.0+0.0j 1 True 3 1 1.0 1.0+0.0j 1 True 4 1 1.0 1.0+0.0j 1 True >>> df.memory_usage() Index 128 int64 40000 float64 40000 complex128 80000 object 40000 bool 5000 dtype: int64 >>> df.memory_usage(index=False) int64 40000 float64 40000 complex128 80000 object 40000 bool 5000 dtype: int64 The memory footprint of `object` dtype columns is ignored by default: >>> df.memory_usage(deep=True) Index 128 int64 40000 float64 40000 complex128 80000 object 160000 bool 5000 dtype: int64 Use a Categorical for efficient storage of an object-dtype column with many repeated values. >>> df['object'].astype('category').memory_usage(deep=True) 5216 """ result = Series([c.memory_usage(index=False, deep=deep) for col, c in self.iteritems()], index=self.columns) if index: result = Series(self.index.memory_usage(deep=deep), index=['Index']).append(result) return result def transpose(self, *args, **kwargs): """ Transpose index and columns. Reflect the DataFrame over its main diagonal by writing rows as columns and vice-versa. The property :attr:`.T` is an accessor to the method :meth:`transpose`. Parameters ---------- copy : bool, default False If True, the underlying data is copied. Otherwise (default), no copy is made if possible. *args, **kwargs Additional keywords have no effect but might be accepted for compatibility with numpy. Returns ------- DataFrame The transposed DataFrame. See Also -------- numpy.transpose : Permute the dimensions of a given array. Notes ----- Transposing a DataFrame with mixed dtypes will result in a homogeneous DataFrame with the `object` dtype. In such a case, a copy of the data is always made. Examples -------- **Square DataFrame with homogeneous dtype** >>> d1 = {'col1': [1, 2], 'col2': [3, 4]} >>> df1 = pd.DataFrame(data=d1) >>> df1 col1 col2 0 1 3 1 2 4 >>> df1_transposed = df1.T # or df1.transpose() >>> df1_transposed 0 1 col1 1 2 col2 3 4 When the dtype is homogeneous in the original DataFrame, we get a transposed DataFrame with the same dtype: >>> df1.dtypes col1 int64 col2 int64 dtype: object >>> df1_transposed.dtypes 0 int64 1 int64 dtype: object **Non-square DataFrame with mixed dtypes** >>> d2 = {'name': ['Alice', 'Bob'], ... 'score': [9.5, 8], ... 'employed': [False, True], ... 
'kids': [0, 0]} >>> df2 = pd.DataFrame(data=d2) >>> df2 name score employed kids 0 Alice 9.5 False 0 1 Bob 8.0 True 0 >>> df2_transposed = df2.T # or df2.transpose() >>> df2_transposed 0 1 name Alice Bob score 9.5 8 employed False True kids 0 0 When the DataFrame has mixed dtypes, we get a transposed DataFrame with the `object` dtype: >>> df2.dtypes name object score float64 employed bool kids int64 dtype: object >>> df2_transposed.dtypes 0 object 1 object dtype: object """ nv.validate_transpose(args, dict()) return super().transpose(1, 0, **kwargs) T = property(transpose) # ---------------------------------------------------------------------- # Picklability # legacy pickle formats def _unpickle_frame_compat(self, state): # pragma: no cover if len(state) == 2: # pragma: no cover series, idx = state columns = sorted(series) else: series, cols, idx = state columns = com._unpickle_array(cols) index = com._unpickle_array(idx) self._data = self._init_dict(series, index, columns, None) def _unpickle_matrix_compat(self, state): # pragma: no cover # old unpickling (vals, idx, cols), object_state = state index = com._unpickle_array(idx) dm = DataFrame(vals, index=index, columns=com._unpickle_array(cols), copy=False) if object_state is not None: ovals, _, ocols = object_state objects = DataFrame(ovals, index=index, columns=com._unpickle_array(ocols), copy=False) dm = dm.join(objects) self._data = dm._data # ---------------------------------------------------------------------- # Getting and setting elements def get_value(self, index, col, takeable=False): """ Quickly retrieve single value at passed column and index. .. deprecated:: 0.21.0 Use .at[] or .iat[] accessors instead. Parameters ---------- index : row label col : column label takeable : interpret the index/col as indexers, default False Returns ------- scalar """ warnings.warn("get_value is deprecated and will be removed " "in a future release. Please use " ".at[] or .iat[] accessors instead", FutureWarning, stacklevel=2) return self._get_value(index, col, takeable=takeable) def _get_value(self, index, col, takeable=False): if takeable: series = self._iget_item_cache(col) return com.maybe_box_datetimelike(series._values[index]) series = self._get_item_cache(col) engine = self.index._engine try: return engine.get_value(series._values, index) except KeyError: # GH 20629 if self.index.nlevels > 1: # partial indexing forbidden raise except (TypeError, ValueError): pass # we cannot handle direct indexing # use positional col = self.columns.get_loc(col) index = self.index.get_loc(index) return self._get_value(index, col, takeable=True) _get_value.__doc__ = get_value.__doc__ def set_value(self, index, col, value, takeable=False): """ Put single value at passed column and index. .. deprecated:: 0.21.0 Use .at[] or .iat[] accessors instead. Parameters ---------- index : row label col : column label value : scalar takeable : interpret the index/col as indexers, default False Returns ------- DataFrame If label pair is contained, will be reference to calling DataFrame, otherwise a new object. """ warnings.warn("set_value is deprecated and will be removed " "in a future release. 
Please use " ".at[] or .iat[] accessors instead", FutureWarning, stacklevel=2) return self._set_value(index, col, value, takeable=takeable) def _set_value(self, index, col, value, takeable=False): try: if takeable is True: series = self._iget_item_cache(col) return series._set_value(index, value, takeable=True) series = self._get_item_cache(col) engine = self.index._engine engine.set_value(series._values, index, value) return self except (KeyError, TypeError): # set using a non-recursive method & reset the cache if takeable: self.iloc[index, col] = value else: self.loc[index, col] = value self._item_cache.pop(col, None) return self _set_value.__doc__ = set_value.__doc__ def _ixs(self, i, axis=0): """ Parameters ---------- i : int, slice, or sequence of integers axis : int Notes ----- If slice passed, the resulting data will be a view. """ # irow if axis == 0: if isinstance(i, slice): return self[i] else: label = self.index[i] if isinstance(label, Index): # a location index by definition result = self.take(i, axis=axis) copy = True else: new_values = self._data.fast_xs(i) if is_scalar(new_values): return new_values # if we are a copy, mark as such copy = (isinstance(new_values, np.ndarray) and new_values.base is None) result = self._constructor_sliced(new_values, index=self.columns, name=self.index[i], dtype=new_values.dtype) result._set_is_copy(self, copy=copy) return result # icol else: label = self.columns[i] if isinstance(i, slice): # need to return view lab_slice = slice(label[0], label[-1]) return self.loc[:, lab_slice] else: if isinstance(label, Index): return self._take(i, axis=1) index_len = len(self.index) # if the values returned are not the same length # as the index (iow a not found value), iget returns # a 0-len ndarray. This is effectively catching # a numpy error (as numpy should really raise) values = self._data.iget(i) if index_len and not len(values): values = np.array([np.nan] * index_len, dtype=object) result = self._box_col_values(values, label) # this is a cached value, mark it so result._set_as_cached(label, self) return result def __getitem__(self, key): key = lib.item_from_zerodim(key) key = com.apply_if_callable(key, self) # shortcut if the key is in columns try: if self.columns.is_unique and key in self.columns: if self.columns.nlevels > 1: return self._getitem_multilevel(key) return self._get_item_cache(key) except (TypeError, ValueError): # The TypeError correctly catches non hashable "key" (e.g. list) # The ValueError can be removed once GH #21729 is fixed pass # Do we have a slicer (on rows)? indexer = convert_to_index_sliceable(self, key) if indexer is not None: return self._slice(indexer, axis=0) # Do we have a (boolean) DataFrame? if isinstance(key, DataFrame): return self._getitem_frame(key) # Do we have a (boolean) 1d indexer? 
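        # (for example, a boolean Series produced by a comparison such as
        # ``df['A'] > 0``, or a boolean ndarray of the same length as the
        # rows)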
if com.is_bool_indexer(key): return self._getitem_bool_array(key) # We are left with two options: a single key, and a collection of keys, # We interpret tuples as collections only for non-MultiIndex is_single_key = isinstance(key, tuple) or not is_list_like(key) if is_single_key: if self.columns.nlevels > 1: return self._getitem_multilevel(key) indexer = self.columns.get_loc(key) if is_integer(indexer): indexer = [indexer] else: if is_iterator(key): key = list(key) indexer = self.loc._convert_to_indexer(key, axis=1, raise_missing=True) # take() does not accept boolean indexers if getattr(indexer, "dtype", None) == bool: indexer = np.where(indexer)[0] data = self._take(indexer, axis=1) if is_single_key: # What does looking for a single key in a non-unique index return? # The behavior is inconsistent. It returns a Series, except when # - the key itself is repeated (test on data.shape, #9519), or # - we have a MultiIndex on columns (test on self.columns, #21309) if data.shape[1] == 1 and not isinstance(self.columns, MultiIndex): data = data[key] return data def _getitem_bool_array(self, key): # also raises Exception if object array with NA values # warning here just in case -- previously __setitem__ was # reindexing but __getitem__ was not; it seems more reasonable to # go with the __setitem__ behavior since that is more consistent # with all other indexing behavior if isinstance(key, Series) and not key.index.equals(self.index): warnings.warn("Boolean Series key will be reindexed to match " "DataFrame index.", UserWarning, stacklevel=3) elif len(key) != len(self.index): raise ValueError('Item wrong length %d instead of %d.' % (len(key), len(self.index))) # check_bool_indexer will throw exception if Series key cannot # be reindexed to match DataFrame rows key = check_bool_indexer(self.index, key) indexer = key.nonzero()[0] return self._take(indexer, axis=0) def _getitem_multilevel(self, key): loc = self.columns.get_loc(key) if isinstance(loc, (slice, Series, np.ndarray, Index)): new_columns = self.columns[loc] result_columns = maybe_droplevels(new_columns, key) if self._is_mixed_type: result = self.reindex(columns=new_columns) result.columns = result_columns else: new_values = self.values[:, loc] result = self._constructor(new_values, index=self.index, columns=result_columns) result = result.__finalize__(self) # If there is only one column being returned, and its name is # either an empty string, or a tuple with an empty string as its # first element, then treat the empty string as a placeholder # and return the column as if the user had provided that empty # string in the key. If the result is a Series, exclude the # implied empty string from its name. if len(result.columns) == 1: top = result.columns[0] if isinstance(top, tuple): top = top[0] if top == '': result = result[''] if isinstance(result, Series): result = self._constructor_sliced(result, index=self.index, name=key) result._set_is_copy(self) return result else: return self._get_item_cache(key) def _getitem_frame(self, key): if key.values.size and not is_bool_dtype(key.values): raise ValueError('Must pass DataFrame with boolean values only') return self.where(key) def query(self, expr, inplace=False, **kwargs): """ Query the columns of a DataFrame with a boolean expression. Parameters ---------- expr : str The query string to evaluate. You can refer to variables in the environment by prefixing them with an '@' character like ``@a + b``. .. 
versionadded:: 0.25.0 You can refer to column names that contain spaces by surrounding them in backticks. For example, if one of your columns is called ``a a`` and you want to sum it with ``b``, your query should be ```a a` + b``. inplace : bool Whether the query should modify the data in place or return a modified copy. **kwargs See the documentation for :func:`eval` for complete details on the keyword arguments accepted by :meth:`DataFrame.query`. .. versionadded:: 0.18.0 Returns ------- DataFrame DataFrame resulting from the provided query expression. See Also -------- eval : Evaluate a string describing operations on DataFrame columns. DataFrame.eval : Evaluate a string describing operations on DataFrame columns. Notes ----- The result of the evaluation of this expression is first passed to :attr:`DataFrame.loc` and if that fails because of a multidimensional key (e.g., a DataFrame) then the result will be passed to :meth:`DataFrame.__getitem__`. This method uses the top-level :func:`eval` function to evaluate the passed query. The :meth:`~pandas.DataFrame.query` method uses a slightly modified Python syntax by default. For example, the ``&`` and ``|`` (bitwise) operators have the precedence of their boolean cousins, :keyword:`and` and :keyword:`or`. This *is* syntactically valid Python, however the semantics are different. You can change the semantics of the expression by passing the keyword argument ``parser='python'``. This enforces the same semantics as evaluation in Python space. Likewise, you can pass ``engine='python'`` to evaluate an expression using Python itself as a backend. This is not recommended as it is inefficient compared to using ``numexpr`` as the engine. The :attr:`DataFrame.index` and :attr:`DataFrame.columns` attributes of the :class:`~pandas.DataFrame` instance are placed in the query namespace by default, which allows you to treat both the index and columns of the frame as a column in the frame. The identifier ``index`` is used for the frame index; you can also use the name of the index to identify it in a query. Please note that Python keywords may not be used as identifiers. For further details and examples see the ``query`` documentation in :ref:`indexing <indexing.query>`. Examples -------- >>> df = pd.DataFrame({'A': range(1, 6), ... 'B': range(10, 0, -2), ... 'C C': range(10, 5, -1)}) >>> df A B C C 0 1 10 10 1 2 8 9 2 3 6 8 3 4 4 7 4 5 2 6 >>> df.query('A > B') A B C C 4 5 2 6 The previous expression is equivalent to >>> df[df.A > df.B] A B C C 4 5 2 6 For columns with spaces in their name, you can use backtick quoting. >>> df.query('B == `C C`') A B C C 0 1 10 10 The previous expression is equivalent to >>> df[df.B == df['C C']] A B C C 0 1 10 10 """ inplace = validate_bool_kwarg(inplace, 'inplace') if not isinstance(expr, str): msg = "expr must be a string to be evaluated, {0} given" raise ValueError(msg.format(type(expr))) kwargs['level'] = kwargs.pop('level', 0) + 1 kwargs['target'] = None res = self.eval(expr, **kwargs) try: new_data = self.loc[res] except ValueError: # when res is multi-dimensional loc raises, but this is sometimes a # valid query new_data = self[res] if inplace: self._update_inplace(new_data) else: return new_data def eval(self, expr, inplace=False, **kwargs): """ Evaluate a string describing operations on DataFrame columns. Operates on columns only, not specific rows or elements. This allows `eval` to run arbitrary code, which can make you vulnerable to code injection if you pass user input to this function. 
Parameters ---------- expr : str The expression string to evaluate. inplace : bool, default False If the expression contains an assignment, whether to perform the operation inplace and mutate the existing DataFrame. Otherwise, a new DataFrame is returned. .. versionadded:: 0.18.0. kwargs : dict See the documentation for :func:`eval` for complete details on the keyword arguments accepted by :meth:`~pandas.DataFrame.query`. Returns ------- ndarray, scalar, or pandas object The result of the evaluation. See Also -------- DataFrame.query : Evaluates a boolean expression to query the columns of a frame. DataFrame.assign : Can evaluate an expression or function to create new values for a column. eval : Evaluate a Python expression as a string using various backends. Notes ----- For more details see the API documentation for :func:`~eval`. For detailed examples see :ref:`enhancing performance with eval <enhancingperf.eval>`. Examples -------- >>> df = pd.DataFrame({'A': range(1, 6), 'B': range(10, 0, -2)}) >>> df A B 0 1 10 1 2 8 2 3 6 3 4 4 4 5 2 >>> df.eval('A + B') 0 11 1 10 2 9 3 8 4 7 dtype: int64 Assignment is allowed though by default the original DataFrame is not modified. >>> df.eval('C = A + B') A B C 0 1 10 11 1 2 8 10 2 3 6 9 3 4 4 8 4 5 2 7 >>> df A B 0 1 10 1 2 8 2 3 6 3 4 4 4 5 2 Use ``inplace=True`` to modify the original DataFrame. >>> df.eval('C = A + B', inplace=True) >>> df A B C 0 1 10 11 1 2 8 10 2 3 6 9 3 4 4 8 4 5 2 7 """ from pandas.core.computation.eval import eval as _eval inplace = validate_bool_kwarg(inplace, 'inplace') resolvers = kwargs.pop('resolvers', None) kwargs['level'] = kwargs.pop('level', 0) + 1 if resolvers is None: index_resolvers = self._get_index_resolvers() column_resolvers = \ self._get_space_character_free_column_resolvers() resolvers = column_resolvers, index_resolvers if 'target' not in kwargs: kwargs['target'] = self kwargs['resolvers'] = kwargs.get('resolvers', ()) + tuple(resolvers) return _eval(expr, inplace=inplace, **kwargs) def select_dtypes(self, include=None, exclude=None): """ Return a subset of the DataFrame's columns based on the column dtypes. Parameters ---------- include, exclude : scalar or list-like A selection of dtypes or strings to be included/excluded. At least one of these parameters must be supplied. Returns ------- DataFrame The subset of the frame including the dtypes in ``include`` and excluding the dtypes in ``exclude``. Raises ------ ValueError * If both of ``include`` and ``exclude`` are empty * If ``include`` and ``exclude`` have overlapping elements * If any kind of string dtype is passed in. Notes ----- * To select all *numeric* types, use ``np.number`` or ``'number'`` * To select strings you must use the ``object`` dtype, but note that this will return *all* object dtype columns * See the `numpy dtype hierarchy <http://docs.scipy.org/doc/numpy/reference/arrays.scalars.html>`__ * To select datetimes, use ``np.datetime64``, ``'datetime'`` or ``'datetime64'`` * To select timedeltas, use ``np.timedelta64``, ``'timedelta'`` or ``'timedelta64'`` * To select Pandas categorical dtypes, use ``'category'`` * To select Pandas datetimetz dtypes, use ``'datetimetz'`` (new in 0.20.0) or ``'datetime64[ns, tz]'`` Examples -------- >>> df = pd.DataFrame({'a': [1, 2] * 3, ... 'b': [True, False] * 3, ... 
'c': [1.0, 2.0] * 3}) >>> df a b c 0 1 True 1.0 1 2 False 2.0 2 1 True 1.0 3 2 False 2.0 4 1 True 1.0 5 2 False 2.0 >>> df.select_dtypes(include='bool') b 0 True 1 False 2 True 3 False 4 True 5 False >>> df.select_dtypes(include=['float64']) c 0 1.0 1 2.0 2 1.0 3 2.0 4 1.0 5 2.0 >>> df.select_dtypes(exclude=['int']) b c 0 True 1.0 1 False 2.0 2 True 1.0 3 False 2.0 4 True 1.0 5 False 2.0 """ def _get_info_slice(obj, indexer): """Slice the info axis of `obj` with `indexer`.""" if not hasattr(obj, '_info_axis_number'): msg = 'object of type {typ!r} has no info axis' raise TypeError(msg.format(typ=type(obj).__name__)) slices = [slice(None)] * obj.ndim slices[obj._info_axis_number] = indexer return tuple(slices) if not is_list_like(include): include = (include,) if include is not None else () if not is_list_like(exclude): exclude = (exclude,) if exclude is not None else () selection = tuple(map(frozenset, (include, exclude))) if not any(selection): raise ValueError('at least one of include or exclude must be ' 'nonempty') # convert the myriad valid dtypes object to a single representation include, exclude = map( lambda x: frozenset(map(infer_dtype_from_object, x)), selection) for dtypes in (include, exclude): invalidate_string_dtypes(dtypes) # can't both include AND exclude! if not include.isdisjoint(exclude): raise ValueError('include and exclude overlap on {inc_ex}'.format( inc_ex=(include & exclude))) # empty include/exclude -> defaults to True # three cases (we've already raised if both are empty) # case 1: empty include, nonempty exclude # we have True, True, ... True for include, same for exclude # in the loop below we get the excluded # and when we call '&' below we get only the excluded # case 2: nonempty include, empty exclude # same as case 1, but with include # case 3: both nonempty # the "union" of the logic of case 1 and case 2: # we get the included and excluded, and return their logical and include_these = Series(not bool(include), index=self.columns) exclude_these = Series(not bool(exclude), index=self.columns) def is_dtype_instance_mapper(idx, dtype): return idx, functools.partial(issubclass, dtype.type) for idx, f in itertools.starmap(is_dtype_instance_mapper, enumerate(self.dtypes)): if include: # checks for the case of empty include or exclude include_these.iloc[idx] = any(map(f, include)) if exclude: exclude_these.iloc[idx] = not any(map(f, exclude)) dtype_indexer = include_these & exclude_these return self.loc[_get_info_slice(self, dtype_indexer)] def _box_item_values(self, key, values): items = self.columns[self.columns.get_loc(key)] if values.ndim == 2: return self._constructor(values.T, columns=items, index=self.index) else: return self._box_col_values(values, items) def _box_col_values(self, values, items): """ Provide boxed values for a column. 
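
        ``values`` is assumed to be already aligned with ``self.index``;
        ``items`` is used as the name of the resulting Series.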
""" klass = self._constructor_sliced return klass(values, index=self.index, name=items, fastpath=True) def __setitem__(self, key, value): key = com.apply_if_callable(key, self) # see if we can slice the rows indexer = convert_to_index_sliceable(self, key) if indexer is not None: return self._setitem_slice(indexer, value) if isinstance(key, DataFrame) or getattr(key, 'ndim', None) == 2: self._setitem_frame(key, value) elif isinstance(key, (Series, np.ndarray, list, Index)): self._setitem_array(key, value) else: # set column self._set_item(key, value) def _setitem_slice(self, key, value): self._check_setitem_copy() self.loc._setitem_with_indexer(key, value) def _setitem_array(self, key, value): # also raises Exception if object array with NA values if com.is_bool_indexer(key): if len(key) != len(self.index): raise ValueError('Item wrong length %d instead of %d!' % (len(key), len(self.index))) key = check_bool_indexer(self.index, key) indexer = key.nonzero()[0] self._check_setitem_copy() self.loc._setitem_with_indexer(indexer, value) else: if isinstance(value, DataFrame): if len(value.columns) != len(key): raise ValueError('Columns must be same length as key') for k1, k2 in zip(key, value.columns): self[k1] = value[k2] else: indexer = self.loc._convert_to_indexer(key, axis=1) self._check_setitem_copy() self.loc._setitem_with_indexer((slice(None), indexer), value) def _setitem_frame(self, key, value): # support boolean setting with DataFrame input, e.g. # df[df > df2] = 0 if isinstance(key, np.ndarray): if key.shape != self.shape: raise ValueError( 'Array conditional must be same shape as self' ) key = self._constructor(key, **self._construct_axes_dict()) if key.values.size and not is_bool_dtype(key.values): raise TypeError( 'Must pass DataFrame or 2-d ndarray with boolean values only' ) self._check_inplace_setting(value) self._check_setitem_copy() self._where(-key, value, inplace=True) def _ensure_valid_index(self, value): """ Ensure that if we don't have an index, that we can create one from the passed value. """ # GH5632, make sure that we are a Series convertible if not len(self.index) and is_list_like(value): try: value = Series(value) except (ValueError, NotImplementedError, TypeError): raise ValueError('Cannot set a frame with no defined index ' 'and a value that cannot be converted to a ' 'Series') self._data = self._data.reindex_axis(value.index.copy(), axis=1, fill_value=np.nan) def _set_item(self, key, value): """ Add series to DataFrame in specified column. If series is a numpy-array (not a Series/TimeSeries), it must be the same length as the DataFrames index or an error will be thrown. Series/TimeSeries will be conformed to the DataFrames index to ensure homogeneity. """ self._ensure_valid_index(value) value = self._sanitize_column(key, value) NDFrame._set_item(self, key, value) # check if we are modifying a copy # try to set first as we want an invalid # value exception to occur first if len(self): self._check_setitem_copy() def insert(self, loc, column, value, allow_duplicates=False): """ Insert column into DataFrame at specified location. Raises a ValueError if `column` is already contained in the DataFrame, unless `allow_duplicates` is set to True. Parameters ---------- loc : int Insertion index. 
Must verify 0 <= loc <= len(columns) column : string, number, or hashable object label of the inserted column value : int, Series, or array-like allow_duplicates : bool, optional """ self._ensure_valid_index(value) value = self._sanitize_column(column, value, broadcast=False) self._data.insert(loc, column, value, allow_duplicates=allow_duplicates) def assign(self, **kwargs): r""" Assign new columns to a DataFrame. Returns a new object with all original columns in addition to new ones. Existing columns that are re-assigned will be overwritten. Parameters ---------- **kwargs : dict of {str: callable or Series} The column names are keywords. If the values are callable, they are computed on the DataFrame and assigned to the new columns. The callable must not change input DataFrame (though pandas doesn't check it). If the values are not callable, (e.g. a Series, scalar, or array), they are simply assigned. Returns ------- DataFrame A new DataFrame with the new columns in addition to all the existing columns. Notes ----- Assigning multiple columns within the same ``assign`` is possible. For Python 3.6 and above, later items in '\*\*kwargs' may refer to newly created or modified columns in 'df'; items are computed and assigned into 'df' in order. For Python 3.5 and below, the order of keyword arguments is not specified, you cannot refer to newly created or modified columns. All items are computed first, and then assigned in alphabetical order. .. versionchanged :: 0.23.0 Keyword argument order is maintained for Python 3.6 and later. Examples -------- >>> df = pd.DataFrame({'temp_c': [17.0, 25.0]}, ... index=['Portland', 'Berkeley']) >>> df temp_c Portland 17.0 Berkeley 25.0 Where the value is a callable, evaluated on `df`: >>> df.assign(temp_f=lambda x: x.temp_c * 9 / 5 + 32) temp_c temp_f Portland 17.0 62.6 Berkeley 25.0 77.0 Alternatively, the same behavior can be achieved by directly referencing an existing Series or sequence: >>> df.assign(temp_f=df['temp_c'] * 9 / 5 + 32) temp_c temp_f Portland 17.0 62.6 Berkeley 25.0 77.0 In Python 3.6+, you can create multiple columns within the same assign where one of the columns depends on another one defined within the same assign: >>> df.assign(temp_f=lambda x: x['temp_c'] * 9 / 5 + 32, ... temp_k=lambda x: (x['temp_f'] + 459.67) * 5 / 9) temp_c temp_f temp_k Portland 17.0 62.6 290.15 Berkeley 25.0 77.0 298.15 """ data = self.copy() # >= 3.6 preserve order of kwargs if PY36: for k, v in kwargs.items(): data[k] = com.apply_if_callable(v, data) else: # <= 3.5: do all calculations first... results = OrderedDict() for k, v in kwargs.items(): results[k] = com.apply_if_callable(v, data) # <= 3.5 and earlier results = sorted(results.items()) # ... and then assign for k, v in results: data[k] = v return data def _sanitize_column(self, key, value, broadcast=True): """ Ensures new columns (which go into the BlockManager as new blocks) are always copied and converted into an array. Parameters ---------- key : object value : scalar, Series, or array-like broadcast : bool, default True If ``key`` matches multiple duplicate column names in the DataFrame, this parameter indicates whether ``value`` should be tiled so that the returned array contains a (duplicated) column for each occurrence of the key. If False, ``value`` will not be tiled. 
Returns ------- numpy.ndarray """ def reindexer(value): # reindex if necessary if value.index.equals(self.index) or not len(self.index): value = value._values.copy() else: # GH 4107 try: value = value.reindex(self.index)._values except Exception as e: # duplicate axis if not value.index.is_unique: raise e # other raise TypeError('incompatible index of inserted column ' 'with frame index') return value if isinstance(value, Series): value = reindexer(value) elif isinstance(value, DataFrame): # align right-hand-side columns if self.columns # is multi-index and self[key] is a sub-frame if isinstance(self.columns, MultiIndex) and key in self.columns: loc = self.columns.get_loc(key) if isinstance(loc, (slice, Series, np.ndarray, Index)): cols = maybe_droplevels(self.columns[loc], key) if len(cols) and not cols.equals(value.columns): value = value.reindex(cols, axis=1) # now align rows value = reindexer(value).T elif isinstance(value, ExtensionArray): # Explicitly copy here, instead of in sanitize_index, # as sanitize_index won't copy an EA, even with copy=True value = value.copy() value = sanitize_index(value, self.index, copy=False) elif isinstance(value, Index) or is_sequence(value): # turn me into an ndarray value = sanitize_index(value, self.index, copy=False) if not isinstance(value, (np.ndarray, Index)): if isinstance(value, list) and len(value) > 0: value = maybe_convert_platform(value) else: value = com.asarray_tuplesafe(value) elif value.ndim == 2: value = value.copy().T elif isinstance(value, Index): value = value.copy(deep=True) else: value = value.copy() # possibly infer to datetimelike if is_object_dtype(value.dtype): value = maybe_infer_to_datetimelike(value) else: # cast ignores pandas dtypes. so save the dtype first infer_dtype, _ = infer_dtype_from_scalar( value, pandas_dtype=True) # upcast value = cast_scalar_to_array(len(self.index), value) value = maybe_cast_to_datetime(value, infer_dtype) # return internal types directly if is_extension_type(value) or is_extension_array_dtype(value): return value # broadcast across multiple columns if necessary if broadcast and key in self.columns and value.ndim == 1: if (not self.columns.is_unique or isinstance(self.columns, MultiIndex)): existing_piece = self[key] if isinstance(existing_piece, DataFrame): value = np.tile(value, (len(existing_piece.columns), 1)) return np.atleast_2d(np.asarray(value)) @property def _series(self): return {item: Series(self._data.iget(idx), index=self.index, name=item) for idx, item in enumerate(self.columns)} def lookup(self, row_labels, col_labels): """ Label-based "fancy indexing" function for DataFrame. Given equal-length arrays of row and column labels, return an array of the values corresponding to each (row, col) pair. 
        Parameters
        ----------
        row_labels : sequence
            The row labels to use for lookup.
        col_labels : sequence
            The column labels to use for lookup.

        Returns
        -------
        numpy.ndarray
            The found values.

        Notes
        -----
        Akin to::

            result = [df.get_value(row, col)
                      for row, col in zip(row_labels, col_labels)]
        """
        n = len(row_labels)
        if n != len(col_labels):
            raise ValueError('Row labels must have same size as column labels')

        thresh = 1000
        if not self._is_mixed_type or n > thresh:
            values = self.values
            ridx = self.index.get_indexer(row_labels)
            cidx = self.columns.get_indexer(col_labels)
            if (ridx == -1).any():
                raise KeyError('One or more row labels was not found')
            if (cidx == -1).any():
                raise KeyError('One or more column labels was not found')
            flat_index = ridx * len(self.columns) + cidx
            result = values.flat[flat_index]
        else:
            result = np.empty(n, dtype='O')
            for i, (r, c) in enumerate(zip(row_labels, col_labels)):
                result[i] = self._get_value(r, c)

        if is_object_dtype(result):
            result = lib.maybe_convert_objects(result)

        return result

    # ----------------------------------------------------------------------
    # Reindexing and alignment

    def _reindex_axes(self, axes, level, limit, tolerance, method,
                      fill_value, copy):
        frame = self

        columns = axes['columns']
        if columns is not None:
            frame = frame._reindex_columns(columns, method, copy, level,
                                           fill_value, limit, tolerance)

        index = axes['index']
        if index is not None:
            frame = frame._reindex_index(index, method, copy, level,
                                         fill_value, limit, tolerance)

        return frame

    def _reindex_index(self, new_index, method, copy, level,
                       fill_value=np.nan, limit=None, tolerance=None):
        new_index, indexer = self.index.reindex(new_index, method=method,
                                                level=level, limit=limit,
                                                tolerance=tolerance)
        return self._reindex_with_indexers({0: [new_index, indexer]},
                                           copy=copy, fill_value=fill_value,
                                           allow_dups=False)

    def _reindex_columns(self, new_columns, method, copy, level,
                         fill_value=None, limit=None, tolerance=None):
        new_columns, indexer = self.columns.reindex(new_columns,
                                                    method=method,
                                                    level=level, limit=limit,
                                                    tolerance=tolerance)
        return self._reindex_with_indexers({1: [new_columns, indexer]},
                                           copy=copy, fill_value=fill_value,
                                           allow_dups=False)

    def _reindex_multi(self, axes, copy, fill_value):
        """
        We are guaranteed non-Nones in the axes.
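
        When both row and column indexers are available, we reindex in a
        single shot via ``algorithms.take_2d_multi``; otherwise we fall back
        to ``_reindex_with_indexers``.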
""" new_index, row_indexer = self.index.reindex(axes['index']) new_columns, col_indexer = self.columns.reindex(axes['columns']) if row_indexer is not None and col_indexer is not None: indexer = row_indexer, col_indexer new_values = algorithms.take_2d_multi(self.values, indexer, fill_value=fill_value) return self._constructor(new_values, index=new_index, columns=new_columns) else: return self._reindex_with_indexers({0: [new_index, row_indexer], 1: [new_columns, col_indexer]}, copy=copy, fill_value=fill_value) @Appender(_shared_docs['align'] % _shared_doc_kwargs) def align(self, other, join='outer', axis=None, level=None, copy=True, fill_value=None, method=None, limit=None, fill_axis=0, broadcast_axis=None): return super().align(other, join=join, axis=axis, level=level, copy=copy, fill_value=fill_value, method=method, limit=limit, fill_axis=fill_axis, broadcast_axis=broadcast_axis) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.reindex.__doc__) @rewrite_axis_style_signature('labels', [('method', None), ('copy', True), ('level', None), ('fill_value', np.nan), ('limit', None), ('tolerance', None)]) def reindex(self, *args, **kwargs): axes = validate_axis_style_args(self, args, kwargs, 'labels', 'reindex') kwargs.update(axes) # Pop these, since the values are in `kwargs` under different names kwargs.pop('axis', None) kwargs.pop('labels', None) return super().reindex(**kwargs) @Appender(_shared_docs['reindex_axis'] % _shared_doc_kwargs) def reindex_axis(self, labels, axis=0, method=None, level=None, copy=True, limit=None, fill_value=np.nan): return super().reindex_axis(labels=labels, axis=axis, method=method, level=level, copy=copy, limit=limit, fill_value=fill_value) def drop(self, labels=None, axis=0, index=None, columns=None, level=None, inplace=False, errors='raise'): """ Drop specified labels from rows or columns. Remove rows or columns by specifying label names and corresponding axis, or by specifying directly index or column names. When using a multi-index, labels on different levels can be removed by specifying the level. Parameters ---------- labels : single label or list-like Index or column labels to drop. axis : {0 or 'index', 1 or 'columns'}, default 0 Whether to drop labels from the index (0 or 'index') or columns (1 or 'columns'). index : single label or list-like Alternative to specifying axis (``labels, axis=0`` is equivalent to ``index=labels``). .. versionadded:: 0.21.0 columns : single label or list-like Alternative to specifying axis (``labels, axis=1`` is equivalent to ``columns=labels``). .. versionadded:: 0.21.0 level : int or level name, optional For MultiIndex, level from which the labels will be removed. inplace : bool, default False If True, do operation inplace and return None. errors : {'ignore', 'raise'}, default 'raise' If 'ignore', suppress error and only existing labels are dropped. Returns ------- DataFrame DataFrame without the removed index or column labels. Raises ------ KeyError If any of the labels is not found in the selected axis. See Also -------- DataFrame.loc : Label-location based indexer for selection by label. DataFrame.dropna : Return DataFrame with labels on given axis omitted where (all or any) data are missing. DataFrame.drop_duplicates : Return DataFrame with duplicate rows removed, optionally only considering certain columns. Series.drop : Return Series with specified index labels removed. Examples -------- >>> df = pd.DataFrame(np.arange(12).reshape(3, 4), ... 
columns=['A', 'B', 'C', 'D']) >>> df A B C D 0 0 1 2 3 1 4 5 6 7 2 8 9 10 11 Drop columns >>> df.drop(['B', 'C'], axis=1) A D 0 0 3 1 4 7 2 8 11 >>> df.drop(columns=['B', 'C']) A D 0 0 3 1 4 7 2 8 11 Drop a row by index >>> df.drop([0, 1]) A B C D 2 8 9 10 11 Drop columns and/or rows of MultiIndex DataFrame >>> midx = pd.MultiIndex(levels=[['lama', 'cow', 'falcon'], ... ['speed', 'weight', 'length']], ... codes=[[0, 0, 0, 1, 1, 1, 2, 2, 2], ... [0, 1, 2, 0, 1, 2, 0, 1, 2]]) >>> df = pd.DataFrame(index=midx, columns=['big', 'small'], ... data=[[45, 30], [200, 100], [1.5, 1], [30, 20], ... [250, 150], [1.5, 0.8], [320, 250], ... [1, 0.8], [0.3, 0.2]]) >>> df big small lama speed 45.0 30.0 weight 200.0 100.0 length 1.5 1.0 cow speed 30.0 20.0 weight 250.0 150.0 length 1.5 0.8 falcon speed 320.0 250.0 weight 1.0 0.8 length 0.3 0.2 >>> df.drop(index='cow', columns='small') big lama speed 45.0 weight 200.0 length 1.5 falcon speed 320.0 weight 1.0 length 0.3 >>> df.drop(index='length', level=1) big small lama speed 45.0 30.0 weight 200.0 100.0 cow speed 30.0 20.0 weight 250.0 150.0 falcon speed 320.0 250.0 weight 1.0 0.8 """ return super().drop(labels=labels, axis=axis, index=index, columns=columns, level=level, inplace=inplace, errors=errors) @rewrite_axis_style_signature('mapper', [('copy', True), ('inplace', False), ('level', None), ('errors', 'ignore')]) def rename(self, *args, **kwargs): """ Alter axes labels. Function / dict values must be unique (1-to-1). Labels not contained in a dict / Series will be left as-is. Extra labels listed don't throw an error. See the :ref:`user guide <basics.rename>` for more. Parameters ---------- mapper : dict-like or function Dict-like or functions transformations to apply to that axis' values. Use either ``mapper`` and ``axis`` to specify the axis to target with ``mapper``, or ``index`` and ``columns``. index : dict-like or function Alternative to specifying axis (``mapper, axis=0`` is equivalent to ``index=mapper``). columns : dict-like or function Alternative to specifying axis (``mapper, axis=1`` is equivalent to ``columns=mapper``). axis : int or str Axis to target with ``mapper``. Can be either the axis name ('index', 'columns') or number (0, 1). The default is 'index'. copy : bool, default True Also copy underlying data. inplace : bool, default False Whether to return a new DataFrame. If True then value of copy is ignored. level : int or level name, default None In case of a MultiIndex, only rename labels in the specified level. errors : {'ignore', 'raise'}, default 'ignore' If 'raise', raise a `KeyError` when a dict-like `mapper`, `index`, or `columns` contains labels that are not present in the Index being transformed. If 'ignore', existing keys will be renamed and extra keys will be ignored. Returns ------- DataFrame DataFrame with the renamed axis labels. Raises ------ KeyError If any of the labels is not found in the selected axis and "errors='raise'". See Also -------- DataFrame.rename_axis : Set the name of the axis. Examples -------- ``DataFrame.rename`` supports two calling conventions * ``(index=index_mapper, columns=columns_mapper, ...)`` * ``(mapper, axis={'index', 'columns'}, ...)`` We *highly* recommend using keyword arguments to clarify your intent. 
Rename columns using a mapping: >>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]}) >>> df.rename(columns={"A": "a", "B": "c"}) a c 0 1 4 1 2 5 2 3 6 Rename index using a mapping: >>> df.rename(index={0: "x", 1: "y", 2: "z"}) A B x 1 4 y 2 5 z 3 6 Cast index labels to a different type: >>> df.index RangeIndex(start=0, stop=3, step=1) >>> df.rename(index=str).index Index(['0', '1', '2'], dtype='object') >>> df.rename(columns={"A": "a", "B": "b", "C": "c"}, errors="raise") Traceback (most recent call last): KeyError: ['C'] not found in axis Using axis-style parameters >>> df.rename(str.lower, axis='columns') a b 0 1 4 1 2 5 2 3 6 >>> df.rename({1: 2, 2: 4}, axis='index') A B 0 1 4 2 2 5 4 3 6 """ axes = validate_axis_style_args(self, args, kwargs, 'mapper', 'rename') kwargs.update(axes) # Pop these, since the values are in `kwargs` under different names kwargs.pop('axis', None) kwargs.pop('mapper', None) return super().rename(**kwargs) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.fillna.__doc__) def fillna(self, value=None, method=None, axis=None, inplace=False, limit=None, downcast=None, **kwargs): return super().fillna(value=value, method=method, axis=axis, inplace=inplace, limit=limit, downcast=downcast, **kwargs) @Appender(_shared_docs['replace'] % _shared_doc_kwargs) def replace(self, to_replace=None, value=None, inplace=False, limit=None, regex=False, method='pad'): return super().replace(to_replace=to_replace, value=value, inplace=inplace, limit=limit, regex=regex, method=method) @Appender(_shared_docs['shift'] % _shared_doc_kwargs) def shift(self, periods=1, freq=None, axis=0, fill_value=None): return super().shift(periods=periods, freq=freq, axis=axis, fill_value=fill_value) def set_index(self, keys, drop=True, append=False, inplace=False, verify_integrity=False): """ Set the DataFrame index using existing columns. Set the DataFrame index (row labels) using one or more existing columns or arrays (of the correct length). The index can replace the existing index or expand on it. Parameters ---------- keys : label or array-like or list of labels/arrays This parameter can be either a single column key, a single array of the same length as the calling DataFrame, or a list containing an arbitrary combination of column keys and arrays. Here, "array" encompasses :class:`Series`, :class:`Index`, ``np.ndarray``, and instances of :class:`~collections.abc.Iterator`. drop : bool, default True Delete columns to be used as the new index. append : bool, default False Whether to append columns to existing index. inplace : bool, default False Modify the DataFrame in place (do not create a new object). verify_integrity : bool, default False Check the new index for duplicates. Otherwise defer the check until necessary. Setting to False will improve the performance of this method. Returns ------- DataFrame Changed row labels. See Also -------- DataFrame.reset_index : Opposite of set_index. DataFrame.reindex : Change to new indices or expand indices. DataFrame.reindex_like : Change to same indices as other DataFrame. Examples -------- >>> df = pd.DataFrame({'month': [1, 4, 7, 10], ... 'year': [2012, 2014, 2013, 2014], ... 
'sale': [55, 40, 84, 31]}) >>> df month year sale 0 1 2012 55 1 4 2014 40 2 7 2013 84 3 10 2014 31 Set the index to become the 'month' column: >>> df.set_index('month') year sale month 1 2012 55 4 2014 40 7 2013 84 10 2014 31 Create a MultiIndex using columns 'year' and 'month': >>> df.set_index(['year', 'month']) sale year month 2012 1 55 2014 4 40 2013 7 84 2014 10 31 Create a MultiIndex using an Index and a column: >>> df.set_index([pd.Index([1, 2, 3, 4]), 'year']) month sale year 1 2012 1 55 2 2014 4 40 3 2013 7 84 4 2014 10 31 Create a MultiIndex using two Series: >>> s = pd.Series([1, 2, 3, 4]) >>> df.set_index([s, s**2]) month year sale 1 1 1 2012 55 2 4 4 2014 40 3 9 7 2013 84 4 16 10 2014 31 """ inplace = validate_bool_kwarg(inplace, 'inplace') if not isinstance(keys, list): keys = [keys] err_msg = ('The parameter "keys" may be a column key, one-dimensional ' 'array, or a list containing only valid column keys and ' 'one-dimensional arrays.') missing = [] for col in keys: if isinstance(col, (ABCIndexClass, ABCSeries, np.ndarray, list, abc.Iterator)): # arrays are fine as long as they are one-dimensional # iterators get converted to list below if getattr(col, 'ndim', 1) != 1: raise ValueError(err_msg) else: # everything else gets tried as a key; see GH 24969 try: found = col in self.columns except TypeError: raise TypeError(err_msg + ' Received column of ' 'type {}'.format(type(col))) else: if not found: missing.append(col) if missing: raise KeyError('None of {} are in the columns'.format(missing)) if inplace: frame = self else: frame = self.copy() arrays = [] names = [] if append: names = [x for x in self.index.names] if isinstance(self.index, ABCMultiIndex): for i in range(self.index.nlevels): arrays.append(self.index._get_level_values(i)) else: arrays.append(self.index) to_remove = [] for col in keys: if isinstance(col, ABCMultiIndex): for n in range(col.nlevels): arrays.append(col._get_level_values(n)) names.extend(col.names) elif isinstance(col, (ABCIndexClass, ABCSeries)): # if Index then not MultiIndex (treated above) arrays.append(col) names.append(col.name) elif isinstance(col, (list, np.ndarray)): arrays.append(col) names.append(None) elif isinstance(col, abc.Iterator): arrays.append(list(col)) names.append(None) # from here, col can only be a column label else: arrays.append(frame[col]._values) names.append(col) if drop: to_remove.append(col) if len(arrays[-1]) != len(self): # check newest element against length of calling frame, since # ensure_index_from_sequences would not raise for append=False. raise ValueError('Length mismatch: Expected {len_self} rows, ' 'received array of length {len_col}'.format( len_self=len(self), len_col=len(arrays[-1]) )) index = ensure_index_from_sequences(arrays, names) if verify_integrity and not index.is_unique: duplicates = index[index.duplicated()].unique() raise ValueError('Index has duplicate keys: {dup}'.format( dup=duplicates)) # use set to handle duplicate column names gracefully in case of drop for c in set(to_remove): del frame[c] # clear up memory usage index._cleanup() frame.index = index if not inplace: return frame def reset_index(self, level=None, drop=False, inplace=False, col_level=0, col_fill=''): """ Reset the index, or a level of it. Reset the index of the DataFrame, and use the default one instead. If the DataFrame has a MultiIndex, this method can remove one or more levels. Parameters ---------- level : int, str, tuple, or list, default None Only remove the given levels from the index. Removes all levels by default. 
drop : bool, default False Do not try to insert index into dataframe columns. This resets the index to the default integer index. inplace : bool, default False Modify the DataFrame in place (do not create a new object). col_level : int or str, default 0 If the columns have multiple levels, determines which level the labels are inserted into. By default it is inserted into the first level. col_fill : object, default '' If the columns have multiple levels, determines how the other levels are named. If None then the index name is repeated. Returns ------- DataFrame DataFrame with the new index. See Also -------- DataFrame.set_index : Opposite of reset_index. DataFrame.reindex : Change to new indices or expand indices. DataFrame.reindex_like : Change to same indices as other DataFrame. Examples -------- >>> df = pd.DataFrame([('bird', 389.0), ... ('bird', 24.0), ... ('mammal', 80.5), ... ('mammal', np.nan)], ... index=['falcon', 'parrot', 'lion', 'monkey'], ... columns=('class', 'max_speed')) >>> df class max_speed falcon bird 389.0 parrot bird 24.0 lion mammal 80.5 monkey mammal NaN When we reset the index, the old index is added as a column, and a new sequential index is used: >>> df.reset_index() index class max_speed 0 falcon bird 389.0 1 parrot bird 24.0 2 lion mammal 80.5 3 monkey mammal NaN We can use the `drop` parameter to avoid the old index being added as a column: >>> df.reset_index(drop=True) class max_speed 0 bird 389.0 1 bird 24.0 2 mammal 80.5 3 mammal NaN You can also use `reset_index` with `MultiIndex`. >>> index = pd.MultiIndex.from_tuples([('bird', 'falcon'), ... ('bird', 'parrot'), ... ('mammal', 'lion'), ... ('mammal', 'monkey')], ... names=['class', 'name']) >>> columns = pd.MultiIndex.from_tuples([('speed', 'max'), ... ('species', 'type')]) >>> df = pd.DataFrame([(389.0, 'fly'), ... ( 24.0, 'fly'), ... ( 80.5, 'run'), ... (np.nan, 'jump')], ... index=index, ... columns=columns) >>> df speed species max type class name bird falcon 389.0 fly parrot 24.0 fly mammal lion 80.5 run monkey NaN jump If the index has multiple levels, we can reset a subset of them: >>> df.reset_index(level='class') class speed species max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump If we are not dropping the index, by default, it is placed in the top level. 
We can place it in another level: >>> df.reset_index(level='class', col_level=1) speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump When the index is inserted under another level, we can specify under which one with the parameter `col_fill`: >>> df.reset_index(level='class', col_level=1, col_fill='species') species speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump If we specify a nonexistent level for `col_fill`, it is created: >>> df.reset_index(level='class', col_level=1, col_fill='genus') genus speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump """ inplace = validate_bool_kwarg(inplace, 'inplace') if inplace: new_obj = self else: new_obj = self.copy() def _maybe_casted_values(index, labels=None): values = index._values if not isinstance(index, (PeriodIndex, DatetimeIndex)): if values.dtype == np.object_: values = lib.maybe_convert_objects(values) # if we have the labels, extract the values with a mask if labels is not None: mask = labels == -1 # we can have situations where the whole mask is -1, # meaning there is nothing found in labels, so make all nan's if mask.all(): values = np.empty(len(mask)) values.fill(np.nan) else: values = values.take(labels) # TODO(https://github.com/pandas-dev/pandas/issues/24206) # Push this into maybe_upcast_putmask? # We can't pass EAs there right now. Looks a bit # complicated. # So we unbox the ndarray_values, op, re-box. values_type = type(values) values_dtype = values.dtype if issubclass(values_type, DatetimeLikeArray): values = values._data if mask.any(): values, changed = maybe_upcast_putmask( values, mask, np.nan) if issubclass(values_type, DatetimeLikeArray): values = values_type(values, dtype=values_dtype) return values new_index = ibase.default_index(len(new_obj)) if level is not None: if not isinstance(level, (tuple, list)): level = [level] level = [self.index._get_level_number(lev) for lev in level] if len(level) < self.index.nlevels: new_index = self.index.droplevel(level) if not drop: if isinstance(self.index, MultiIndex): names = [n if n is not None else ('level_%d' % i) for (i, n) in enumerate(self.index.names)] to_insert = zip(self.index.levels, self.index.codes) else: default = 'index' if 'index' not in self else 'level_0' names = ([default] if self.index.name is None else [self.index.name]) to_insert = ((self.index, None),) multi_col = isinstance(self.columns, MultiIndex) for i, (lev, lab) in reversed(list(enumerate(to_insert))): if not (level is None or i in level): continue name = names[i] if multi_col: col_name = (list(name) if isinstance(name, tuple) else [name]) if col_fill is None: if len(col_name) not in (1, self.columns.nlevels): raise ValueError("col_fill=None is incompatible " "with incomplete column name " "{}".format(name)) col_fill = col_name[0] lev_num = self.columns._get_level_number(col_level) name_lst = [col_fill] * lev_num + col_name missing = self.columns.nlevels - len(name_lst) name_lst += [col_fill] * missing name = tuple(name_lst) # to ndarray and maybe infer different dtype level_values = _maybe_casted_values(lev, lab) new_obj.insert(0, name, level_values) new_obj.index = new_index if not inplace: return new_obj # ---------------------------------------------------------------------- # Reindex-based selection methods @Appender(_shared_docs['isna'] % _shared_doc_kwargs) def isna(self): return super().isna() 
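
    # Illustrative sketch (not executed): ``isnull``/``notnull`` below are
    # aliases of ``isna``/``notna``, so the masks they return are identical:
    #   >>> df = pd.DataFrame({'a': [1.0, np.nan]})
    #   >>> df.isnull().equals(df.isna())
    #   True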
@Appender(_shared_docs['isna'] % _shared_doc_kwargs) def isnull(self): return super().isnull() @Appender(_shared_docs['notna'] % _shared_doc_kwargs) def notna(self): return super().notna() @Appender(_shared_docs['notna'] % _shared_doc_kwargs) def notnull(self): return super().notnull() def dropna(self, axis=0, how='any', thresh=None, subset=None, inplace=False): """ Remove missing values. See the :ref:`User Guide <missing_data>` for more on which values are considered missing, and how to work with missing data. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 Determine if rows or columns which contain missing values are removed. * 0, or 'index' : Drop rows which contain missing values. * 1, or 'columns' : Drop columns which contain missing value. .. deprecated:: 0.23.0 Pass tuple or list to drop on multiple axes. Only a single axis is allowed. how : {'any', 'all'}, default 'any' Determine if row or column is removed from DataFrame, when we have at least one NA or all NA. * 'any' : If any NA values are present, drop that row or column. * 'all' : If all values are NA, drop that row or column. thresh : int, optional Require that many non-NA values. subset : array-like, optional Labels along other axis to consider, e.g. if you are dropping rows these would be a list of columns to include. inplace : bool, default False If True, do operation inplace and return None. Returns ------- DataFrame DataFrame with NA entries dropped from it. See Also -------- DataFrame.isna: Indicate missing values. DataFrame.notna : Indicate existing (non-missing) values. DataFrame.fillna : Replace missing values. Series.dropna : Drop missing values. Index.dropna : Drop missing indices. Examples -------- >>> df = pd.DataFrame({"name": ['Alfred', 'Batman', 'Catwoman'], ... "toy": [np.nan, 'Batmobile', 'Bullwhip'], ... "born": [pd.NaT, pd.Timestamp("1940-04-25"), ... pd.NaT]}) >>> df name toy born 0 Alfred NaN NaT 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Drop the rows where at least one element is missing. >>> df.dropna() name toy born 1 Batman Batmobile 1940-04-25 Drop the columns where at least one element is missing. >>> df.dropna(axis='columns') name 0 Alfred 1 Batman 2 Catwoman Drop the rows where all elements are missing. >>> df.dropna(how='all') name toy born 0 Alfred NaN NaT 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Keep only the rows with at least 2 non-NA values. >>> df.dropna(thresh=2) name toy born 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Define in which columns to look for missing values. >>> df.dropna(subset=['name', 'born']) name toy born 1 Batman Batmobile 1940-04-25 Keep the DataFrame with valid entries in the same variable. 
>>> df.dropna(inplace=True) >>> df name toy born 1 Batman Batmobile 1940-04-25 """ inplace = validate_bool_kwarg(inplace, 'inplace') if isinstance(axis, (tuple, list)): # GH20987 msg = ("supplying multiple axes to axis is deprecated and " "will be removed in a future version.") warnings.warn(msg, FutureWarning, stacklevel=2) result = self for ax in axis: result = result.dropna(how=how, thresh=thresh, subset=subset, axis=ax) else: axis = self._get_axis_number(axis) agg_axis = 1 - axis agg_obj = self if subset is not None: ax = self._get_axis(agg_axis) indices = ax.get_indexer_for(subset) check = indices == -1 if check.any(): raise KeyError(list(np.compress(check, subset))) agg_obj = self.take(indices, axis=agg_axis) count = agg_obj.count(axis=agg_axis) if thresh is not None: mask = count >= thresh elif how == 'any': mask = count == len(agg_obj._get_axis(agg_axis)) elif how == 'all': mask = count > 0 else: if how is not None: raise ValueError('invalid how option: {h}'.format(h=how)) else: raise TypeError('must specify how or thresh') result = self.loc(axis=axis)[mask] if inplace: self._update_inplace(result) else: return result def drop_duplicates(self, subset=None, keep='first', inplace=False): """ Return DataFrame with duplicate rows removed, optionally only considering certain columns. Indexes, including time indexes are ignored. Parameters ---------- subset : column label or sequence of labels, optional Only consider certain columns for identifying duplicates, by default use all of the columns keep : {'first', 'last', False}, default 'first' - ``first`` : Drop duplicates except for the first occurrence. - ``last`` : Drop duplicates except for the last occurrence. - False : Drop all duplicates. inplace : boolean, default False Whether to drop duplicates in place or to return a copy Returns ------- DataFrame """ if self.empty: return self.copy() inplace = validate_bool_kwarg(inplace, 'inplace') duplicated = self.duplicated(subset, keep=keep) if inplace: inds, = (-duplicated)._ndarray_values.nonzero() new_data = self._data.take(inds) self._update_inplace(new_data) else: return self[-duplicated] def duplicated(self, subset=None, keep='first'): """ Return boolean Series denoting duplicate rows, optionally only considering certain columns. Parameters ---------- subset : column label or sequence of labels, optional Only consider certain columns for identifying duplicates, by default use all of the columns keep : {'first', 'last', False}, default 'first' - ``first`` : Mark duplicates as ``True`` except for the first occurrence. - ``last`` : Mark duplicates as ``True`` except for the last occurrence. - False : Mark all duplicates as ``True``. Returns ------- Series """ from pandas.core.sorting import get_group_index from pandas._libs.hashtable import duplicated_int64, _SIZE_HINT_LIMIT if self.empty: return Series(dtype=bool) def f(vals): labels, shape = algorithms.factorize( vals, size_hint=min(len(self), _SIZE_HINT_LIMIT)) return labels.astype('i8', copy=False), len(shape) if subset is None: subset = self.columns elif (not np.iterable(subset) or isinstance(subset, str) or isinstance(subset, tuple) and subset in self.columns): subset = subset, # Verify all columns in subset exist in the queried dataframe # Otherwise, raise a KeyError, same as if you try to __getitem__ with a # key that doesn't exist. 
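        # e.g. (illustrative) df.duplicated(subset=['no_such_column']) will
        # raise a KeyError listing the missing labels rather than silently
        # falling back to all columns.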
diff = Index(subset).difference(self.columns) if not diff.empty: raise KeyError(diff) vals = (col.values for name, col in self.iteritems() if name in subset) labels, shape = map(list, zip(*map(f, vals))) ids = get_group_index(labels, shape, sort=False, xnull=False) return Series(duplicated_int64(ids, keep), index=self.index) # ---------------------------------------------------------------------- # Sorting @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.sort_values.__doc__) def sort_values(self, by, axis=0, ascending=True, inplace=False, kind='quicksort', na_position='last'): inplace = validate_bool_kwarg(inplace, 'inplace') axis = self._get_axis_number(axis) if not isinstance(by, list): by = [by] if is_sequence(ascending) and len(by) != len(ascending): raise ValueError('Length of ascending (%d) != length of by (%d)' % (len(ascending), len(by))) if len(by) > 1: from pandas.core.sorting import lexsort_indexer keys = [self._get_label_or_level_values(x, axis=axis) for x in by] indexer = lexsort_indexer(keys, orders=ascending, na_position=na_position) indexer = ensure_platform_int(indexer) else: from pandas.core.sorting import nargsort by = by[0] k = self._get_label_or_level_values(by, axis=axis) if isinstance(ascending, (tuple, list)): ascending = ascending[0] indexer = nargsort(k, kind=kind, ascending=ascending, na_position=na_position) new_data = self._data.take(indexer, axis=self._get_block_manager_axis(axis), verify=False) if inplace: return self._update_inplace(new_data) else: return self._constructor(new_data).__finalize__(self) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.sort_index.__doc__) def sort_index(self, axis=0, level=None, ascending=True, inplace=False, kind='quicksort', na_position='last', sort_remaining=True, by=None): # TODO: this can be combined with Series.sort_index impl as # almost identical inplace = validate_bool_kwarg(inplace, 'inplace') # 10726 if by is not None: warnings.warn("by argument to sort_index is deprecated, " "please use .sort_values(by=...)", FutureWarning, stacklevel=2) if level is not None: raise ValueError("unable to simultaneously sort by and level") return self.sort_values(by, axis=axis, ascending=ascending, inplace=inplace) axis = self._get_axis_number(axis) labels = self._get_axis(axis) # make sure that the axis is lexsorted to start # if not we need to reconstruct to get the correct indexer labels = labels._sort_levels_monotonic() if level is not None: new_axis, indexer = labels.sortlevel(level, ascending=ascending, sort_remaining=sort_remaining) elif isinstance(labels, MultiIndex): from pandas.core.sorting import lexsort_indexer indexer = lexsort_indexer(labels._get_codes_for_sorting(), orders=ascending, na_position=na_position) else: from pandas.core.sorting import nargsort # Check monotonic-ness before sort an index # GH11080 if ((ascending and labels.is_monotonic_increasing) or (not ascending and labels.is_monotonic_decreasing)): if inplace: return else: return self.copy() indexer = nargsort(labels, kind=kind, ascending=ascending, na_position=na_position) baxis = self._get_block_manager_axis(axis) new_data = self._data.take(indexer, axis=baxis, verify=False) # reconstruct axis if needed new_data.axes[baxis] = new_data.axes[baxis]._sort_levels_monotonic() if inplace: return self._update_inplace(new_data) else: return self._constructor(new_data).__finalize__(self) def nlargest(self, n, columns, keep='first'): """ Return the first `n` rows ordered by `columns` in descending order. 

        Return the first `n` rows with the largest values in `columns`, in
        descending order. The columns that are not specified are returned as
        well, but not used for ordering.

        This method is equivalent to
        ``df.sort_values(columns, ascending=False).head(n)``, but more
        performant.

        Parameters
        ----------
        n : int
            Number of rows to return.
        columns : label or list of labels
            Column label(s) to order by.
        keep : {'first', 'last', 'all'}, default 'first'
            Where there are duplicate values:

            - ``first`` : prioritize the first occurrence(s)
            - ``last`` : prioritize the last occurrence(s)
            - ``all`` : do not drop any duplicates, even if it means
              selecting more than `n` items.

            .. versionadded:: 0.24.0

        Returns
        -------
        DataFrame
            The first `n` rows ordered by the given columns in descending
            order.

        See Also
        --------
        DataFrame.nsmallest : Return the first `n` rows ordered by `columns`
            in ascending order.
        DataFrame.sort_values : Sort DataFrame by the values.
        DataFrame.head : Return the first `n` rows without re-ordering.

        Notes
        -----
        This function cannot be used with all column types. For example, when
        specifying columns with `object` or `category` dtypes, ``TypeError``
        is raised.

        Examples
        --------
        >>> df = pd.DataFrame({'population': [59000000, 65000000, 434000,
        ...                                   434000, 434000, 337000, 11300,
        ...                                   11300, 11300],
        ...                    'GDP': [1937894, 2583560, 12011, 4520, 12128,
        ...                            17036, 182, 38, 311],
        ...                    'alpha-2': ["IT", "FR", "MT", "MV", "BN",
        ...                                "IS", "NR", "TV", "AI"]},
        ...                   index=["Italy", "France", "Malta",
        ...                          "Maldives", "Brunei", "Iceland",
        ...                          "Nauru", "Tuvalu", "Anguilla"])
        >>> df
                  population      GDP alpha-2
        Italy       59000000  1937894      IT
        France      65000000  2583560      FR
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN
        Iceland       337000    17036      IS
        Nauru          11300      182      NR
        Tuvalu         11300       38      TV
        Anguilla       11300      311      AI

        In the following example, we will use ``nlargest`` to select the
        three rows having the largest values in column "population".

        >>> df.nlargest(3, 'population')
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Malta       434000    12011      MT

        When using ``keep='last'``, ties are resolved in reverse order:

        >>> df.nlargest(3, 'population', keep='last')
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Brunei      434000    12128      BN

        When using ``keep='all'``, all duplicate items are maintained:

        >>> df.nlargest(3, 'population', keep='all')
                  population      GDP alpha-2
        France      65000000  2583560      FR
        Italy       59000000  1937894      IT
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN

        To order by the largest values in column "population" and then
        "GDP", we can specify multiple columns like in the next example.

        >>> df.nlargest(3, ['population', 'GDP'])
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Brunei      434000    12128      BN
        """
        return algorithms.SelectNFrame(self,
                                       n=n,
                                       keep=keep,
                                       columns=columns).nlargest()

    def nsmallest(self, n, columns, keep='first'):
        """
        Return the first `n` rows ordered by `columns` in ascending order.

        Return the first `n` rows with the smallest values in `columns`, in
        ascending order. The columns that are not specified are returned as
        well, but not used for ordering.

        This method is equivalent to
        ``df.sort_values(columns, ascending=True).head(n)``, but more
        performant.

        Parameters
        ----------
        n : int
            Number of items to retrieve.
        columns : list or str
            Column name or names to order by.
        keep : {'first', 'last', 'all'}, default 'first'
            Where there are duplicate values:

            - ``first`` : take the first occurrence.
            - ``last`` : take the last occurrence.
            - ``all`` : do not drop any duplicates, even if it means
              selecting more than `n` items.

            .. versionadded:: 0.24.0

        Returns
        -------
        DataFrame

        See Also
        --------
        DataFrame.nlargest : Return the first `n` rows ordered by `columns`
            in descending order.
        DataFrame.sort_values : Sort DataFrame by the values.
        DataFrame.head : Return the first `n` rows without re-ordering.

        Examples
        --------
        >>> df = pd.DataFrame({'population': [59000000, 65000000, 434000,
        ...                                   434000, 434000, 337000, 11300,
        ...                                   11300, 11300],
        ...                    'GDP': [1937894, 2583560, 12011, 4520, 12128,
        ...                            17036, 182, 38, 311],
        ...                    'alpha-2': ["IT", "FR", "MT", "MV", "BN",
        ...                                "IS", "NR", "TV", "AI"]},
        ...                   index=["Italy", "France", "Malta",
        ...                          "Maldives", "Brunei", "Iceland",
        ...                          "Nauru", "Tuvalu", "Anguilla"])
        >>> df
                  population      GDP alpha-2
        Italy       59000000  1937894      IT
        France      65000000  2583560      FR
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN
        Iceland       337000    17036      IS
        Nauru          11300      182      NR
        Tuvalu         11300       38      TV
        Anguilla       11300      311      AI

        In the following example, we will use ``nsmallest`` to select the
        three rows having the smallest values in column "population".

        >>> df.nsmallest(3, 'population')
                  population  GDP alpha-2
        Nauru          11300  182      NR
        Tuvalu         11300   38      TV
        Anguilla       11300  311      AI

        When using ``keep='last'``, ties are resolved in reverse order:

        >>> df.nsmallest(3, 'population', keep='last')
                  population  GDP alpha-2
        Anguilla       11300  311      AI
        Tuvalu         11300   38      TV
        Nauru          11300  182      NR

        When using ``keep='all'``, all duplicate items are maintained:

        >>> df.nsmallest(3, 'population', keep='all')
                  population  GDP alpha-2
        Nauru          11300  182      NR
        Tuvalu         11300   38      TV
        Anguilla       11300  311      AI

        To order by the smallest values in column "population" and then
        "GDP", we can specify multiple columns like in the next example.

        >>> df.nsmallest(3, ['population', 'GDP'])
                  population  GDP alpha-2
        Tuvalu         11300   38      TV
        Nauru          11300  182      NR
        Anguilla       11300  311      AI
        """
        return algorithms.SelectNFrame(self,
                                       n=n,
                                       keep=keep,
                                       columns=columns).nsmallest()

    def swaplevel(self, i=-2, j=-1, axis=0):
        """
        Swap levels i and j in a MultiIndex on a particular axis.

        Parameters
        ----------
        i, j : int or str (can be mixed)
            Level of index to be swapped. Can pass level name as string.

        Returns
        -------
        DataFrame

        .. versionchanged:: 0.18.1

           The indexes ``i`` and ``j`` are now optional, and default to
           the two innermost levels of the index.
        """
        result = self.copy()

        axis = self._get_axis_number(axis)
        if axis == 0:
            result.index = result.index.swaplevel(i, j)
        else:
            result.columns = result.columns.swaplevel(i, j)
        return result

    def reorder_levels(self, order, axis=0):
        """
        Rearrange index levels using input order. May not drop or duplicate
        levels.

        Parameters
        ----------
        order : list of int or list of str
            List representing new level order. Reference level by number
            (position) or by key (label).
        axis : int
            Where to reorder levels.

        Returns
        -------
        type of caller (new object)
        """
        axis = self._get_axis_number(axis)
        if not isinstance(self._get_axis(axis),
                          MultiIndex):  # pragma: no cover
            raise TypeError('Can only reorder levels on a hierarchical '
                            'axis.')

        result = self.copy()

        if axis == 0:
            result.index = result.index.reorder_levels(order)
        else:
            result.columns = result.columns.reorder_levels(order)
        return result

    # ----------------------------------------------------------------------
    # Arithmetic / combination related

    def _combine_frame(self, other, func, fill_value=None, level=None):
        this, other = self.align(other, join='outer', level=level,
                                 copy=False)
        new_index, new_columns = this.index, this.columns

        def _arith_op(left, right):
            # for the mixed_type case where we iterate over columns,
            # _arith_op(left, right) is equivalent to
            # left._binop(right, func, fill_value=fill_value)
            left, right = ops.fill_binop(left, right, fill_value)
            return func(left, right)

        if ops.should_series_dispatch(this, other, func):
            # iterate over columns
            return ops.dispatch_to_series(this, other, _arith_op)
        else:
            result = _arith_op(this.values, other.values)
            return self._constructor(result,
                                     index=new_index, columns=new_columns,
                                     copy=False)

    def _combine_match_index(self, other, func, level=None):
        left, right = self.align(other, join='outer', axis=0, level=level,
                                 copy=False)
        assert left.index.equals(right.index)

        if left._is_mixed_type or right._is_mixed_type:
            # operate column-wise; avoid costly object-casting in `.values`
            return ops.dispatch_to_series(left, right, func)
        else:
            # fastpath --> operate directly on values
            with np.errstate(all="ignore"):
                new_data = func(left.values.T, right.values).T
            return self._constructor(new_data,
                                     index=left.index, columns=self.columns,
                                     copy=False)

    def _combine_match_columns(self, other, func, level=None):
        assert isinstance(other, Series)
        left, right = self.align(other, join='outer', axis=1, level=level,
                                 copy=False)
        assert left.columns.equals(right.index)
        return ops.dispatch_to_series(left, right, func, axis="columns")

    def _combine_const(self, other, func):
        assert lib.is_scalar(other) or np.ndim(other) == 0
        return ops.dispatch_to_series(self, other, func)

    def combine(self, other, func, fill_value=None, overwrite=True):
        """
        Perform column-wise combine with another DataFrame.

        Combines a DataFrame with `other` DataFrame using `func`
        to element-wise combine columns. The row and column indexes of the
        resulting DataFrame will be the union of the two.

        Parameters
        ----------
        other : DataFrame
            The DataFrame to merge column-wise.
        func : function
            Function that takes two series as inputs and returns a Series
            or a scalar. Used to merge the two dataframes column by column.
        fill_value : scalar value, default None
            The value to fill NaNs with prior to passing any column to the
            merge func.
        overwrite : bool, default True
            If True, columns in `self` that do not exist in `other` will be
            overwritten with NaNs.

        Returns
        -------
        DataFrame
            Combination of the provided DataFrames.

        See Also
        --------
        DataFrame.combine_first : Combine two DataFrame objects and default
            to non-null values in frame calling the method.

        Examples
        --------
        Combine using a simple function that chooses the smaller column.

        >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]})
        >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
        >>> take_smaller = lambda s1, s2: s1 if s1.sum() < s2.sum() else s2
        >>> df1.combine(df2, take_smaller)
           A  B
        0  0  3
        1  0  3

        Example using a true element-wise combine function.

        >>> df1 = pd.DataFrame({'A': [5, 0], 'B': [2, 4]})
        >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
        >>> df1.combine(df2, np.minimum)
           A  B
        0  1  2
        1  0  3

        Using `fill_value` fills Nones prior to passing the column to the
        merge function.

        >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]})
        >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
        >>> df1.combine(df2, take_smaller, fill_value=-5)
           A    B
        0  0 -5.0
        1  0  4.0

        However, if the same element in both dataframes is None, that None
        is preserved.

        >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]})
        >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [None, 3]})
        >>> df1.combine(df2, take_smaller, fill_value=-5)
           A    B
        0  0 -5.0
        1  0  3.0

        Example that demonstrates the use of `overwrite` and behavior when
        the axes differ between the dataframes.

        >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]})
        >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [-10, 1]}, index=[1, 2])
        >>> df1.combine(df2, take_smaller)
             A    B     C
        0  NaN  NaN   NaN
        1  NaN  3.0 -10.0
        2  NaN  3.0   1.0

        >>> df1.combine(df2, take_smaller, overwrite=False)
             A    B     C
        0  0.0  NaN   NaN
        1  0.0  3.0 -10.0
        2  NaN  3.0   1.0

        Demonstrating the preference of the passed-in dataframe.

        >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1]}, index=[1, 2])
        >>> df2.combine(df1, take_smaller)
             A    B   C
        0  0.0  NaN NaN
        1  0.0  3.0 NaN
        2  NaN  3.0 NaN

        >>> df2.combine(df1, take_smaller, overwrite=False)
             A    B   C
        0  0.0  NaN NaN
        1  0.0  3.0 1.0
        2  NaN  3.0 1.0
        """
        other_idxlen = len(other.index)  # save for compare

        this, other = self.align(other, copy=False)
        new_index = this.index

        if other.empty and len(new_index) == len(self.index):
            return self.copy()

        if self.empty and len(other) == other_idxlen:
            return other.copy()

        # sorts if possible
        new_columns = this.columns.union(other.columns)
        do_fill = fill_value is not None
        result = {}
        for col in new_columns:
            series = this[col]
            otherSeries = other[col]

            this_dtype = series.dtype
            other_dtype = otherSeries.dtype

            this_mask = isna(series)
            other_mask = isna(otherSeries)

            # don't overwrite columns unnecessarily
            # DO propagate if this column is not in the intersection
            if not overwrite and other_mask.all():
                result[col] = this[col].copy()
                continue

            if do_fill:
                series = series.copy()
                otherSeries = otherSeries.copy()
                series[this_mask] = fill_value
                otherSeries[other_mask] = fill_value

            if col not in self.columns:
                # If self DataFrame does not have col in other DataFrame,
                # try to promote series, which is all NaN, as other_dtype.
                new_dtype = other_dtype
                try:
                    series = series.astype(new_dtype, copy=False)
                except ValueError:
                    # e.g. new_dtype is integer types
                    pass
            else:
                # if we have different dtypes, possibly promote
                new_dtype = find_common_type([this_dtype, other_dtype])
                if not is_dtype_equal(this_dtype, new_dtype):
                    series = series.astype(new_dtype)
                if not is_dtype_equal(other_dtype, new_dtype):
                    otherSeries = otherSeries.astype(new_dtype)

            arr = func(series, otherSeries)
            arr = maybe_downcast_to_dtype(arr, this_dtype)

            result[col] = arr

        # convert_objects just in case
        return self._constructor(result, index=new_index,
                                 columns=new_columns)

    def combine_first(self, other):
        """
        Update null elements with value in the same location in `other`.

        Combine two DataFrame objects by filling null values in one DataFrame
        with non-null values from other DataFrame. The row and column indexes
        of the resulting DataFrame will be the union of the two.

        Parameters
        ----------
        other : DataFrame
            Provided DataFrame to use to fill null values.

        Returns
        -------
        DataFrame

        See Also
        --------
        DataFrame.combine : Perform series-wise operation on two DataFrames
            using a given function.
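
        Notes
        -----
        Filling may upcast dtypes: for example, an integer column can become
        float once the union of the axes introduces missing values.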
        Examples
        --------
        >>> df1 = pd.DataFrame({'A': [None, 0], 'B': [None, 4]})
        >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
        >>> df1.combine_first(df2)
             A    B
        0  1.0  3.0
        1  0.0  4.0

        Null values still persist if the location of that null value
        does not exist in `other`.

        >>> df1 = pd.DataFrame({'A': [None, 0], 'B': [4, None]})
        >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1]}, index=[1, 2])
        >>> df1.combine_first(df2)
             A    B    C
        0  NaN  4.0  NaN
        1  0.0  3.0  1.0
        2  NaN  3.0  1.0
        """
        import pandas.core.computation.expressions as expressions

        def extract_values(arr):
            # Does two things:
            # 1. maybe gets the values from the Series / Index
            # 2. convert datelike to i8
            if isinstance(arr, (ABCIndexClass, ABCSeries)):
                arr = arr._values

            if needs_i8_conversion(arr):
                if is_extension_array_dtype(arr.dtype):
                    arr = arr.asi8
                else:
                    arr = arr.view('i8')
            return arr

        def combiner(x, y):
            mask = isna(x)
            if isinstance(mask, (ABCIndexClass, ABCSeries)):
                mask = mask._values

            x_values = extract_values(x)
            y_values = extract_values(y)

            # If the column y in other DataFrame is not in first DataFrame,
            # just return y_values.
            if y.name not in self.columns:
                return y_values

            return expressions.where(mask, y_values, x_values)

        return self.combine(other, combiner, overwrite=False)

    @deprecate_kwarg(old_arg_name='raise_conflict', new_arg_name='errors',
                     mapping={False: 'ignore', True: 'raise'})
    def update(self, other, join='left', overwrite=True, filter_func=None,
               errors='ignore'):
        """
        Modify in place using non-NA values from another DataFrame.

        Aligns on indices. There is no return value.

        Parameters
        ----------
        other : DataFrame, or object coercible into a DataFrame
            Should have at least one matching index/column label with the
            original DataFrame. If a Series is passed, its name attribute
            must be set, and that will be used as the column name to align
            with the original DataFrame.
        join : {'left'}, default 'left'
            Only left join is implemented, keeping the index and columns of
            the original object.
        overwrite : bool, default True
            How to handle non-NA values for overlapping keys:

            * True: overwrite original DataFrame's values with values from
              `other`.
            * False: only update values that are NA in the original
              DataFrame.

        filter_func : callable(1d-array) -> bool 1d-array, optional
            Can choose to replace values other than NA. Return True for
            values that should be updated.
        errors : {'raise', 'ignore'}, default 'ignore'
            If 'raise', will raise a ValueError if the DataFrame and `other`
            both contain non-NA data in the same place.

            .. versionchanged :: 0.24.0
                Changed from `raise_conflict=False|True`
                to `errors='ignore'|'raise'`.

        Returns
        -------
        None : method directly changes calling object

        Raises
        ------
        ValueError
            * When `errors='raise'` and there's overlapping non-NA data.
            * When `errors` is not either `'ignore'` or `'raise'`
        NotImplementedError
            * If `join != 'left'`

        See Also
        --------
        dict.update : Similar method for dictionaries.
        DataFrame.merge : For column(s)-on-columns(s) operations.

        Examples
        --------
        >>> df = pd.DataFrame({'A': [1, 2, 3],
        ...                    'B': [400, 500, 600]})
        >>> new_df = pd.DataFrame({'B': [4, 5, 6],
        ...                        'C': [7, 8, 9]})
        >>> df.update(new_df)
        >>> df
           A  B
        0  1  4
        1  2  5
        2  3  6

        The DataFrame's length does not increase as a result of the update,
        only values at matching index/column labels are updated.

        >>> df = pd.DataFrame({'A': ['a', 'b', 'c'],
        ...                    'B': ['x', 'y', 'z']})
        >>> new_df = pd.DataFrame({'B': ['d', 'e', 'f', 'g', 'h', 'i']})
        >>> df.update(new_df)
        >>> df
           A  B
        0  a  d
        1  b  e
        2  c  f

        For Series, its name attribute must be set.
>>> df = pd.DataFrame({'A': ['a', 'b', 'c'], ... 'B': ['x', 'y', 'z']}) >>> new_column = pd.Series(['d', 'e'], name='B', index=[0, 2]) >>> df.update(new_column) >>> df A B 0 a d 1 b y 2 c e >>> df = pd.DataFrame({'A': ['a', 'b', 'c'], ... 'B': ['x', 'y', 'z']}) >>> new_df = pd.DataFrame({'B': ['d', 'e']}, index=[1, 2]) >>> df.update(new_df) >>> df A B 0 a x 1 b d 2 c e If `other` contains NaNs the corresponding values are not updated in the original dataframe. >>> df = pd.DataFrame({'A': [1, 2, 3], ... 'B': [400, 500, 600]}) >>> new_df = pd.DataFrame({'B': [4, np.nan, 6]}) >>> df.update(new_df) >>> df A B 0 1 4.0 1 2 500.0 2 3 6.0 """ import pandas.core.computation.expressions as expressions # TODO: Support other joins if join != 'left': # pragma: no cover raise NotImplementedError("Only left join is supported") if errors not in ['ignore', 'raise']: raise ValueError("The parameter errors must be either " "'ignore' or 'raise'") if not isinstance(other, DataFrame): other = DataFrame(other) other = other.reindex_like(self) for col in self.columns: this = self[col]._values that = other[col]._values if filter_func is not None: with np.errstate(all='ignore'): mask = ~filter_func(this) | isna(that) else: if errors == 'raise': mask_this = notna(that) mask_that = notna(this) if any(mask_this & mask_that): raise ValueError("Data overlaps.") if overwrite: mask = isna(that) else: mask = notna(this) # don't overwrite columns unnecessarily if mask.all(): continue self[col] = expressions.where(mask, this, that) # ---------------------------------------------------------------------- # Data reshaping _shared_docs['pivot'] = """ Return reshaped DataFrame organized by given index / column values. Reshape data (produce a "pivot" table) based on column values. Uses unique values from specified `index` / `columns` to form axes of the resulting DataFrame. This function does not support data aggregation, multiple values will result in a MultiIndex in the columns. See the :ref:`User Guide <reshaping>` for more on reshaping. Parameters ----------%s index : string or object, optional Column to use to make new frame's index. If None, uses existing index. columns : string or object Column to use to make new frame's columns. values : string, object or a list of the previous, optional Column(s) to use for populating new frame's values. If not specified, all remaining columns will be used and the result will have hierarchically indexed columns. .. versionchanged :: 0.23.0 Also accept list of column names. Returns ------- DataFrame Returns reshaped DataFrame. Raises ------ ValueError: When there are any `index`, `columns` combinations with multiple values. `DataFrame.pivot_table` when you need to aggregate. See Also -------- DataFrame.pivot_table : Generalization of pivot that can handle duplicate values for one index/column pair. DataFrame.unstack : Pivot based on the index values instead of a column. Notes ----- For finer-tuned control, see hierarchical indexing documentation along with the related stack/unstack methods. Examples -------- >>> df = pd.DataFrame({'foo': ['one', 'one', 'one', 'two', 'two', ... 'two'], ... 'bar': ['A', 'B', 'C', 'A', 'B', 'C'], ... 'baz': [1, 2, 3, 4, 5, 6], ... 
'zoo': ['x', 'y', 'z', 'q', 'w', 't']}) >>> df foo bar baz zoo 0 one A 1 x 1 one B 2 y 2 one C 3 z 3 two A 4 q 4 two B 5 w 5 two C 6 t >>> df.pivot(index='foo', columns='bar', values='baz') bar A B C foo one 1 2 3 two 4 5 6 >>> df.pivot(index='foo', columns='bar')['baz'] bar A B C foo one 1 2 3 two 4 5 6 >>> df.pivot(index='foo', columns='bar', values=['baz', 'zoo']) baz zoo bar A B C A B C foo one 1 2 3 x y z two 4 5 6 q w t A ValueError is raised if there are any duplicates. >>> df = pd.DataFrame({"foo": ['one', 'one', 'two', 'two'], ... "bar": ['A', 'A', 'B', 'C'], ... "baz": [1, 2, 3, 4]}) >>> df foo bar baz 0 one A 1 1 one A 2 2 two B 3 3 two C 4 Notice that the first two rows are the same for our `index` and `columns` arguments. >>> df.pivot(index='foo', columns='bar', values='baz') Traceback (most recent call last): ... ValueError: Index contains duplicate entries, cannot reshape """ @Substitution('') @Appender(_shared_docs['pivot']) def pivot(self, index=None, columns=None, values=None): from pandas.core.reshape.pivot import pivot return pivot(self, index=index, columns=columns, values=values) _shared_docs['pivot_table'] = """ Create a spreadsheet-style pivot table as a DataFrame. The levels in the pivot table will be stored in MultiIndex objects (hierarchical indexes) on the index and columns of the result DataFrame. Parameters ----------%s values : column to aggregate, optional index : column, Grouper, array, or list of the previous If an array is passed, it must be the same length as the data. The list can contain any of the other types (except list). Keys to group by on the pivot table index. If an array is passed, it is being used as the same manner as column values. columns : column, Grouper, array, or list of the previous If an array is passed, it must be the same length as the data. The list can contain any of the other types (except list). Keys to group by on the pivot table column. If an array is passed, it is being used as the same manner as column values. aggfunc : function, list of functions, dict, default numpy.mean If list of functions passed, the resulting pivot table will have hierarchical columns whose top level are the function names (inferred from the function objects themselves) If dict is passed, the key is column to aggregate and value is function or list of functions fill_value : scalar, default None Value to replace missing values with margins : boolean, default False Add all row / columns (e.g. for subtotal / grand totals) dropna : boolean, default True Do not include columns whose entries are all NaN margins_name : string, default 'All' Name of the row / column that will contain the totals when margins is True. observed : boolean, default False This only applies if any of the groupers are Categoricals. If True: only show observed values for categorical groupers. If False: show all values for categorical groupers. .. versionchanged :: 0.25.0 Returns ------- DataFrame See Also -------- DataFrame.pivot : Pivot without aggregation that can handle non-numeric data. Examples -------- >>> df = pd.DataFrame({"A": ["foo", "foo", "foo", "foo", "foo", ... "bar", "bar", "bar", "bar"], ... "B": ["one", "one", "one", "two", "two", ... "one", "one", "two", "two"], ... "C": ["small", "large", "large", "small", ... "small", "large", "small", "small", ... "large"], ... "D": [1, 2, 2, 3, 3, 4, 5, 6, 7], ... 
"E": [2, 4, 5, 5, 6, 6, 8, 9, 9]}) >>> df A B C D E 0 foo one small 1 2 1 foo one large 2 4 2 foo one large 2 5 3 foo two small 3 5 4 foo two small 3 6 5 bar one large 4 6 6 bar one small 5 8 7 bar two small 6 9 8 bar two large 7 9 This first example aggregates values by taking the sum. >>> table = pd.pivot_table(df, values='D', index=['A', 'B'], ... columns=['C'], aggfunc=np.sum) >>> table C large small A B bar one 4.0 5.0 two 7.0 6.0 foo one 4.0 1.0 two NaN 6.0 We can also fill missing values using the `fill_value` parameter. >>> table = pd.pivot_table(df, values='D', index=['A', 'B'], ... columns=['C'], aggfunc=np.sum, fill_value=0) >>> table C large small A B bar one 4 5 two 7 6 foo one 4 1 two 0 6 The next example aggregates by taking the mean across multiple columns. >>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'], ... aggfunc={'D': np.mean, ... 'E': np.mean}) >>> table D E A C bar large 5.500000 7.500000 small 5.500000 8.500000 foo large 2.000000 4.500000 small 2.333333 4.333333 We can also calculate multiple types of aggregations for any given value column. >>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'], ... aggfunc={'D': np.mean, ... 'E': [min, max, np.mean]}) >>> table D E mean max mean min A C bar large 5.500000 9.0 7.500000 6.0 small 5.500000 9.0 8.500000 8.0 foo large 2.000000 5.0 4.500000 4.0 small 2.333333 6.0 4.333333 2.0 """ @Substitution('') @Appender(_shared_docs['pivot_table']) def pivot_table(self, values=None, index=None, columns=None, aggfunc='mean', fill_value=None, margins=False, dropna=True, margins_name='All', observed=False): from pandas.core.reshape.pivot import pivot_table return pivot_table(self, values=values, index=index, columns=columns, aggfunc=aggfunc, fill_value=fill_value, margins=margins, dropna=dropna, margins_name=margins_name, observed=observed) def stack(self, level=-1, dropna=True): """ Stack the prescribed level(s) from columns to index. Return a reshaped DataFrame or Series having a multi-level index with one or more new inner-most levels compared to the current DataFrame. The new inner-most levels are created by pivoting the columns of the current dataframe: - if the columns have a single level, the output is a Series; - if the columns have multiple levels, the new index level(s) is (are) taken from the prescribed level(s) and the output is a DataFrame. The new index levels are sorted. Parameters ---------- level : int, str, list, default -1 Level(s) to stack from the column axis onto the index axis, defined as one index or label, or a list of indices or labels. dropna : bool, default True Whether to drop rows in the resulting Frame/Series with missing values. Stacking a column level onto the index axis can create combinations of index and column values that are missing from the original dataframe. See Examples section. Returns ------- DataFrame or Series Stacked dataframe or series. See Also -------- DataFrame.unstack : Unstack prescribed level(s) from index axis onto column axis. DataFrame.pivot : Reshape dataframe from long format to wide format. DataFrame.pivot_table : Create a spreadsheet-style pivot table as a DataFrame. Notes ----- The function is named by analogy with a collection of books being reorganized from being side by side on a horizontal position (the columns of the dataframe) to being stacked vertically on top of each other (in the index of the dataframe). Examples -------- **Single level columns** >>> df_single_level_cols = pd.DataFrame([[0, 1], [2, 3]], ... index=['cat', 'dog'], ... 
columns=['weight', 'height']) Stacking a dataframe with a single level column axis returns a Series: >>> df_single_level_cols weight height cat 0 1 dog 2 3 >>> df_single_level_cols.stack() cat weight 0 height 1 dog weight 2 height 3 dtype: int64 **Multi level columns: simple case** >>> multicol1 = pd.MultiIndex.from_tuples([('weight', 'kg'), ... ('weight', 'pounds')]) >>> df_multi_level_cols1 = pd.DataFrame([[1, 2], [2, 4]], ... index=['cat', 'dog'], ... columns=multicol1) Stacking a dataframe with a multi-level column axis: >>> df_multi_level_cols1 weight kg pounds cat 1 2 dog 2 4 >>> df_multi_level_cols1.stack() weight cat kg 1 pounds 2 dog kg 2 pounds 4 **Missing values** >>> multicol2 = pd.MultiIndex.from_tuples([('weight', 'kg'), ... ('height', 'm')]) >>> df_multi_level_cols2 = pd.DataFrame([[1.0, 2.0], [3.0, 4.0]], ... index=['cat', 'dog'], ... columns=multicol2) It is common to have missing values when stacking a dataframe with multi-level columns, as the stacked dataframe typically has more values than the original dataframe. Missing values are filled with NaNs: >>> df_multi_level_cols2 weight height kg m cat 1.0 2.0 dog 3.0 4.0 >>> df_multi_level_cols2.stack() height weight cat kg NaN 1.0 m 2.0 NaN dog kg NaN 3.0 m 4.0 NaN **Prescribing the level(s) to be stacked** The first parameter controls which level or levels are stacked: >>> df_multi_level_cols2.stack(0) kg m cat height NaN 2.0 weight 1.0 NaN dog height NaN 4.0 weight 3.0 NaN >>> df_multi_level_cols2.stack([0, 1]) cat height m 2.0 weight kg 1.0 dog height m 4.0 weight kg 3.0 dtype: float64 **Dropping missing values** >>> df_multi_level_cols3 = pd.DataFrame([[None, 1.0], [2.0, 3.0]], ... index=['cat', 'dog'], ... columns=multicol2) Note that rows where all values are missing are dropped by default but this behaviour can be controlled via the dropna keyword parameter: >>> df_multi_level_cols3 weight height kg m cat NaN 1.0 dog 2.0 3.0 >>> df_multi_level_cols3.stack(dropna=False) height weight cat kg NaN NaN m 1.0 NaN dog kg NaN 2.0 m 3.0 NaN >>> df_multi_level_cols3.stack(dropna=True) height weight cat m 1.0 NaN dog kg NaN 2.0 m 3.0 NaN """ from pandas.core.reshape.reshape import stack, stack_multiple if isinstance(level, (tuple, list)): return stack_multiple(self, level, dropna=dropna) else: return stack(self, level, dropna=dropna) def unstack(self, level=-1, fill_value=None): """ Pivot a level of the (necessarily hierarchical) index labels, returning a DataFrame having a new level of column labels whose inner-most level consists of the pivoted index labels. If the index is not a MultiIndex, the output will be a Series (the analogue of stack when the columns are not a MultiIndex). The level involved will automatically get sorted. Parameters ---------- level : int, string, or list of these, default -1 (last level) Level(s) of index to unstack, can pass level name fill_value : replace NaN with this value if the unstack produces missing values .. versionadded:: 0.18.0 Returns ------- Series or DataFrame See Also -------- DataFrame.pivot : Pivot a table based on column values. DataFrame.stack : Pivot a level of the column labels (inverse operation from `unstack`). Examples -------- >>> index = pd.MultiIndex.from_tuples([('one', 'a'), ('one', 'b'), ... 
('two', 'a'), ('two', 'b')]) >>> s = pd.Series(np.arange(1.0, 5.0), index=index) >>> s one a 1.0 b 2.0 two a 3.0 b 4.0 dtype: float64 >>> s.unstack(level=-1) a b one 1.0 2.0 two 3.0 4.0 >>> s.unstack(level=0) one two a 1.0 3.0 b 2.0 4.0 >>> df = s.unstack(level=0) >>> df.unstack() one a 1.0 b 2.0 two a 3.0 b 4.0 dtype: float64 """ from pandas.core.reshape.reshape import unstack return unstack(self, level, fill_value) _shared_docs['melt'] = (""" Unpivot a DataFrame from wide format to long format, optionally leaving identifier variables set. This function is useful to massage a DataFrame into a format where one or more columns are identifier variables (`id_vars`), while all other columns, considered measured variables (`value_vars`), are "unpivoted" to the row axis, leaving just two non-identifier columns, 'variable' and 'value'. %(versionadded)s Parameters ---------- frame : DataFrame id_vars : tuple, list, or ndarray, optional Column(s) to use as identifier variables. value_vars : tuple, list, or ndarray, optional Column(s) to unpivot. If not specified, uses all columns that are not set as `id_vars`. var_name : scalar Name to use for the 'variable' column. If None it uses ``frame.columns.name`` or 'variable'. value_name : scalar, default 'value' Name to use for the 'value' column. col_level : int or string, optional If columns are a MultiIndex then use this level to melt. Returns ------- DataFrame Unpivoted DataFrame. See Also -------- %(other)s pivot_table DataFrame.pivot Examples -------- >>> df = pd.DataFrame({'A': {0: 'a', 1: 'b', 2: 'c'}, ... 'B': {0: 1, 1: 3, 2: 5}, ... 'C': {0: 2, 1: 4, 2: 6}}) >>> df A B C 0 a 1 2 1 b 3 4 2 c 5 6 >>> %(caller)sid_vars=['A'], value_vars=['B']) A variable value 0 a B 1 1 b B 3 2 c B 5 >>> %(caller)sid_vars=['A'], value_vars=['B', 'C']) A variable value 0 a B 1 1 b B 3 2 c B 5 3 a C 2 4 b C 4 5 c C 6 The names of 'variable' and 'value' columns can be customized: >>> %(caller)sid_vars=['A'], value_vars=['B'], ... var_name='myVarname', value_name='myValname') A myVarname myValname 0 a B 1 1 b B 3 2 c B 5 If you have multi-index columns: >>> df.columns = [list('ABC'), list('DEF')] >>> df A B C D E F 0 a 1 2 1 b 3 4 2 c 5 6 >>> %(caller)scol_level=0, id_vars=['A'], value_vars=['B']) A variable value 0 a B 1 1 b B 3 2 c B 5 >>> %(caller)sid_vars=[('A', 'D')], value_vars=[('B', 'E')]) (A, D) variable_0 variable_1 value 0 a B E 1 1 b B E 3 2 c B E 5 """) @Appender(_shared_docs['melt'] % dict(caller='df.melt(', versionadded='.. versionadded:: 0.20.0\n', other='melt')) def melt(self, id_vars=None, value_vars=None, var_name=None, value_name='value', col_level=None): from pandas.core.reshape.melt import melt return melt(self, id_vars=id_vars, value_vars=value_vars, var_name=var_name, value_name=value_name, col_level=col_level) # ---------------------------------------------------------------------- # Time series-related def diff(self, periods=1, axis=0): """ First discrete difference of element. Calculates the difference of a DataFrame element compared with another element in the DataFrame (default is the element in the same column of the previous row). Parameters ---------- periods : int, default 1 Periods to shift for calculating difference, accepts negative values. axis : {0 or 'index', 1 or 'columns'}, default 0 Take difference over rows (0) or columns (1). .. versionadded:: 0.16.1. Returns ------- DataFrame See Also -------- Series.diff: First discrete difference for a Series. DataFrame.pct_change: Percent change over given number of periods. 
DataFrame.shift: Shift index by desired number of periods with an optional time freq. Examples -------- Difference with previous row >>> df = pd.DataFrame({'a': [1, 2, 3, 4, 5, 6], ... 'b': [1, 1, 2, 3, 5, 8], ... 'c': [1, 4, 9, 16, 25, 36]}) >>> df a b c 0 1 1 1 1 2 1 4 2 3 2 9 3 4 3 16 4 5 5 25 5 6 8 36 >>> df.diff() a b c 0 NaN NaN NaN 1 1.0 0.0 3.0 2 1.0 1.0 5.0 3 1.0 1.0 7.0 4 1.0 2.0 9.0 5 1.0 3.0 11.0 Difference with previous column >>> df.diff(axis=1) a b c 0 NaN 0.0 0.0 1 NaN -1.0 3.0 2 NaN -1.0 7.0 3 NaN -1.0 13.0 4 NaN 0.0 20.0 5 NaN 2.0 28.0 Difference with 3rd previous row >>> df.diff(periods=3) a b c 0 NaN NaN NaN 1 NaN NaN NaN 2 NaN NaN NaN 3 3.0 2.0 15.0 4 3.0 4.0 21.0 5 3.0 6.0 27.0 Difference with following row >>> df.diff(periods=-1) a b c 0 -1.0 0.0 -3.0 1 -1.0 -1.0 -5.0 2 -1.0 -1.0 -7.0 3 -1.0 -2.0 -9.0 4 -1.0 -3.0 -11.0 5 NaN NaN NaN """ bm_axis = self._get_block_manager_axis(axis) new_data = self._data.diff(n=periods, axis=bm_axis) return self._constructor(new_data) # ---------------------------------------------------------------------- # Function application def _gotitem(self, key: Union[str, List[str]], ndim: int, subset: Optional[Union[Series, ABCDataFrame]] = None, ) -> Union[Series, ABCDataFrame]: """ Sub-classes to define. Return a sliced object. Parameters ---------- key : string / list of selections ndim : 1,2 requested ndim of result subset : object, default None subset to act on """ if subset is None: subset = self elif subset.ndim == 1: # is Series return subset # TODO: _shallow_copy(subset)? return subset[key] _agg_summary_and_see_also_doc = dedent(""" The aggregation operations are always performed over an axis, either the index (default) or the column axis. This behavior is different from `numpy` aggregation functions (`mean`, `median`, `prod`, `sum`, `std`, `var`), where the default is to compute the aggregation of the flattened array, e.g., ``numpy.mean(arr_2d)`` as opposed to ``numpy.mean(arr_2d, axis=0)``. `agg` is an alias for `aggregate`. Use the alias. See Also -------- DataFrame.apply : Perform any type of operations. DataFrame.transform : Perform transformation type operations. core.groupby.GroupBy : Perform operations over groups. core.resample.Resampler : Perform operations over resampled bins. core.window.Rolling : Perform operations over rolling window. core.window.Expanding : Perform operations over expanding window. core.window.EWM : Perform operation over exponential weighted window. """) _agg_examples_doc = dedent(""" Examples -------- >>> df = pd.DataFrame([[1, 2, 3], ... [4, 5, 6], ... [7, 8, 9], ... [np.nan, np.nan, np.nan]], ... columns=['A', 'B', 'C']) Aggregate these functions over the rows. >>> df.agg(['sum', 'min']) A B C sum 12.0 15.0 18.0 min 1.0 2.0 3.0 Different aggregations per column. >>> df.agg({'A' : ['sum', 'min'], 'B' : ['min', 'max']}) A B max NaN 8.0 min 1.0 2.0 sum 12.0 NaN Aggregate over the columns. >>> df.agg("mean", axis="columns") 0 2.0 1 5.0 2 8.0 3 NaN dtype: float64 """) @Substitution(see_also=_agg_summary_and_see_also_doc, examples=_agg_examples_doc, versionadded='\n.. 
versionadded:: 0.20.0\n', **_shared_doc_kwargs) @Appender(_shared_docs['aggregate']) def aggregate(self, func, axis=0, *args, **kwargs): axis = self._get_axis_number(axis) result = None try: result, how = self._aggregate(func, axis=axis, *args, **kwargs) except TypeError: pass if result is None: return self.apply(func, axis=axis, args=args, **kwargs) return result def _aggregate(self, arg, axis=0, *args, **kwargs): if axis == 1: # NDFrame.aggregate returns a tuple, and we need to transpose # only result result, how = self.T._aggregate(arg, *args, **kwargs) result = result.T if result is not None else result return result, how return super()._aggregate(arg, *args, **kwargs) agg = aggregate @Appender(_shared_docs['transform'] % _shared_doc_kwargs) def transform(self, func, axis=0, *args, **kwargs): axis = self._get_axis_number(axis) if axis == 1: return self.T.transform(func, *args, **kwargs).T return super().transform(func, *args, **kwargs) def apply(self, func, axis=0, broadcast=None, raw=False, reduce=None, result_type=None, args=(), **kwds): """ Apply a function along an axis of the DataFrame. Objects passed to the function are Series objects whose index is either the DataFrame's index (``axis=0``) or the DataFrame's columns (``axis=1``). By default (``result_type=None``), the final return type is inferred from the return type of the applied function. Otherwise, it depends on the `result_type` argument. Parameters ---------- func : function Function to apply to each column or row. axis : {0 or 'index', 1 or 'columns'}, default 0 Axis along which the function is applied: * 0 or 'index': apply function to each column. * 1 or 'columns': apply function to each row. broadcast : bool, optional Only relevant for aggregation functions: * ``False`` or ``None`` : returns a Series whose length is the length of the index or the number of columns (based on the `axis` parameter) * ``True`` : results will be broadcast to the original shape of the frame, the original index and columns will be retained. .. deprecated:: 0.23.0 This argument will be removed in a future version, replaced by result_type='broadcast'. raw : bool, default False * ``False`` : passes each row or column as a Series to the function. * ``True`` : the passed function will receive ndarray objects instead. If you are just applying a NumPy reduction function this will achieve much better performance. reduce : bool or None, default None Try to apply reduction procedures. If the DataFrame is empty, `apply` will use `reduce` to determine whether the result should be a Series or a DataFrame. If ``reduce=None`` (the default), `apply`'s return value will be guessed by calling `func` on an empty Series (note: while guessing, exceptions raised by `func` will be ignored). If ``reduce=True`` a Series will always be returned, and if ``reduce=False`` a DataFrame will always be returned. .. deprecated:: 0.23.0 This argument will be removed in a future version, replaced by ``result_type='reduce'``. result_type : {'expand', 'reduce', 'broadcast', None}, default None These only act when ``axis=1`` (columns): * 'expand' : list-like results will be turned into columns. * 'reduce' : returns a Series if possible rather than expanding list-like results. This is the opposite of 'expand'. * 'broadcast' : results will be broadcast to the original shape of the DataFrame, the original index and columns will be retained. The default behaviour (None) depends on the return value of the applied function: list-like results will be returned as a Series of those. 
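            (An added illustration: with ``result_type=None``, a row function
            returning ``[1, 2]`` produces a Series whose elements are lists,
            as shown in the Examples below.)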
However if the apply function returns a Series these are expanded to columns. .. versionadded:: 0.23.0 args : tuple Positional arguments to pass to `func` in addition to the array/series. **kwds Additional keyword arguments to pass as keywords arguments to `func`. Returns ------- Series or DataFrame Result of applying ``func`` along the given axis of the DataFrame. See Also -------- DataFrame.applymap: For elementwise operations. DataFrame.aggregate: Only perform aggregating type operations. DataFrame.transform: Only perform transforming type operations. Notes ----- In the current implementation apply calls `func` twice on the first column/row to decide whether it can take a fast or slow code path. This can lead to unexpected behavior if `func` has side-effects, as they will take effect twice for the first column/row. Examples -------- >>> df = pd.DataFrame([[4, 9]] * 3, columns=['A', 'B']) >>> df A B 0 4 9 1 4 9 2 4 9 Using a numpy universal function (in this case the same as ``np.sqrt(df)``): >>> df.apply(np.sqrt) A B 0 2.0 3.0 1 2.0 3.0 2 2.0 3.0 Using a reducing function on either axis >>> df.apply(np.sum, axis=0) A 12 B 27 dtype: int64 >>> df.apply(np.sum, axis=1) 0 13 1 13 2 13 dtype: int64 Returning a list-like will result in a Series >>> df.apply(lambda x: [1, 2], axis=1) 0 [1, 2] 1 [1, 2] 2 [1, 2] dtype: object Passing result_type='expand' will expand list-like results to columns of a Dataframe >>> df.apply(lambda x: [1, 2], axis=1, result_type='expand') 0 1 0 1 2 1 1 2 2 1 2 Returning a Series inside the function is similar to passing ``result_type='expand'``. The resulting column names will be the Series index. >>> df.apply(lambda x: pd.Series([1, 2], index=['foo', 'bar']), axis=1) foo bar 0 1 2 1 1 2 2 1 2 Passing ``result_type='broadcast'`` will ensure the same shape result, whether list-like or scalar is returned by the function, and broadcast it along the axis. The resulting column names will be the originals. >>> df.apply(lambda x: [1, 2], axis=1, result_type='broadcast') A B 0 1 2 1 1 2 2 1 2 """ from pandas.core.apply import frame_apply op = frame_apply(self, func=func, axis=axis, broadcast=broadcast, raw=raw, reduce=reduce, result_type=result_type, args=args, kwds=kwds) return op.get_result() def applymap(self, func): """ Apply a function to a Dataframe elementwise. This method applies a function that accepts and returns a scalar to every element of a DataFrame. Parameters ---------- func : callable Python function, returns a single value from a single value. Returns ------- DataFrame Transformed DataFrame. See Also -------- DataFrame.apply : Apply a function along input axis of DataFrame. Notes ----- In the current implementation applymap calls `func` twice on the first column/row to decide whether it can take a fast or slow code path. This can lead to unexpected behavior if `func` has side-effects, as they will take effect twice for the first column/row. Examples -------- >>> df = pd.DataFrame([[1, 2.12], [3.356, 4.567]]) >>> df 0 1 0 1.000 2.120 1 3.356 4.567 >>> df.applymap(lambda x: len(str(x))) 0 1 0 3 4 1 5 5 Note that a vectorized version of `func` often exists, which will be much faster. You could square each number elementwise. >>> df.applymap(lambda x: x**2) 0 1 0 1.000000 4.494400 1 11.262736 20.857489 But it's better to avoid applymap in that case. 
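
        A hedged aside (this doctest is an addition, not part of the original
        docstring): NumPy ufuncs already operate elementwise on a whole
        frame, so the lambda above can be replaced by a vectorized call.

        >>> np.square(df)
                   0          1
        0   1.000000   4.494400
        1  11.262736  20.857489

        The plain arithmetic operator gives the same result: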
>>> df ** 2 0 1 0 1.000000 4.494400 1 11.262736 20.857489 """ # if we have a dtype == 'M8[ns]', provide boxed values def infer(x): if x.empty: return lib.map_infer(x, func) return lib.map_infer(x.astype(object).values, func) return self.apply(infer) # ---------------------------------------------------------------------- # Merging / joining methods def append(self, other, ignore_index=False, verify_integrity=False, sort=None): """ Append rows of `other` to the end of caller, returning a new object. Columns in `other` that are not in the caller are added as new columns. Parameters ---------- other : DataFrame or Series/dict-like object, or list of these The data to append. ignore_index : boolean, default False If True, do not use the index labels. verify_integrity : boolean, default False If True, raise ValueError on creating index with duplicates. sort : boolean, default None Sort columns if the columns of `self` and `other` are not aligned. The default sorting is deprecated and will change to not-sorting in a future version of pandas. Explicitly pass ``sort=True`` to silence the warning and sort. Explicitly pass ``sort=False`` to silence the warning and not sort. .. versionadded:: 0.23.0 Returns ------- DataFrame See Also -------- concat : General function to concatenate DataFrame or Series objects. Notes ----- If a list of dict/series is passed and the keys are all contained in the DataFrame's index, the order of the columns in the resulting DataFrame will be unchanged. Iteratively appending rows to a DataFrame can be more computationally intensive than a single concatenate. A better solution is to append those rows to a list and then concatenate the list with the original DataFrame all at once. Examples -------- >>> df = pd.DataFrame([[1, 2], [3, 4]], columns=list('AB')) >>> df A B 0 1 2 1 3 4 >>> df2 = pd.DataFrame([[5, 6], [7, 8]], columns=list('AB')) >>> df.append(df2) A B 0 1 2 1 3 4 0 5 6 1 7 8 With `ignore_index` set to True: >>> df.append(df2, ignore_index=True) A B 0 1 2 1 3 4 2 5 6 3 7 8 The following, while not recommended methods for generating DataFrames, show two ways to generate a DataFrame from multiple data sources. Less efficient: >>> df = pd.DataFrame(columns=['A']) >>> for i in range(5): ... df = df.append({'A': i}, ignore_index=True) >>> df A 0 0 1 1 2 2 3 3 4 4 More efficient: >>> pd.concat([pd.DataFrame([i], columns=['A']) for i in range(5)], ... 
ignore_index=True) A 0 0 1 1 2 2 3 3 4 4 """ if isinstance(other, (Series, dict)): if isinstance(other, dict): other = Series(other) if other.name is None and not ignore_index: raise TypeError('Can only append a Series if ignore_index=True' ' or if the Series has a name') if other.name is None: index = None else: # other must have the same index name as self, otherwise # index name will be reset index = Index([other.name], name=self.index.name) idx_diff = other.index.difference(self.columns) try: combined_columns = self.columns.append(idx_diff) except TypeError: combined_columns = self.columns.astype(object).append(idx_diff) other = other.reindex(combined_columns, copy=False) other = DataFrame(other.values.reshape((1, len(other))), index=index, columns=combined_columns) other = other._convert(datetime=True, timedelta=True) if not self.columns.equals(combined_columns): self = self.reindex(columns=combined_columns) elif isinstance(other, list) and not isinstance(other[0], DataFrame): other = DataFrame(other) if (self.columns.get_indexer(other.columns) >= 0).all(): other = other.reindex(columns=self.columns) from pandas.core.reshape.concat import concat if isinstance(other, (list, tuple)): to_concat = [self] + other else: to_concat = [self, other] return concat(to_concat, ignore_index=ignore_index, verify_integrity=verify_integrity, sort=sort) def join(self, other, on=None, how='left', lsuffix='', rsuffix='', sort=False): """ Join columns of another DataFrame. Join columns with `other` DataFrame either on index or on a key column. Efficiently join multiple DataFrame objects by index at once by passing a list. Parameters ---------- other : DataFrame, Series, or list of DataFrame Index should be similar to one of the columns in this one. If a Series is passed, its name attribute must be set, and that will be used as the column name in the resulting joined DataFrame. on : str, list of str, or array-like, optional Column or index level name(s) in the caller to join on the index in `other`, otherwise joins index-on-index. If multiple values given, the `other` DataFrame must have a MultiIndex. Can pass an array as the join key if it is not already contained in the calling DataFrame. Like an Excel VLOOKUP operation. how : {'left', 'right', 'outer', 'inner'}, default 'left' How to handle the operation of the two objects. * left: use calling frame's index (or column if on is specified) * right: use `other`'s index. * outer: form union of calling frame's index (or column if on is specified) with `other`'s index, and sort it. lexicographically. * inner: form intersection of calling frame's index (or column if on is specified) with `other`'s index, preserving the order of the calling's one. lsuffix : str, default '' Suffix to use from left frame's overlapping columns. rsuffix : str, default '' Suffix to use from right frame's overlapping columns. sort : bool, default False Order result DataFrame lexicographically by the join key. If False, the order of the join key depends on the join type (how keyword). Returns ------- DataFrame A dataframe containing columns from both the caller and `other`. See Also -------- DataFrame.merge : For column(s)-on-columns(s) operations. Notes ----- Parameters `on`, `lsuffix`, and `rsuffix` are not supported when passing a list of `DataFrame` objects. Support for specifying index levels as the `on` parameter was added in version 0.23.0. Examples -------- >>> df = pd.DataFrame({'key': ['K0', 'K1', 'K2', 'K3', 'K4', 'K5'], ... 
'A': ['A0', 'A1', 'A2', 'A3', 'A4', 'A5']}) >>> df key A 0 K0 A0 1 K1 A1 2 K2 A2 3 K3 A3 4 K4 A4 5 K5 A5 >>> other = pd.DataFrame({'key': ['K0', 'K1', 'K2'], ... 'B': ['B0', 'B1', 'B2']}) >>> other key B 0 K0 B0 1 K1 B1 2 K2 B2 Join DataFrames using their indexes. >>> df.join(other, lsuffix='_caller', rsuffix='_other') key_caller A key_other B 0 K0 A0 K0 B0 1 K1 A1 K1 B1 2 K2 A2 K2 B2 3 K3 A3 NaN NaN 4 K4 A4 NaN NaN 5 K5 A5 NaN NaN If we want to join using the key columns, we need to set key to be the index in both `df` and `other`. The joined DataFrame will have key as its index. >>> df.set_index('key').join(other.set_index('key')) A B key K0 A0 B0 K1 A1 B1 K2 A2 B2 K3 A3 NaN K4 A4 NaN K5 A5 NaN Another option to join using the key columns is to use the `on` parameter. DataFrame.join always uses `other`'s index but we can use any column in `df`. This method preserves the original DataFrame's index in the result. >>> df.join(other.set_index('key'), on='key') key A B 0 K0 A0 B0 1 K1 A1 B1 2 K2 A2 B2 3 K3 A3 NaN 4 K4 A4 NaN 5 K5 A5 NaN """ # For SparseDataFrame's benefit return self._join_compat(other, on=on, how=how, lsuffix=lsuffix, rsuffix=rsuffix, sort=sort) def _join_compat(self, other, on=None, how='left', lsuffix='', rsuffix='', sort=False): from pandas.core.reshape.merge import merge from pandas.core.reshape.concat import concat if isinstance(other, Series): if other.name is None: raise ValueError('Other Series must have a name') other = DataFrame({other.name: other}) if isinstance(other, DataFrame): return merge(self, other, left_on=on, how=how, left_index=on is None, right_index=True, suffixes=(lsuffix, rsuffix), sort=sort) else: if on is not None: raise ValueError('Joining multiple DataFrames only supported' ' for joining on index') frames = [self] + list(other) can_concat = all(df.index.is_unique for df in frames) # join indexes only using concat if can_concat: if how == 'left': how = 'outer' join_axes = [self.index] else: join_axes = None return concat(frames, axis=1, join=how, join_axes=join_axes, verify_integrity=True) joined = frames[0] for frame in frames[1:]: joined = merge(joined, frame, how=how, left_index=True, right_index=True) return joined @Substitution('') @Appender(_merge_doc, indents=2) def merge(self, right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True, indicator=False, validate=None): from pandas.core.reshape.merge import merge return merge(self, right, how=how, on=on, left_on=left_on, right_on=right_on, left_index=left_index, right_index=right_index, sort=sort, suffixes=suffixes, copy=copy, indicator=indicator, validate=validate) def round(self, decimals=0, *args, **kwargs): """ Round a DataFrame to a variable number of decimal places. Parameters ---------- decimals : int, dict, Series Number of decimal places to round each column to. If an int is given, round each column to the same number of places. Otherwise dict and Series round to variable numbers of places. Column names should be in the keys if `decimals` is a dict-like, or in the index if `decimals` is a Series. Any columns not included in `decimals` will be left as is. Elements of `decimals` which are not columns of the input will be ignored. *args Additional keywords have no effect but might be accepted for compatibility with numpy. **kwargs Additional keywords have no effect but might be accepted for compatibility with numpy. 
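
            (A hedged note, added here: this numpy-compatibility shim is what
            lets ``np.round(df, 1)`` dispatch to this method, so it should
            behave the same as ``df.round(1)``.)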
Returns ------- DataFrame A DataFrame with the affected columns rounded to the specified number of decimal places. See Also -------- numpy.around : Round a numpy array to the given number of decimals. Series.round : Round a Series to the given number of decimals. Examples -------- >>> df = pd.DataFrame([(.21, .32), (.01, .67), (.66, .03), (.21, .18)], ... columns=['dogs', 'cats']) >>> df dogs cats 0 0.21 0.32 1 0.01 0.67 2 0.66 0.03 3 0.21 0.18 By providing an integer each column is rounded to the same number of decimal places >>> df.round(1) dogs cats 0 0.2 0.3 1 0.0 0.7 2 0.7 0.0 3 0.2 0.2 With a dict, the number of places for specific columns can be specified with the column names as key and the number of decimal places as value >>> df.round({'dogs': 1, 'cats': 0}) dogs cats 0 0.2 0.0 1 0.0 1.0 2 0.7 0.0 3 0.2 0.0 Using a Series, the number of places for specific columns can be specified with the column names as index and the number of decimal places as value >>> decimals = pd.Series([0, 1], index=['cats', 'dogs']) >>> df.round(decimals) dogs cats 0 0.2 0.0 1 0.0 1.0 2 0.7 0.0 3 0.2 0.0 """ from pandas.core.reshape.concat import concat def _dict_round(df, decimals): for col, vals in df.iteritems(): try: yield _series_round(vals, decimals[col]) except KeyError: yield vals def _series_round(s, decimals): if is_integer_dtype(s) or is_float_dtype(s): return s.round(decimals) return s nv.validate_round(args, kwargs) if isinstance(decimals, (dict, Series)): if isinstance(decimals, Series): if not decimals.index.is_unique: raise ValueError("Index of decimals must be unique") new_cols = [col for col in _dict_round(self, decimals)] elif is_integer(decimals): # Dispatch to Series.round new_cols = [_series_round(v, decimals) for _, v in self.iteritems()] else: raise TypeError("decimals must be an integer, a dict-like or a " "Series") if len(new_cols) > 0: return self._constructor(concat(new_cols, axis=1), index=self.index, columns=self.columns) else: return self # ---------------------------------------------------------------------- # Statistical methods, etc. def corr(self, method='pearson', min_periods=1): """ Compute pairwise correlation of columns, excluding NA/null values. Parameters ---------- method : {'pearson', 'kendall', 'spearman'} or callable * pearson : standard correlation coefficient * kendall : Kendall Tau correlation coefficient * spearman : Spearman rank correlation * callable: callable with input two 1d ndarrays and returning a float. Note that the returned matrix from corr will have 1 along the diagonals and will be symmetric regardless of the callable's behavior .. versionadded:: 0.24.0 min_periods : int, optional Minimum number of observations required per pair of columns to have a valid result. Currently only available for Pearson and Spearman correlation. Returns ------- DataFrame Correlation matrix. See Also -------- DataFrame.corrwith Series.corr Examples -------- >>> def histogram_intersection(a, b): ... v = np.minimum(a, b).sum().round(decimals=1) ... return v >>> df = pd.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], ... 
columns=['dogs', 'cats']) >>> df.corr(method=histogram_intersection) dogs cats dogs 1.0 0.3 cats 0.3 1.0 """ numeric_df = self._get_numeric_data() cols = numeric_df.columns idx = cols.copy() mat = numeric_df.values if method == 'pearson': correl = libalgos.nancorr(ensure_float64(mat), minp=min_periods) elif method == 'spearman': correl = libalgos.nancorr_spearman(ensure_float64(mat), minp=min_periods) elif method == 'kendall' or callable(method): if min_periods is None: min_periods = 1 mat = ensure_float64(mat).T corrf = nanops.get_corr_func(method) K = len(cols) correl = np.empty((K, K), dtype=float) mask = np.isfinite(mat) for i, ac in enumerate(mat): for j, bc in enumerate(mat): if i > j: continue valid = mask[i] & mask[j] if valid.sum() < min_periods: c = np.nan elif i == j: c = 1. elif not valid.all(): c = corrf(ac[valid], bc[valid]) else: c = corrf(ac, bc) correl[i, j] = c correl[j, i] = c else: raise ValueError("method must be either 'pearson', " "'spearman', 'kendall', or a callable, " "'{method}' was supplied".format(method=method)) return self._constructor(correl, index=idx, columns=cols) def cov(self, min_periods=None): """ Compute pairwise covariance of columns, excluding NA/null values. Compute the pairwise covariance among the series of a DataFrame. The returned data frame is the `covariance matrix <https://en.wikipedia.org/wiki/Covariance_matrix>`__ of the columns of the DataFrame. Both NA and null values are automatically excluded from the calculation. (See the note below about bias from missing values.) A threshold can be set for the minimum number of observations for each value created. Comparisons with observations below this threshold will be returned as ``NaN``. This method is generally used for the analysis of time series data to understand the relationship between different measures across time. Parameters ---------- min_periods : int, optional Minimum number of observations required per pair of columns to have a valid result. Returns ------- DataFrame The covariance matrix of the series of the DataFrame. See Also -------- Series.cov : Compute covariance with another Series. core.window.EWM.cov: Exponential weighted sample covariance. core.window.Expanding.cov : Expanding sample covariance. core.window.Rolling.cov : Rolling sample covariance. Notes ----- Returns the covariance matrix of the DataFrame's time series. The covariance is normalized by N-1. For DataFrames that have Series that are missing data (assuming that data is `missing at random <https://en.wikipedia.org/wiki/Missing_data#Missing_at_random>`__) the returned covariance matrix will be an unbiased estimate of the variance and covariance between the member Series. However, for many applications this estimate may not be acceptable because the estimate covariance matrix is not guaranteed to be positive semi-definite. This could lead to estimate correlations having absolute values which are greater than one, and/or a non-invertible covariance matrix. See `Estimation of covariance matrices <http://en.wikipedia.org/w/index.php?title=Estimation_of_covariance_ matrices>`__ for more details. Examples -------- >>> df = pd.DataFrame([(1, 2), (0, 3), (2, 0), (1, 1)], ... columns=['dogs', 'cats']) >>> df.cov() dogs cats dogs 0.666667 -1.000000 cats -1.000000 1.666667 >>> np.random.seed(42) >>> df = pd.DataFrame(np.random.randn(1000, 5), ... 
columns=['a', 'b', 'c', 'd', 'e']) >>> df.cov() a b c d e a 0.998438 -0.020161 0.059277 -0.008943 0.014144 b -0.020161 1.059352 -0.008543 -0.024738 0.009826 c 0.059277 -0.008543 1.010670 -0.001486 -0.000271 d -0.008943 -0.024738 -0.001486 0.921297 -0.013692 e 0.014144 0.009826 -0.000271 -0.013692 0.977795 **Minimum number of periods** This method also supports an optional ``min_periods`` keyword that specifies the required minimum number of non-NA observations for each column pair in order to have a valid result: >>> np.random.seed(42) >>> df = pd.DataFrame(np.random.randn(20, 3), ... columns=['a', 'b', 'c']) >>> df.loc[df.index[:5], 'a'] = np.nan >>> df.loc[df.index[5:10], 'b'] = np.nan >>> df.cov(min_periods=12) a b c a 0.316741 NaN -0.150812 b NaN 1.248003 0.191417 c -0.150812 0.191417 0.895202 """ numeric_df = self._get_numeric_data() cols = numeric_df.columns idx = cols.copy() mat = numeric_df.values if notna(mat).all(): if min_periods is not None and min_periods > len(mat): baseCov = np.empty((mat.shape[1], mat.shape[1])) baseCov.fill(np.nan) else: baseCov = np.cov(mat.T) baseCov = baseCov.reshape((len(cols), len(cols))) else: baseCov = libalgos.nancorr(ensure_float64(mat), cov=True, minp=min_periods) return self._constructor(baseCov, index=idx, columns=cols) def corrwith(self, other, axis=0, drop=False, method='pearson'): """ Compute pairwise correlation between rows or columns of DataFrame with rows or columns of Series or DataFrame. DataFrames are first aligned along both axes before computing the correlations. Parameters ---------- other : DataFrame, Series Object with which to compute correlations. axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' to compute column-wise, 1 or 'columns' for row-wise. drop : bool, default False Drop missing indices from result. method : {'pearson', 'kendall', 'spearman'} or callable * pearson : standard correlation coefficient * kendall : Kendall Tau correlation coefficient * spearman : Spearman rank correlation * callable: callable with input two 1d ndarrays and returning a float .. versionadded:: 0.24.0 Returns ------- Series Pairwise correlations. See Also -------- DataFrame.corr """ axis = self._get_axis_number(axis) this = self._get_numeric_data() if isinstance(other, Series): return this.apply(lambda x: other.corr(x, method=method), axis=axis) other = other._get_numeric_data() left, right = this.align(other, join='inner', copy=False) if axis == 1: left = left.T right = right.T if method == 'pearson': # mask missing values left = left + right * 0 right = right + left * 0 # demeaned data ldem = left - left.mean() rdem = right - right.mean() num = (ldem * rdem).sum() dom = (left.count() - 1) * left.std() * right.std() correl = num / dom elif method in ['kendall', 'spearman'] or callable(method): def c(x): return nanops.nancorr(x[0], x[1], method=method) correl = Series(map(c, zip(left.values.T, right.values.T)), index=left.columns) else: raise ValueError("Invalid method {method} was passed, " "valid methods are: 'pearson', 'kendall', " "'spearman', or callable". format(method=method)) if not drop: # Find non-matching labels along the given axis # and append missing correlations (GH 22375) raxis = 1 if axis == 0 else 0 result_index = (this._get_axis(raxis). 
union(other._get_axis(raxis))) idx_diff = result_index.difference(correl.index) if len(idx_diff) > 0: correl = correl.append(Series([np.nan] * len(idx_diff), index=idx_diff)) return correl # ---------------------------------------------------------------------- # ndarray-like stats methods def count(self, axis=0, level=None, numeric_only=False): """ Count non-NA cells for each column or row. The values `None`, `NaN`, `NaT`, and optionally `numpy.inf` (depending on `pandas.options.mode.use_inf_as_na`) are considered NA. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 If 0 or 'index' counts are generated for each column. If 1 or 'columns' counts are generated for each **row**. level : int or str, optional If the axis is a `MultiIndex` (hierarchical), count along a particular `level`, collapsing into a `DataFrame`. A `str` specifies the level name. numeric_only : bool, default False Include only `float`, `int` or `boolean` data. Returns ------- Series or DataFrame For each column/row the number of non-NA/null entries. If `level` is specified returns a `DataFrame`. See Also -------- Series.count: Number of non-NA elements in a Series. DataFrame.shape: Number of DataFrame rows and columns (including NA elements). DataFrame.isna: Boolean same-sized DataFrame showing places of NA elements. Examples -------- Constructing DataFrame from a dictionary: >>> df = pd.DataFrame({"Person": ... ["John", "Myla", "Lewis", "John", "Myla"], ... "Age": [24., np.nan, 21., 33, 26], ... "Single": [False, True, True, True, False]}) >>> df Person Age Single 0 John 24.0 False 1 Myla NaN True 2 Lewis 21.0 True 3 John 33.0 True 4 Myla 26.0 False Notice the uncounted NA values: >>> df.count() Person 5 Age 4 Single 5 dtype: int64 Counts for each **row**: >>> df.count(axis='columns') 0 3 1 2 2 3 3 3 4 3 dtype: int64 Counts for one level of a `MultiIndex`: >>> df.set_index(["Person", "Single"]).count(level="Person") Age Person John 2 Lewis 1 Myla 1 """ axis = self._get_axis_number(axis) if level is not None: return self._count_level(level, axis=axis, numeric_only=numeric_only) if numeric_only: frame = self._get_numeric_data() else: frame = self # GH #423 if len(frame._get_axis(axis)) == 0: result = Series(0, index=frame._get_agg_axis(axis)) else: if frame._is_mixed_type or frame._data.any_extension_types: # the or any_extension_types is really only hit for single- # column frames with an extension array result = notna(frame).sum(axis=axis) else: # GH13407 series_counts = notna(frame).sum(axis=axis) counts = series_counts.values result = Series(counts, index=frame._get_agg_axis(axis)) return result.astype('int64') def _count_level(self, level, axis=0, numeric_only=False): if numeric_only: frame = self._get_numeric_data() else: frame = self count_axis = frame._get_axis(axis) agg_axis = frame._get_agg_axis(axis) if not isinstance(count_axis, MultiIndex): raise TypeError("Can only count levels on hierarchical " "{ax}.".format(ax=self._get_axis_name(axis))) if frame._is_mixed_type: # Since we have mixed types, calling notna(frame.values) might # upcast everything to object mask = notna(frame).values else: # But use the speedup when we have homogeneous dtypes mask = notna(frame.values) if axis == 1: # We're transposing the mask rather than frame to avoid potential # upcasts to object, which induces a ~20x slowdown mask = mask.T if isinstance(level, str): level = count_axis._get_level_number(level) level_index = count_axis.levels[level] level_codes = ensure_int64(count_axis.codes[level]) counts = 
lib.count_level_2d(mask, level_codes, len(level_index), axis=0) result = DataFrame(counts, index=level_index, columns=agg_axis) if axis == 1: # Undo our earlier transpose return result.T else: return result def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None, filter_type=None, **kwds): if axis is None and filter_type == 'bool': labels = None constructor = None else: # TODO: Make other agg func handle axis=None properly axis = self._get_axis_number(axis) labels = self._get_agg_axis(axis) constructor = self._constructor def f(x): return op(x, axis=axis, skipna=skipna, **kwds) # exclude timedelta/datetime unless we are uniform types if (axis == 1 and self._is_datelike_mixed_type and (not self._is_homogeneous_type and not is_datetime64tz_dtype(self.dtypes[0]))): numeric_only = True if numeric_only is None: try: values = self.values result = f(values) if (filter_type == 'bool' and is_object_dtype(values) and axis is None): # work around https://github.com/numpy/numpy/issues/10489 # TODO: combine with hasattr(result, 'dtype') further down # hard since we don't have `values` down there. result = np.bool_(result) except Exception as e: # try by-column first if filter_type is None and axis == 0: try: # this can end up with a non-reduction # but not always. if the types are mixed # with datelike then need to make sure a series # we only end up here if we have not specified # numeric_only and yet we have tried a # column-by-column reduction, where we have mixed type. # So let's just do what we can from pandas.core.apply import frame_apply opa = frame_apply(self, func=f, result_type='expand', ignore_failures=True) result = opa.get_result() if result.ndim == self.ndim: result = result.iloc[0] return result except Exception: pass if filter_type is None or filter_type == 'numeric': data = self._get_numeric_data() elif filter_type == 'bool': data = self._get_bool_data() else: # pragma: no cover e = NotImplementedError( "Handling exception with filter_type {f} not" "implemented.".format(f=filter_type)) raise_with_traceback(e) with np.errstate(all='ignore'): result = f(data.values) labels = data._get_agg_axis(axis) else: if numeric_only: if filter_type is None or filter_type == 'numeric': data = self._get_numeric_data() elif filter_type == 'bool': # GH 25101, # GH 24434 data = self._get_bool_data() if axis == 0 else self else: # pragma: no cover msg = ("Generating numeric_only data with filter_type {f}" "not supported.".format(f=filter_type)) raise NotImplementedError(msg) values = data.values labels = data._get_agg_axis(axis) else: values = self.values result = f(values) if hasattr(result, 'dtype') and is_object_dtype(result.dtype): try: if filter_type is None or filter_type == 'numeric': result = result.astype(np.float64) elif filter_type == 'bool' and notna(result).all(): result = result.astype(np.bool_) except (ValueError, TypeError): # try to coerce to the original dtypes item by item if we can if axis == 0: result = coerce_to_dtypes(result, self.dtypes) if constructor is not None: result = Series(result, index=labels) return result def nunique(self, axis=0, dropna=True): """ Count distinct observations over requested axis. Return Series with number of distinct observations. Can ignore NaN values. .. versionadded:: 0.20.0 Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to use. 0 or 'index' for row-wise, 1 or 'columns' for column-wise. dropna : bool, default True Don't include NaN in the counts. 
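
        A hedged aside (doctest added for illustration): `nunique` is a thin
        wrapper that applies ``Series.nunique`` to each column (or row), so
        the two spellings below agree.

        >>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [1, 1, 1]})
        >>> df.nunique().equals(df.apply(pd.Series.nunique))
        True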
Returns ------- Series See Also -------- Series.nunique: Method nunique for Series. DataFrame.count: Count non-NA cells for each column or row. Examples -------- >>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [1, 1, 1]}) >>> df.nunique() A 3 B 1 dtype: int64 >>> df.nunique(axis=1) 0 1 1 2 2 2 dtype: int64 """ return self.apply(Series.nunique, axis=axis, dropna=dropna) def idxmin(self, axis=0, skipna=True): """ Return index of first occurrence of minimum over requested axis. NA/null values are excluded. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' for row-wise, 1 or 'columns' for column-wise skipna : boolean, default True Exclude NA/null values. If an entire row/column is NA, the result will be NA. Returns ------- Series Indexes of minima along the specified axis. Raises ------ ValueError * If the row/column is empty See Also -------- Series.idxmin Notes ----- This method is the DataFrame version of ``ndarray.argmin``. """ axis = self._get_axis_number(axis) indices = nanops.nanargmin(self.values, axis=axis, skipna=skipna) index = self._get_axis(axis) result = [index[i] if i >= 0 else np.nan for i in indices] return Series(result, index=self._get_agg_axis(axis)) def idxmax(self, axis=0, skipna=True): """ Return index of first occurrence of maximum over requested axis. NA/null values are excluded. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' for row-wise, 1 or 'columns' for column-wise skipna : boolean, default True Exclude NA/null values. If an entire row/column is NA, the result will be NA. Returns ------- Series Indexes of maxima along the specified axis. Raises ------ ValueError * If the row/column is empty See Also -------- Series.idxmax Notes ----- This method is the DataFrame version of ``ndarray.argmax``. """ axis = self._get_axis_number(axis) indices = nanops.nanargmax(self.values, axis=axis, skipna=skipna) index = self._get_axis(axis) result = [index[i] if i >= 0 else np.nan for i in indices] return Series(result, index=self._get_agg_axis(axis)) def _get_agg_axis(self, axis_num): """ Let's be explicit about this. """ if axis_num == 0: return self.columns elif axis_num == 1: return self.index else: raise ValueError('Axis must be 0 or 1 (got %r)' % axis_num) def mode(self, axis=0, numeric_only=False, dropna=True): """ Get the mode(s) of each element along the selected axis. The mode of a set of values is the value that appears most often. It can be multiple values. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to iterate over while searching for the mode: * 0 or 'index' : get mode of each column * 1 or 'columns' : get mode of each row numeric_only : bool, default False If True, only apply to numeric columns. dropna : bool, default True Don't consider counts of NaN/NaT. .. versionadded:: 0.24.0 Returns ------- DataFrame The modes of each column or row. See Also -------- Series.mode : Return the highest frequency value in a Series. Series.value_counts : Return the counts of values in a Series. Examples -------- >>> df = pd.DataFrame([('bird', 2, 2), ... ('mammal', 4, np.nan), ... ('arthropod', 8, 0), ... ('bird', 2, np.nan)], ... index=('falcon', 'horse', 'spider', 'ostrich'), ... columns=('species', 'legs', 'wings')) >>> df species legs wings falcon bird 2 2.0 horse mammal 4 NaN spider arthropod 8 0.0 ostrich bird 2 NaN By default, missing values are not considered, and the mode of wings are both 0 and 2. 
The second row of species and legs contains ``NaN``, because they have only one mode, but the DataFrame has two rows. >>> df.mode() species legs wings 0 bird 2.0 0.0 1 NaN NaN 2.0 Setting ``dropna=False`` ``NaN`` values are considered and they can be the mode (like for wings). >>> df.mode(dropna=False) species legs wings 0 bird 2 NaN Setting ``numeric_only=True``, only the mode of numeric columns is computed, and columns of other types are ignored. >>> df.mode(numeric_only=True) legs wings 0 2.0 0.0 1 NaN 2.0 To compute the mode over columns and not rows, use the axis parameter: >>> df.mode(axis='columns', numeric_only=True) 0 1 falcon 2.0 NaN horse 4.0 NaN spider 0.0 8.0 ostrich 2.0 NaN """ data = self if not numeric_only else self._get_numeric_data() def f(s): return s.mode(dropna=dropna) return data.apply(f, axis=axis) def quantile(self, q=0.5, axis=0, numeric_only=True, interpolation='linear'): """ Return values at the given quantile over requested axis. Parameters ---------- q : float or array-like, default 0.5 (50% quantile) Value between 0 <= q <= 1, the quantile(s) to compute. axis : {0, 1, 'index', 'columns'} (default 0) Equals 0 or 'index' for row-wise, 1 or 'columns' for column-wise. numeric_only : bool, default True If False, the quantile of datetime and timedelta data will be computed as well. interpolation : {'linear', 'lower', 'higher', 'midpoint', 'nearest'} This optional parameter specifies the interpolation method to use, when the desired quantile lies between two data points `i` and `j`: * linear: `i + (j - i) * fraction`, where `fraction` is the fractional part of the index surrounded by `i` and `j`. * lower: `i`. * higher: `j`. * nearest: `i` or `j` whichever is nearest. * midpoint: (`i` + `j`) / 2. .. versionadded:: 0.18.0 Returns ------- Series or DataFrame If ``q`` is an array, a DataFrame will be returned where the index is ``q``, the columns are the columns of self, and the values are the quantiles. If ``q`` is a float, a Series will be returned where the index is the columns of self and the values are the quantiles. See Also -------- core.window.Rolling.quantile: Rolling quantile. numpy.percentile: Numpy function to compute the percentile. Examples -------- >>> df = pd.DataFrame(np.array([[1, 1], [2, 10], [3, 100], [4, 100]]), ... columns=['a', 'b']) >>> df.quantile(.1) a 1.3 b 3.7 Name: 0.1, dtype: float64 >>> df.quantile([.1, .5]) a b 0.1 1.3 3.7 0.5 2.5 55.0 Specifying `numeric_only=False` will also compute the quantile of datetime and timedelta data. >>> df = pd.DataFrame({'A': [1, 2], ... 'B': [pd.Timestamp('2010'), ... pd.Timestamp('2011')], ... 'C': [pd.Timedelta('1 days'), ... pd.Timedelta('2 days')]}) >>> df.quantile(0.5, numeric_only=False) A 1.5 B 2010-07-02 12:00:00 C 1 days 12:00:00 Name: 0.5, dtype: object """ self._check_percentile(q) data = self._get_numeric_data() if numeric_only else self axis = self._get_axis_number(axis) is_transposed = axis == 1 if is_transposed: data = data.T result = data._data.quantile(qs=q, axis=1, interpolation=interpolation, transposed=is_transposed) if result.ndim == 2: result = self._constructor(result) else: result = self._constructor_sliced(result, name=q) if is_transposed: result = result.T return result def to_timestamp(self, freq=None, how='start', axis=0, copy=True): """ Cast to DatetimeIndex of timestamps, at *beginning* of period. Parameters ---------- freq : str, default frequency of PeriodIndex Desired frequency. 
how : {'s', 'e', 'start', 'end'} Convention for converting period to timestamp; start of period vs. end. axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to convert (the index by default). copy : bool, default True If False then underlying input data is not copied. Returns ------- DataFrame with DatetimeIndex """ new_data = self._data if copy: new_data = new_data.copy() axis = self._get_axis_number(axis) if axis == 0: new_data.set_axis(1, self.index.to_timestamp(freq=freq, how=how)) elif axis == 1: new_data.set_axis(0, self.columns.to_timestamp(freq=freq, how=how)) else: # pragma: no cover raise AssertionError('Axis must be 0 or 1. Got {ax!s}'.format( ax=axis)) return self._constructor(new_data) def to_period(self, freq=None, axis=0, copy=True): """ Convert DataFrame from DatetimeIndex to PeriodIndex with desired frequency (inferred from index if not passed). Parameters ---------- freq : str, default Frequency of the PeriodIndex. axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to convert (the index by default). copy : bool, default True If False then underlying input data is not copied. Returns ------- TimeSeries with PeriodIndex """ new_data = self._data if copy: new_data = new_data.copy() axis = self._get_axis_number(axis) if axis == 0: new_data.set_axis(1, self.index.to_period(freq=freq)) elif axis == 1: new_data.set_axis(0, self.columns.to_period(freq=freq)) else: # pragma: no cover raise AssertionError('Axis must be 0 or 1. Got {ax!s}'.format( ax=axis)) return self._constructor(new_data) def isin(self, values): """ Whether each element in the DataFrame is contained in values. Parameters ---------- values : iterable, Series, DataFrame or dict The result will only be true at a location if all the labels match. If `values` is a Series, that's the index. If `values` is a dict, the keys must be the column names, which must match. If `values` is a DataFrame, then both the index and column labels must match. Returns ------- DataFrame DataFrame of booleans showing whether each element in the DataFrame is contained in values. See Also -------- DataFrame.eq: Equality test for DataFrame. Series.isin: Equivalent method on Series. Series.str.contains: Test if pattern or regex is contained within a string of a Series or Index. Examples -------- >>> df = pd.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]}, ... index=['falcon', 'dog']) >>> df num_legs num_wings falcon 2 2 dog 4 0 When ``values`` is a list check whether every value in the DataFrame is present in the list (which animals have 0 or 2 legs or wings) >>> df.isin([0, 2]) num_legs num_wings falcon True True dog False True When ``values`` is a dict, we can pass values to check for each column separately: >>> df.isin({'num_wings': [0, 3]}) num_legs num_wings falcon False False dog False True When ``values`` is a Series or DataFrame the index and column must match. Note that 'falcon' does not match based on the number of legs in df2. >>> other = pd.DataFrame({'num_legs': [8, 2], 'num_wings': [0, 2]}, ... 
index=['spider', 'falcon']) >>> df.isin(other) num_legs num_wings falcon True True dog False False """ if isinstance(values, dict): from pandas.core.reshape.concat import concat values = collections.defaultdict(list, values) return concat((self.iloc[:, [i]].isin(values[col]) for i, col in enumerate(self.columns)), axis=1) elif isinstance(values, Series): if not values.index.is_unique: raise ValueError("cannot compute isin with " "a duplicate axis.") return self.eq(values.reindex_like(self), axis='index') elif isinstance(values, DataFrame): if not (values.columns.is_unique and values.index.is_unique): raise ValueError("cannot compute isin with " "a duplicate axis.") return self.eq(values.reindex_like(self)) else: if not is_list_like(values): raise TypeError("only list-like or dict-like objects are " "allowed to be passed to DataFrame.isin(), " "you passed a " "{0!r}".format(type(values).__name__)) return DataFrame( algorithms.isin(self.values.ravel(), values).reshape(self.shape), self.index, self.columns) # ---------------------------------------------------------------------- # Add plotting methods to DataFrame plot = CachedAccessor("plot", pandas.plotting.FramePlotMethods) hist = pandas.plotting.hist_frame boxplot = pandas.plotting.boxplot_frame sparse = CachedAccessor("sparse", SparseFrameAccessor) DataFrame._setup_axes(['index', 'columns'], info_axis=1, stat_axis=0, axes_are_reversed=True, aliases={'rows': 0}, docs={ 'index': 'The index (row labels) of the DataFrame.', 'columns': 'The column labels of the DataFrame.'}) DataFrame._add_numeric_operations() DataFrame._add_series_or_dataframe_operations() ops.add_flex_arithmetic_methods(DataFrame) ops.add_special_arithmetic_methods(DataFrame) def _from_nested_dict(data): # TODO: this should be seriously cythonized new_data = OrderedDict() for index, s in data.items(): for col, v in s.items(): new_data[col] = new_data.get(col, OrderedDict()) new_data[col][index] = v return new_data def _put_str(s, space): return '{s}'.format(s=s)[:space].ljust(space)
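The reductions and lookups implemented above surface through the public DataFrame aggregation API. A minimal usage sketch on toy data, using only standard pandas calls (not taken from this file):

import numpy as np
import pandas as pd

df = pd.DataFrame({'a': [1, 2, 3, 4], 'b': [1.0, np.nan, 2.0, 2.0]})

# named reductions such as sum/mean funnel into DataFrame._reduce
print(df.sum())          # column-wise; axis=0 is the default
print(df.mean(axis=1))   # row-wise reduction

# nunique/idxmax/mode are the thin wrappers defined above
print(df.nunique())      # a: 4, b: 2 (NaN dropped by default)
print(df.idxmax())       # label of the first maximum per column
print(df.mode())         # most frequent value(s) per column, NaN-padded

# quantile dispatches through the block manager; isin is elementwise
print(df.quantile(0.5))
print(df.isin([1, 2]))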
from datetime import timedelta import numpy as np import pytest import pandas as pd from pandas import Timedelta, TimedeltaIndex, timedelta_range, to_timedelta from pandas.core.arrays import TimedeltaArray import pandas.util.testing as tm class TestTimedeltaIndex: def test_verify_integrity_deprecated(self): # GH#23919 with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(['1 Day'], verify_integrity=False) def test_range_kwargs_deprecated(self): # GH#23919 with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(start='1 Day', end='3 Days', freq='D') def test_int64_nocopy(self): # GH#23539 check that a copy isn't made when we pass int64 data # and copy=False arr = np.arange(10, dtype=np.int64) tdi = TimedeltaIndex(arr, copy=False) assert tdi._data._data.base is arr def test_infer_from_tdi(self): # GH#23539 # fast-path for inferring a frequency if the passed data already # has one tdi = pd.timedelta_range('1 second', periods=10**7, freq='1s') result = pd.TimedeltaIndex(tdi, freq='infer') assert result.freq == tdi.freq # check that inferred_freq was not called by checking that the # value has not been cached assert "inferred_freq" not in getattr(result, "_cache", {}) def test_infer_from_tdi_mismatch(self): # GH#23539 # fast-path for invalidating a frequency if the passed data already # has one and it does not match the `freq` input tdi = pd.timedelta_range('1 second', periods=100, freq='1s') msg = ("Inferred frequency .* from passed values does " "not conform to passed frequency") with pytest.raises(ValueError, match=msg): TimedeltaIndex(tdi, freq='D') with pytest.raises(ValueError, match=msg): # GH#23789 TimedeltaArray(tdi, freq='D') def test_dt64_data_invalid(self): # GH#23539 # passing tz-aware DatetimeIndex raises, naive or ndarray[datetime64] # does not yet, but will in the future dti = pd.date_range('2016-01-01', periods=3) msg = "cannot be converted to timedelta64" with pytest.raises(TypeError, match=msg): TimedeltaIndex(dti.tz_localize('Europe/Brussels')) with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(dti) with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(np.asarray(dti)) def test_float64_ns_rounded(self): # GH#23539 without specifying a unit, floats are regarded as nanos, # and fractional portions are truncated tdi = TimedeltaIndex([2.3, 9.7]) expected = TimedeltaIndex([2, 9]) tm.assert_index_equal(tdi, expected) # integral floats are non-lossy tdi = TimedeltaIndex([2.0, 9.0]) expected = TimedeltaIndex([2, 9]) tm.assert_index_equal(tdi, expected) # NaNs get converted to NaT tdi = TimedeltaIndex([2.0, np.nan]) expected = TimedeltaIndex([pd.Timedelta(nanoseconds=2), pd.NaT]) tm.assert_index_equal(tdi, expected) def test_float64_unit_conversion(self): # GH#23539 tdi = TimedeltaIndex([1.5, 2.25], unit='D') expected = TimedeltaIndex([Timedelta(days=1.5), Timedelta(days=2.25)]) tm.assert_index_equal(tdi, expected) def test_construction_base_constructor(self): arr = [pd.Timedelta('1 days'), pd.NaT, pd.Timedelta('3 days')] tm.assert_index_equal(pd.Index(arr), pd.TimedeltaIndex(arr)) tm.assert_index_equal(pd.Index(np.array(arr)), pd.TimedeltaIndex(np.array(arr))) arr = [np.nan, pd.NaT, pd.Timedelta('1 days')] tm.assert_index_equal(pd.Index(arr), pd.TimedeltaIndex(arr)) tm.assert_index_equal(pd.Index(np.array(arr)), pd.TimedeltaIndex(np.array(arr))) def test_constructor(self): expected = TimedeltaIndex(['1 days', '1 days 00:00:05', '2 days', '2 days 00:00:02', '0 days 00:00:03']) result = TimedeltaIndex(['1 days', '1 days, 00:00:05', np.timedelta64( 
2, 'D'), timedelta(days=2, seconds=2), pd.offsets.Second(3)]) tm.assert_index_equal(result, expected) # unicode result = TimedeltaIndex(['1 days', '1 days, 00:00:05', np.timedelta64( 2, 'D'), timedelta(days=2, seconds=2), pd.offsets.Second(3)]) expected = TimedeltaIndex(['0 days 00:00:00', '0 days 00:00:01', '0 days 00:00:02']) tm.assert_index_equal(TimedeltaIndex(range(3), unit='s'), expected) expected = TimedeltaIndex(['0 days 00:00:00', '0 days 00:00:05', '0 days 00:00:09']) tm.assert_index_equal(TimedeltaIndex([0, 5, 9], unit='s'), expected) expected = TimedeltaIndex( ['0 days 00:00:00.400', '0 days 00:00:00.450', '0 days 00:00:01.200']) tm.assert_index_equal(TimedeltaIndex([400, 450, 1200], unit='ms'), expected) def test_constructor_iso(self): # GH #21877 expected = timedelta_range('1s', periods=9, freq='s') durations = ['P0DT0H0M{}S'.format(i) for i in range(1, 10)] result = to_timedelta(durations) tm.assert_index_equal(result, expected) def test_constructor_coverage(self): rng = timedelta_range('1 days', periods=10.5) exp = timedelta_range('1 days', periods=10) tm.assert_index_equal(rng, exp) msg = 'periods must be a number, got foo' with pytest.raises(TypeError, match=msg): timedelta_range(start='1 days', periods='foo', freq='D') with pytest.raises(ValueError): with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(start='1 days', end='10 days') with pytest.raises(TypeError): TimedeltaIndex('1 days') # generator expression gen = (timedelta(i) for i in range(10)) result = TimedeltaIndex(gen) expected = TimedeltaIndex([timedelta(i) for i in range(10)]) tm.assert_index_equal(result, expected) # NumPy string array strings = np.array(['1 days', '2 days', '3 days']) result = TimedeltaIndex(strings) expected = to_timedelta([1, 2, 3], unit='d') tm.assert_index_equal(result, expected) from_ints = TimedeltaIndex(expected.asi8) tm.assert_index_equal(from_ints, expected) # non-conforming freq msg = ("Inferred frequency None from passed values does not conform to" " passed frequency D") with pytest.raises(ValueError, match=msg): TimedeltaIndex(['1 days', '2 days', '4 days'], freq='D') msg = ("Of the four parameters: start, end, periods, and freq, exactly" " three must be specified") with pytest.raises(ValueError, match=msg): timedelta_range(periods=10, freq='D') def test_constructor_name(self): idx = timedelta_range(start='1 days', periods=1, freq='D', name='TEST') assert idx.name == 'TEST' # GH10025 idx2 = TimedeltaIndex(idx, name='something else') assert idx2.name == 'something else' def test_constructor_no_precision_warns(self): # GH-24753, GH-24739 expected = pd.TimedeltaIndex(['2000'], dtype='timedelta64[ns]') # we set the stacklevel for DatetimeIndex with tm.assert_produces_warning(FutureWarning): result = pd.TimedeltaIndex(['2000'], dtype='timedelta64') tm.assert_index_equal(result, expected) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = pd.Index(['2000'], dtype='timedelta64') tm.assert_index_equal(result, expected) def test_constructor_wrong_precision_raises(self): with pytest.raises(ValueError): pd.TimedeltaIndex(['2000'], dtype='timedelta64[us]')
cbertinato/pandas
pandas/tests/indexes/timedeltas/test_construction.py
pandas/core/frame.py
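The TimedeltaIndex constructor behaviors covered by the test file above can be reproduced interactively. A small sketch in the pandas version these tests target (later releases deprecate some of these input paths):

import numpy as np
import pandas as pd

# without a unit, floats are read as nanoseconds and truncated (GH#23539)
print(pd.TimedeltaIndex([2.3, 9.7]))            # 2ns, 9ns

# with an explicit unit, fractional values convert losslessly
print(pd.TimedeltaIndex([1.5, 2.25], unit='D'))

# heterogeneous inputs are normalized to timedelta64[ns]
print(pd.TimedeltaIndex(['1 days', np.timedelta64(2, 'D'),
                         pd.offsets.Second(3)]))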
""" manage PyTables query interface via Expressions """ import ast from functools import partial import numpy as np from pandas._libs.tslibs import Timedelta, Timestamp from pandas.compat.chainmap import DeepChainMap from pandas.core.dtypes.common import is_list_like import pandas as pd from pandas.core.base import StringMixin import pandas.core.common as com from pandas.core.computation import expr, ops from pandas.core.computation.common import _ensure_decoded from pandas.core.computation.expr import BaseExprVisitor from pandas.core.computation.ops import UndefinedVariableError, is_term from pandas.io.formats.printing import pprint_thing, pprint_thing_encoded class Scope(expr.Scope): __slots__ = 'queryables', def __init__(self, level, global_dict=None, local_dict=None, queryables=None): super().__init__(level + 1, global_dict=global_dict, local_dict=local_dict) self.queryables = queryables or dict() class Term(ops.Term): def __new__(cls, name, env, side=None, encoding=None): klass = Constant if not isinstance(name, str) else cls supr_new = StringMixin.__new__ return supr_new(klass) def __init__(self, name, env, side=None, encoding=None): super().__init__(name, env, side=side, encoding=encoding) def _resolve_name(self): # must be a queryables if self.side == 'left': if self.name not in self.env.queryables: raise NameError('name {name!r} is not defined' .format(name=self.name)) return self.name # resolve the rhs (and allow it to be None) try: return self.env.resolve(self.name, is_local=False) except UndefinedVariableError: return self.name # read-only property overwriting read/write property @property # type: ignore def value(self): return self._value class Constant(Term): def __init__(self, value, env, side=None, encoding=None): super().__init__(value, env, side=side, encoding=encoding) def _resolve_name(self): return self._name class BinOp(ops.BinOp): _max_selectors = 31 def __init__(self, op, lhs, rhs, queryables, encoding): super().__init__(op, lhs, rhs) self.queryables = queryables self.encoding = encoding self.filter = None self.condition = None def _disallow_scalar_only_bool_ops(self): pass def prune(self, klass): def pr(left, right): """ create and return a new specialized BinOp from myself """ if left is None: return right elif right is None: return left k = klass if isinstance(left, ConditionBinOp): if (isinstance(left, ConditionBinOp) and isinstance(right, ConditionBinOp)): k = JointConditionBinOp elif isinstance(left, k): return left elif isinstance(right, k): return right elif isinstance(left, FilterBinOp): if (isinstance(left, FilterBinOp) and isinstance(right, FilterBinOp)): k = JointFilterBinOp elif isinstance(left, k): return left elif isinstance(right, k): return right return k(self.op, left, right, queryables=self.queryables, encoding=self.encoding).evaluate() left, right = self.lhs, self.rhs if is_term(left) and is_term(right): res = pr(left.value, right.value) elif not is_term(left) and is_term(right): res = pr(left.prune(klass), right.value) elif is_term(left) and not is_term(right): res = pr(left.value, right.prune(klass)) elif not (is_term(left) or is_term(right)): res = pr(left.prune(klass), right.prune(klass)) return res def conform(self, rhs): """ inplace conform rhs """ if not is_list_like(rhs): rhs = [rhs] if isinstance(rhs, np.ndarray): rhs = rhs.ravel() return rhs @property def is_valid(self): """ return True if this is a valid field """ return self.lhs in self.queryables @property def is_in_table(self): """ return True if this is a valid column name for 
generation (e.g. an actual column in the table) """ return self.queryables.get(self.lhs) is not None @property def kind(self): """ the kind of my field """ return getattr(self.queryables.get(self.lhs), 'kind', None) @property def meta(self): """ the meta of my field """ return getattr(self.queryables.get(self.lhs), 'meta', None) @property def metadata(self): """ the metadata of my field """ return getattr(self.queryables.get(self.lhs), 'metadata', None) def generate(self, v): """ create and return the op string for this TermValue """ val = v.tostring(self.encoding) return "({lhs} {op} {val})".format(lhs=self.lhs, op=self.op, val=val) def convert_value(self, v): """ convert the expression that is in the term to something that is accepted by pytables """ def stringify(value): if self.encoding is not None: encoder = partial(pprint_thing_encoded, encoding=self.encoding) else: encoder = pprint_thing return encoder(value) kind = _ensure_decoded(self.kind) meta = _ensure_decoded(self.meta) if kind == 'datetime64' or kind == 'datetime': if isinstance(v, (int, float)): v = stringify(v) v = _ensure_decoded(v) v = Timestamp(v) if v.tz is not None: v = v.tz_convert('UTC') return TermValue(v, v.value, kind) elif kind == 'timedelta64' or kind == 'timedelta': v = Timedelta(v, unit='s').value return TermValue(int(v), v, kind) elif meta == 'category': metadata = com.values_from_object(self.metadata) result = metadata.searchsorted(v, side='left') # result returns 0 if v is first element or if v is not in metadata # check that metadata contains v if not result and v not in metadata: result = -1 return TermValue(result, result, 'integer') elif kind == 'integer': v = int(float(v)) return TermValue(v, v, kind) elif kind == 'float': v = float(v) return TermValue(v, v, kind) elif kind == 'bool': if isinstance(v, str): v = v.strip().lower() not in ['false', 'f', 'no', 'n', 'none', '0', '[]', '{}', ''] else: v = bool(v) return TermValue(v, v, kind) elif isinstance(v, str): # string quoting return TermValue(v, stringify(v), 'string') else: raise TypeError("Cannot compare {v} of type {typ} to {kind} column" .format(v=v, typ=type(v), kind=kind)) def convert_values(self): pass class FilterBinOp(BinOp): def __str__(self): return pprint_thing("[Filter : [{lhs}] -> [{op}]]" .format(lhs=self.filter[0], op=self.filter[1])) def invert(self): """ invert the filter """ if self.filter is not None: f = list(self.filter) f[1] = self.generate_filter_op(invert=True) self.filter = tuple(f) return self def format(self): """ return the actual filter format """ return [self.filter] def evaluate(self): if not self.is_valid: raise ValueError("query term is not valid [{slf}]" .format(slf=self)) rhs = self.conform(self.rhs) values = [TermValue(v, v, self.kind).value for v in rhs] if self.is_in_table: # if too many values to create the expression, use a filter instead if self.op in ['==', '!='] and len(values) > self._max_selectors: filter_op = self.generate_filter_op() self.filter = ( self.lhs, filter_op, pd.Index(values)) return self return None # equality conditions if self.op in ['==', '!=']: filter_op = self.generate_filter_op() self.filter = ( self.lhs, filter_op, pd.Index(values)) else: raise TypeError("passing a filterable condition to a non-table " "indexer [{slf}]".format(slf=self)) return self def generate_filter_op(self, invert=False): if (self.op == '!=' and not invert) or (self.op == '==' and invert): return lambda axis, vals: ~axis.isin(vals) else: return lambda axis, vals: axis.isin(vals) class JointFilterBinOp(FilterBinOp):
def format(self): raise NotImplementedError("unable to collapse Joint Filters") def evaluate(self): return self class ConditionBinOp(BinOp): def __str__(self): return pprint_thing("[Condition : [{cond}]]" .format(cond=self.condition)) def invert(self): """ invert the condition """ # if self.condition is not None: # self.condition = "~(%s)" % self.condition # return self raise NotImplementedError("cannot use an invert condition when " "passing to numexpr") def format(self): """ return the actual ne format """ return self.condition def evaluate(self): if not self.is_valid: raise ValueError("query term is not valid [{slf}]" .format(slf=self)) # convert values if we are in the table if not self.is_in_table: return None rhs = self.conform(self.rhs) values = [self.convert_value(v) for v in rhs] # equality conditions if self.op in ['==', '!=']: # too many values to create the expression? if len(values) <= self._max_selectors: vs = [self.generate(v) for v in values] self.condition = "({cond})".format(cond=' | '.join(vs)) # use a filter after reading else: return None else: self.condition = self.generate(values[0]) return self class JointConditionBinOp(ConditionBinOp): def evaluate(self): self.condition = "({lhs} {op} {rhs})".format(lhs=self.lhs.condition, op=self.op, rhs=self.rhs.condition) return self class UnaryOp(ops.UnaryOp): def prune(self, klass): if self.op != '~': raise NotImplementedError("UnaryOp only support invert type ops") operand = self.operand operand = operand.prune(klass) if operand is not None: if issubclass(klass, ConditionBinOp): if operand.condition is not None: return operand.invert() elif issubclass(klass, FilterBinOp): if operand.filter is not None: return operand.invert() return None _op_classes = {'unary': UnaryOp} class ExprVisitor(BaseExprVisitor): const_type = Constant term_type = Term def __init__(self, env, engine, parser, **kwargs): super().__init__(env, engine, parser) for bin_op in self.binary_ops: bin_node = self.binary_op_nodes_map[bin_op] setattr(self, 'visit_{node}'.format(node=bin_node), lambda node, bin_op=bin_op: partial(BinOp, bin_op, **kwargs)) def visit_UnaryOp(self, node, **kwargs): if isinstance(node.op, (ast.Not, ast.Invert)): return UnaryOp('~', self.visit(node.operand)) elif isinstance(node.op, ast.USub): return self.const_type(-self.visit(node.operand).value, self.env) elif isinstance(node.op, ast.UAdd): raise NotImplementedError('Unary addition not supported') def visit_Index(self, node, **kwargs): return self.visit(node.value).value def visit_Assign(self, node, **kwargs): cmpr = ast.Compare(ops=[ast.Eq()], left=node.targets[0], comparators=[node.value]) return self.visit(cmpr) def visit_Subscript(self, node, **kwargs): # only allow simple subscripts value = self.visit(node.value) slobj = self.visit(node.slice) try: value = value.value except AttributeError: pass try: return self.const_type(value[slobj], self.env) except TypeError: raise ValueError("cannot subscript {value!r} with " "{slobj!r}".format(value=value, slobj=slobj)) def visit_Attribute(self, node, **kwargs): attr = node.attr value = node.value ctx = node.ctx.__class__ if ctx == ast.Load: # resolve the value resolved = self.visit(value) # try to get the value to see if we are another expression try: resolved = resolved.value except (AttributeError): pass try: return self.term_type(getattr(resolved, attr), self.env) except AttributeError: # something like datetime.datetime where scope is overridden if isinstance(value, ast.Name) and value.id == attr: return resolved raise 
ValueError("Invalid Attribute context {name}" .format(name=ctx.__name__)) def translate_In(self, op): return ast.Eq() if isinstance(op, ast.In) else op def _rewrite_membership_op(self, node, left, right): return self.visit(node.op), node.op, left, right def _validate_where(w): """ Validate that the where statement is of the right type. The type may either be String, Expr, or list-like of Exprs. Parameters ---------- w : String term expression, Expr, or list-like of Exprs. Returns ------- where : The original where clause if the check was successful. Raises ------ TypeError : An invalid data type was passed in for w (e.g. dict). """ if not (isinstance(w, (Expr, str)) or is_list_like(w)): raise TypeError("where must be passed as a string, Expr, " "or list-like of Exprs") return w class Expr(expr.Expr): """ hold a pytables like expression, comprised of possibly multiple 'terms' Parameters ---------- where : string term expression, Expr, or list-like of Exprs queryables : a "kinds" map (dict of column name -> kind), or None if column is non-indexable encoding : an encoding that will encode the query terms Returns ------- an Expr object Examples -------- 'index>=date' "columns=['A', 'D']" 'columns=A' 'columns==A' "~(columns=['A','B'])" 'index>df.index[3] & string="bar"' '(index>df.index[3] & index<=df.index[6]) | string="bar"' "ts>=Timestamp('2012-02-01')" "major_axis>=20130101" """ def __init__(self, where, queryables=None, encoding=None, scope_level=0): where = _validate_where(where) self.encoding = encoding self.condition = None self.filter = None self.terms = None self._visitor = None # capture the environment if needed local_dict = DeepChainMap() if isinstance(where, Expr): local_dict = where.env.scope where = where.expr elif isinstance(where, (list, tuple)): for idx, w in enumerate(where): if isinstance(w, Expr): local_dict = w.env.scope else: w = _validate_where(w) where[idx] = w where = ' & '.join(map('({})'.format, com.flatten(where))) # noqa self.expr = where self.env = Scope(scope_level + 1, local_dict=local_dict) if queryables is not None and isinstance(self.expr, str): self.env.queryables.update(queryables) self._visitor = ExprVisitor(self.env, queryables=queryables, parser='pytables', engine='pytables', encoding=encoding) self.terms = self.parse() def __str__(self): if self.terms is not None: return pprint_thing(self.terms) return pprint_thing(self.expr) def evaluate(self): """ create and return the numexpr condition and filter """ try: self.condition = self.terms.prune(ConditionBinOp) except AttributeError: raise ValueError("cannot process expression [{expr}], [{slf}] " "is not a valid condition".format(expr=self.expr, slf=self)) try: self.filter = self.terms.prune(FilterBinOp) except AttributeError: raise ValueError("cannot process expression [{expr}], [{slf}] " "is not a valid filter".format(expr=self.expr, slf=self)) return self.condition, self.filter class TermValue: """ hold a term value the we use to construct a condition/filter """ def __init__(self, value, converted, kind): self.value = value self.converted = converted self.kind = kind def tostring(self, encoding): """ quote the string if not encoded else encode and return """ if self.kind == 'string': if encoding is not None: return self.converted return '"{converted}"'.format(converted=self.converted) elif self.kind == 'float': # python 2 str(float) is not always # round-trippable so use repr() return repr(self.converted) return self.converted def maybe_expression(s): """ loose checking if s is a pytables-acceptable 
expression """ if not isinstance(s, str): return False ops = ExprVisitor.binary_ops + ExprVisitor.unary_ops + ('=',) # make sure we have an op at least return any(op in s for op in ops)
cbertinato/pandas
pandas/tests/indexes/timedeltas/test_construction.py
pandas/core/computation/pytables.py
import numpy as np import pytest import pandas as pd from pandas import Index, MultiIndex @pytest.fixture def idx(): # a MultiIndex used to test the general functionality of this object major_axis = Index(['foo', 'bar', 'baz', 'qux']) minor_axis = Index(['one', 'two']) major_codes = np.array([0, 0, 1, 2, 3, 3]) minor_codes = np.array([0, 1, 0, 1, 0, 1]) index_names = ['first', 'second'] mi = MultiIndex(levels=[major_axis, minor_axis], codes=[major_codes, minor_codes], names=index_names, verify_integrity=False) return mi @pytest.fixture def idx_dup(): # like idx above, but with duplicated codes major_axis = Index(['foo', 'bar', 'baz', 'qux']) minor_axis = Index(['one', 'two']) major_codes = np.array([0, 0, 1, 0, 1, 1]) minor_codes = np.array([0, 1, 0, 1, 0, 1]) index_names = ['first', 'second'] mi = MultiIndex(levels=[major_axis, minor_axis], codes=[major_codes, minor_codes], names=index_names, verify_integrity=False) return mi @pytest.fixture def index_names(): # names that match those in the idx fixture, for testing equality of # names assigned to the idx return ['first', 'second'] @pytest.fixture def holder(): # the MultiIndex constructor, used as the basis for pickle-compatibility tests return MultiIndex @pytest.fixture def compat_props(): # a MultiIndex must have these properties associated with it return ['shape', 'ndim', 'size'] @pytest.fixture def narrow_multi_index(): """ Return a MultiIndex that is narrower than the display (<80 characters). """ n = 1000 ci = pd.CategoricalIndex(list('a' * n) + (['abc'] * n)) dti = pd.date_range('2000-01-01', freq='s', periods=n * 2) return pd.MultiIndex.from_arrays([ci, ci.codes + 9, dti], names=['a', 'b', 'dti']) @pytest.fixture def wide_multi_index(): """ Return a MultiIndex that is wider than the display (>80 characters). """ n = 1000 ci = pd.CategoricalIndex(list('a' * n) + (['abc'] * n)) dti = pd.date_range('2000-01-01', freq='s', periods=n * 2) levels = [ci, ci.codes + 9, dti, dti, dti] names = ['a', 'b', 'dti_1', 'dti_2', 'dti_3'] return pd.MultiIndex.from_arrays(levels, names=names)
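A hypothetical test showing how pytest would consume these fixtures by name (the test itself is illustrative, not from this conftest; the counts follow the code arrays above):

def test_idx_fixture_basics(idx, index_names, compat_props):
    # six code positions per level -> six entries in the MultiIndex
    assert len(idx) == 6
    assert idx.nlevels == 2
    assert list(idx.names) == index_names
    for prop in compat_props:
        assert hasattr(idx, prop)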
cbertinato/pandas
pandas/tests/indexes/timedeltas/test_construction.py
pandas/tests/indexes/multi/conftest.py
import numpy as np import pytest from pandas._libs.tslib import iNaT from pandas.core.dtypes.dtypes import CategoricalDtype import pandas as pd from pandas import ( CategoricalIndex, DatetimeIndex, Index, Int64Index, IntervalIndex, MultiIndex, PeriodIndex, RangeIndex, Series, TimedeltaIndex, UInt64Index, isna) from pandas.core.indexes.base import InvalidIndexError from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin import pandas.util.testing as tm class Base: """ base class for index sub-class tests """ _holder = None _compat_props = ['shape', 'ndim', 'size', 'nbytes'] def setup_indices(self): for name, idx in self.indices.items(): setattr(self, name, idx) def test_pickle_compat_construction(self): # need an object to create with msg = (r"Index\(\.\.\.\) must be called with a collection of some" r" kind, None was passed|" r"__new__\(\) missing 1 required positional argument: 'data'|" r"__new__\(\) takes at least 2 arguments \(1 given\)") with pytest.raises(TypeError, match=msg): self._holder() def test_to_series(self): # assert that we are creating a copy of the index idx = self.create_index() s = idx.to_series() assert s.values is not idx.values assert s.index is not idx assert s.name == idx.name def test_to_series_with_arguments(self): # GH18699 # index kwarg idx = self.create_index() s = idx.to_series(index=idx) assert s.values is not idx.values assert s.index is idx assert s.name == idx.name # name kwarg idx = self.create_index() s = idx.to_series(name='__test') assert s.values is not idx.values assert s.index is not idx assert s.name != idx.name @pytest.mark.parametrize("name", [None, "new_name"]) def test_to_frame(self, name): # see GH-15230, GH-22580 idx = self.create_index() if name: idx_name = name else: idx_name = idx.name or 0 df = idx.to_frame(name=idx_name) assert df.index is idx assert len(df.columns) == 1 assert df.columns[0] == idx_name assert df[idx_name].values is not idx.values df = idx.to_frame(index=False, name=idx_name) assert df.index is not idx def test_to_frame_datetime_tz(self): # GH 25809 idx = pd.date_range(start='2019-01-01', end='2019-01-30', freq='D') idx = idx.tz_localize('UTC') result = idx.to_frame() expected = pd.DataFrame(idx, index=idx) tm.assert_frame_equal(result, expected) def test_shift(self): # GH8083 test the base class for shift idx = self.create_index() msg = "Not supported for type {}".format(type(idx).__name__) with pytest.raises(NotImplementedError, match=msg): idx.shift(1) with pytest.raises(NotImplementedError, match=msg): idx.shift(1, 2) def test_create_index_existing_name(self): # GH11193, when an existing index is passed, and a new name is not # specified, the new index should inherit the previous object name expected = self.create_index() if not isinstance(expected, MultiIndex): expected.name = 'foo' result = pd.Index(expected) tm.assert_index_equal(result, expected) result = pd.Index(expected, name='bar') expected.name = 'bar' tm.assert_index_equal(result, expected) else: expected.names = ['foo', 'bar'] result = pd.Index(expected) tm.assert_index_equal( result, Index(Index([('foo', 'one'), ('foo', 'two'), ('bar', 'one'), ('baz', 'two'), ('qux', 'one'), ('qux', 'two')], dtype='object'), names=['foo', 'bar'])) result = pd.Index(expected, names=['A', 'B']) tm.assert_index_equal( result, Index(Index([('foo', 'one'), ('foo', 'two'), ('bar', 'one'), ('baz', 'two'), ('qux', 'one'), ('qux', 'two')], dtype='object'), names=['A', 'B'])) def test_numeric_compat(self): idx = self.create_index() with pytest.raises(TypeError, 
match="cannot perform __mul__"): idx * 1 with pytest.raises(TypeError, match="cannot perform __rmul__"): 1 * idx div_err = "cannot perform __truediv__" with pytest.raises(TypeError, match=div_err): idx / 1 div_err = div_err.replace(' __', ' __r') with pytest.raises(TypeError, match=div_err): 1 / idx with pytest.raises(TypeError, match="cannot perform __floordiv__"): idx // 1 with pytest.raises(TypeError, match="cannot perform __rfloordiv__"): 1 // idx def test_logical_compat(self): idx = self.create_index() with pytest.raises(TypeError, match='cannot perform all'): idx.all() with pytest.raises(TypeError, match='cannot perform any'): idx.any() def test_boolean_context_compat(self): # boolean context compat idx = self.create_index() with pytest.raises(ValueError, match='The truth value of a'): if idx: pass def test_reindex_base(self): idx = self.create_index() expected = np.arange(idx.size, dtype=np.intp) actual = idx.get_indexer(idx) tm.assert_numpy_array_equal(expected, actual) with pytest.raises(ValueError, match='Invalid fill method'): idx.get_indexer(idx, method='invalid') def test_get_indexer_consistency(self): # See GH 16819 for name, index in self.indices.items(): if isinstance(index, IntervalIndex): continue if index.is_unique or isinstance(index, CategoricalIndex): indexer = index.get_indexer(index[0:2]) assert isinstance(indexer, np.ndarray) assert indexer.dtype == np.intp else: e = "Reindexing only valid with uniquely valued Index objects" with pytest.raises(InvalidIndexError, match=e): index.get_indexer(index[0:2]) indexer, _ = index.get_indexer_non_unique(index[0:2]) assert isinstance(indexer, np.ndarray) assert indexer.dtype == np.intp def test_ndarray_compat_properties(self): idx = self.create_index() assert idx.T.equals(idx) assert idx.transpose().equals(idx) values = idx.values for prop in self._compat_props: assert getattr(idx, prop) == getattr(values, prop) # test for validity idx.nbytes idx.values.nbytes def test_repr_roundtrip(self): idx = self.create_index() tm.assert_index_equal(eval(repr(idx)), idx) def test_str(self): # test the string repr idx = self.create_index() idx.name = 'foo' assert "'foo'" in str(idx) assert idx.__class__.__name__ in str(idx) def test_repr_max_seq_item_setting(self): # GH10182 idx = self.create_index() idx = idx.repeat(50) with pd.option_context("display.max_seq_items", None): repr(idx) assert '...' not in str(idx) def test_copy_name(self): # gh-12309: Check that the "name" argument # passed at initialization is honored. for name, index in self.indices.items(): if isinstance(index, MultiIndex): continue first = index.__class__(index, copy=True, name='mario') second = first.__class__(first, copy=False) # Even though "copy=False", we want a new object. assert first is not second # Not using tm.assert_index_equal() since names differ. 
assert index.equals(first) assert first.name == 'mario' assert second.name == 'mario' s1 = Series(2, index=first) s2 = Series(3, index=second[:-1]) if not isinstance(index, CategoricalIndex): # See gh-13365 s3 = s1 * s2 assert s3.index.name == 'mario' def test_ensure_copied_data(self): # Check the "copy" argument of each Index.__new__ is honoured # GH12309 for name, index in self.indices.items(): init_kwargs = {} if isinstance(index, PeriodIndex): # Needs "freq" specification: init_kwargs['freq'] = index.freq elif isinstance(index, (RangeIndex, MultiIndex, CategoricalIndex)): # RangeIndex cannot be initialized from data # MultiIndex and CategoricalIndex are tested separately continue index_type = index.__class__ result = index_type(index.values, copy=True, **init_kwargs) tm.assert_index_equal(index, result) tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='copy') if isinstance(index, PeriodIndex): # .values an object array of Period, thus copied result = index_type(ordinal=index.asi8, copy=False, **init_kwargs) tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='same') elif isinstance(index, IntervalIndex): # checked in test_interval.py pass else: result = index_type(index.values, copy=False, **init_kwargs) tm.assert_numpy_array_equal(index.values, result.values, check_same='same') tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='same') def test_memory_usage(self): for name, index in self.indices.items(): result = index.memory_usage() if len(index): index.get_loc(index[0]) result2 = index.memory_usage() result3 = index.memory_usage(deep=True) # RangeIndex, IntervalIndex # don't have engines if not isinstance(index, (RangeIndex, IntervalIndex)): assert result2 > result if index.inferred_type == 'object': assert result3 > result2 else: # we report 0 for no-length assert result == 0 def test_argsort(self): for k, ind in self.indices.items(): # separately tested if k in ['catIndex']: continue result = ind.argsort() expected = np.array(ind).argsort() tm.assert_numpy_array_equal(result, expected, check_dtype=False) def test_numpy_argsort(self): for k, ind in self.indices.items(): result = np.argsort(ind) expected = ind.argsort() tm.assert_numpy_array_equal(result, expected) # these are the only two types that perform # pandas compatibility input validation - the # rest already perform separate (or no) such # validation via their 'values' attribute as # defined in pandas.core.indexes/base.py - they # cannot be changed at the moment due to # backwards compatibility concerns if isinstance(type(ind), (CategoricalIndex, RangeIndex)): msg = "the 'axis' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, axis=1) msg = "the 'kind' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, kind='mergesort') msg = "the 'order' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, order=('a', 'b')) def test_take(self): indexer = [4, 3, 0, 2] for k, ind in self.indices.items(): # separate if k in ['boolIndex', 'tuples', 'empty']: continue result = ind.take(indexer) expected = ind[indexer] assert result.equals(expected) if not isinstance(ind, (DatetimeIndex, PeriodIndex, TimedeltaIndex)): # GH 10791 with pytest.raises(AttributeError): ind.freq def test_take_invalid_kwargs(self): idx = self.create_index() indices = [1, 2] msg = r"take\(\) got an unexpected keyword argument 'foo'" with 
pytest.raises(TypeError, match=msg): idx.take(indices, foo=2) msg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=msg): idx.take(indices, out=indices) msg = "the 'mode' parameter is not supported" with pytest.raises(ValueError, match=msg): idx.take(indices, mode='clip') def test_repeat(self): rep = 2 i = self.create_index() expected = pd.Index(i.values.repeat(rep), name=i.name) tm.assert_index_equal(i.repeat(rep), expected) i = self.create_index() rep = np.arange(len(i)) expected = pd.Index(i.values.repeat(rep), name=i.name) tm.assert_index_equal(i.repeat(rep), expected) def test_numpy_repeat(self): rep = 2 i = self.create_index() expected = i.repeat(rep) tm.assert_index_equal(np.repeat(i, rep), expected) msg = "the 'axis' parameter is not supported" with pytest.raises(ValueError, match=msg): np.repeat(i, rep, axis=0) @pytest.mark.parametrize('klass', [list, tuple, np.array, Series]) def test_where(self, klass): i = self.create_index() cond = [True] * len(i) result = i.where(klass(cond)) expected = i tm.assert_index_equal(result, expected) cond = [False] + [True] * len(i[1:]) expected = pd.Index([i._na_value] + i[1:].tolist(), dtype=i.dtype) result = i.where(klass(cond)) tm.assert_index_equal(result, expected) @pytest.mark.parametrize("case", [0.5, "xxx"]) @pytest.mark.parametrize("method", ["intersection", "union", "difference", "symmetric_difference"]) def test_set_ops_error_cases(self, case, method): for name, idx in self.indices.items(): # non-iterable input msg = "Input must be Index or array-like" with pytest.raises(TypeError, match=msg): getattr(idx, method)(case) def test_intersection_base(self): for name, idx in self.indices.items(): first = idx[:5] second = idx[:3] intersect = first.intersection(second) if isinstance(idx, CategoricalIndex): pass else: assert tm.equalContents(intersect, second) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.intersection(case) assert tm.equalContents(result, second) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.intersection([1, 2, 3]) def test_union_base(self): for name, idx in self.indices.items(): first = idx[3:] second = idx[:5] everything = idx union = first.union(second) assert tm.equalContents(union, everything) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.union(case) assert tm.equalContents(result, everything) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.union([1, 2, 3]) @pytest.mark.parametrize("sort", [None, False]) def test_difference_base(self, sort): for name, idx in self.indices.items(): first = idx[2:] second = idx[:4] answer = idx[4:] result = first.difference(second, sort) if isinstance(idx, CategoricalIndex): pass else: assert tm.equalContents(result, answer) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass elif isinstance(idx, (DatetimeIndex, TimedeltaIndex)): assert result.__class__ == answer.__class__ tm.assert_numpy_array_equal(result.sort_values().asi8, answer.sort_values().asi8) else: result = first.difference(case, sort) assert tm.equalContents(result, answer) if isinstance(idx, MultiIndex): msg = 
"other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.difference([1, 2, 3], sort) def test_symmetric_difference(self): for name, idx in self.indices.items(): first = idx[1:] second = idx[:-1] if isinstance(idx, CategoricalIndex): pass else: answer = idx[[0, -1]] result = first.symmetric_difference(second) assert tm.equalContents(result, answer) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.symmetric_difference(case) assert tm.equalContents(result, answer) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.symmetric_difference([1, 2, 3]) def test_insert_base(self): for name, idx in self.indices.items(): result = idx[1:4] if not len(idx): continue # test 0th element assert idx[0:4].equals(result.insert(0, idx[0])) def test_delete_base(self): for name, idx in self.indices.items(): if not len(idx): continue if isinstance(idx, RangeIndex): # tested in class continue expected = idx[1:] result = idx.delete(0) assert result.equals(expected) assert result.name == expected.name expected = idx[:-1] result = idx.delete(-1) assert result.equals(expected) assert result.name == expected.name with pytest.raises((IndexError, ValueError)): # either depending on numpy version idx.delete(len(idx)) def test_equals(self): for name, idx in self.indices.items(): assert idx.equals(idx) assert idx.equals(idx.copy()) assert idx.equals(idx.astype(object)) assert not idx.equals(list(idx)) assert not idx.equals(np.array(idx)) # Cannot pass in non-int64 dtype to RangeIndex if not isinstance(idx, RangeIndex): same_values = Index(idx, dtype=object) assert idx.equals(same_values) assert same_values.equals(idx) if idx.nlevels == 1: # do not test MultiIndex assert not idx.equals(pd.Series(idx)) def test_equals_op(self): # GH9947, GH10637 index_a = self.create_index() if isinstance(index_a, PeriodIndex): pytest.skip('Skip check for PeriodIndex') n = len(index_a) index_b = index_a[0:-1] index_c = index_a[0:-1].append(index_a[-2:-1]) index_d = index_a[0:1] msg = "Lengths must match|could not be broadcast" with pytest.raises(ValueError, match=msg): index_a == index_b expected1 = np.array([True] * n) expected2 = np.array([True] * (n - 1) + [False]) tm.assert_numpy_array_equal(index_a == index_a, expected1) tm.assert_numpy_array_equal(index_a == index_c, expected2) # test comparisons with numpy arrays array_a = np.array(index_a) array_b = np.array(index_a[0:-1]) array_c = np.array(index_a[0:-1].append(index_a[-2:-1])) array_d = np.array(index_a[0:1]) with pytest.raises(ValueError, match=msg): index_a == array_b tm.assert_numpy_array_equal(index_a == array_a, expected1) tm.assert_numpy_array_equal(index_a == array_c, expected2) # test comparisons with Series series_a = Series(array_a) series_b = Series(array_b) series_c = Series(array_c) series_d = Series(array_d) with pytest.raises(ValueError, match=msg): index_a == series_b tm.assert_numpy_array_equal(index_a == series_a, expected1) tm.assert_numpy_array_equal(index_a == series_c, expected2) # cases where length is 1 for one of them with pytest.raises(ValueError, match="Lengths must match"): index_a == index_d with pytest.raises(ValueError, match="Lengths must match"): index_a == series_d with pytest.raises(ValueError, match="Lengths must match"): index_a == array_d msg = "Can only compare identically-labeled Series objects" with 
pytest.raises(ValueError, match=msg): series_a == series_d with pytest.raises(ValueError, match="Lengths must match"): series_a == array_d # comparing with a scalar should broadcast; note that we are excluding # MultiIndex because in this case each item in the index is a tuple of # length 2, and therefore is considered an array of length 2 in the # comparison instead of a scalar if not isinstance(index_a, MultiIndex): expected3 = np.array([False] * (len(index_a) - 2) + [True, False]) # assuming the 2nd to last item is unique in the data item = index_a[-2] tm.assert_numpy_array_equal(index_a == item, expected3) tm.assert_series_equal(series_a == item, Series(expected3)) def test_hasnans_isnans(self): # GH 11343, added tests for hasnans / isnans for name, index in self.indices.items(): if isinstance(index, MultiIndex): pass else: idx = index.copy() # cases in indices doesn't include NaN expected = np.array([False] * len(idx), dtype=bool) tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is False idx = index.copy() values = np.asarray(idx.values) if len(index) == 0: continue elif isinstance(index, DatetimeIndexOpsMixin): values[1] = iNaT elif isinstance(index, (Int64Index, UInt64Index)): continue else: values[1] = np.nan if isinstance(index, PeriodIndex): idx = index.__class__(values, freq=index.freq) else: idx = index.__class__(values) expected = np.array([False] * len(idx), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is True def test_fillna(self): # GH 11343 for name, index in self.indices.items(): if len(index) == 0: pass elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with pytest.raises(NotImplementedError, match=msg): idx.fillna(idx[0]) else: idx = index.copy() result = idx.fillna(idx[0]) tm.assert_index_equal(result, idx) assert result is not idx msg = "'value' must be a scalar, passed: " with pytest.raises(TypeError, match=msg): idx.fillna([idx[0]]) idx = index.copy() values = np.asarray(idx.values) if isinstance(index, DatetimeIndexOpsMixin): values[1] = iNaT elif isinstance(index, (Int64Index, UInt64Index)): continue else: values[1] = np.nan if isinstance(index, PeriodIndex): idx = index.__class__(values, freq=index.freq) else: idx = index.__class__(values) expected = np.array([False] * len(idx), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is True def test_nulls(self): # this is really a smoke test for the methods # as these are adequately tested for function elsewhere for name, index in self.indices.items(): if len(index) == 0: tm.assert_numpy_array_equal( index.isna(), np.array([], dtype=bool)) elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with pytest.raises(NotImplementedError, match=msg): idx.isna() else: if not index.hasnans: tm.assert_numpy_array_equal( index.isna(), np.zeros(len(index), dtype=bool)) tm.assert_numpy_array_equal( index.notna(), np.ones(len(index), dtype=bool)) else: result = isna(index) tm.assert_numpy_array_equal(index.isna(), result) tm.assert_numpy_array_equal(index.notna(), ~result) def test_empty(self): # GH 15270 index = self.create_index() assert not index.empty assert index[:0].empty def test_join_self_unique(self, join_type): index = self.create_index() if index.is_unique: joined = index.join(index, how=join_type) assert (index == joined).all() def test_map(self): # callable index = self.create_index() # we don't infer 
UInt64 if isinstance(index, pd.UInt64Index): expected = index.astype('int64') else: expected = index result = index.map(lambda x: x) tm.assert_index_equal(result, expected) @pytest.mark.parametrize( "mapper", [ lambda values, index: {i: e for e, i in zip(values, index)}, lambda values, index: pd.Series(values, index)]) def test_map_dictlike(self, mapper): index = self.create_index() if isinstance(index, (pd.CategoricalIndex, pd.IntervalIndex)): pytest.skip("skipping tests for {}".format(type(index))) identity = mapper(index.values, index) # we don't infer to UInt64 for a dict if isinstance(index, pd.UInt64Index) and isinstance(identity, dict): expected = index.astype('int64') else: expected = index result = index.map(identity) tm.assert_index_equal(result, expected) # empty mappable expected = pd.Index([np.nan] * len(index)) result = index.map(mapper(expected, index)) tm.assert_index_equal(result, expected) def test_putmask_with_wrong_mask(self): # GH18368 index = self.create_index() with pytest.raises(ValueError): index.putmask(np.ones(len(index) + 1, np.bool), 1) with pytest.raises(ValueError): index.putmask(np.ones(len(index) - 1, np.bool), 1) with pytest.raises(ValueError): index.putmask('foo', 1) @pytest.mark.parametrize('copy', [True, False]) @pytest.mark.parametrize('name', [None, 'foo']) @pytest.mark.parametrize('ordered', [True, False]) def test_astype_category(self, copy, name, ordered): # GH 18630 index = self.create_index() if name: index = index.rename(name) # standard categories dtype = CategoricalDtype(ordered=ordered) result = index.astype(dtype, copy=copy) expected = CategoricalIndex(index.values, name=name, ordered=ordered) tm.assert_index_equal(result, expected) # non-standard categories dtype = CategoricalDtype(index.unique().tolist()[:-1], ordered) result = index.astype(dtype, copy=copy) expected = CategoricalIndex(index.values, name=name, dtype=dtype) tm.assert_index_equal(result, expected) if ordered is False: # dtype='category' defaults to ordered=False, so only test once result = index.astype('category', copy=copy) expected = CategoricalIndex(index.values, name=name) tm.assert_index_equal(result, expected) def test_is_unique(self): # initialize a unique index index = self.create_index().drop_duplicates() assert index.is_unique is True # empty index should be unique index_empty = index[:0] assert index_empty.is_unique is True # test basic dupes index_dup = index.insert(0, index[0]) assert index_dup.is_unique is False # single NA should be unique index_na = index.insert(0, np.nan) assert index_na.is_unique is True # multiple NA should not be unique index_na_dup = index_na.insert(0, np.nan) assert index_na_dup.is_unique is False
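The base-class uniqueness checks above reduce to a few invariants that can be demonstrated standalone. A minimal sketch follows; the concrete float index is an arbitrary choice for illustration, not part of the suite:

import numpy as np
import pandas as pd

idx = pd.Index([1.0, 2.0, 3.0]).drop_duplicates()
assert idx.is_unique is True                         # deduplicated index is unique
assert idx[:0].is_unique is True                     # an empty index counts as unique
assert idx.insert(0, idx[0]).is_unique is False      # re-inserting a value makes a dupe

idx_na = idx.insert(0, np.nan)
assert idx_na.is_unique is True                      # a single NaN is still unique
assert idx_na.insert(0, np.nan).is_unique is False   # two NaNs are duplicates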
from datetime import timedelta import numpy as np import pytest import pandas as pd from pandas import Timedelta, TimedeltaIndex, timedelta_range, to_timedelta from pandas.core.arrays import TimedeltaArray import pandas.util.testing as tm class TestTimedeltaIndex: def test_verify_integrity_deprecated(self): # GH#23919 with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(['1 Day'], verify_integrity=False) def test_range_kwargs_deprecated(self): # GH#23919 with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(start='1 Day', end='3 Days', freq='D') def test_int64_nocopy(self): # GH#23539 check that a copy isn't made when we pass int64 data # and copy=False arr = np.arange(10, dtype=np.int64) tdi = TimedeltaIndex(arr, copy=False) assert tdi._data._data.base is arr def test_infer_from_tdi(self): # GH#23539 # fast-path for inferring a frequency if the passed data already # has one tdi = pd.timedelta_range('1 second', periods=10**7, freq='1s') result = pd.TimedeltaIndex(tdi, freq='infer') assert result.freq == tdi.freq # check that inferred_freq was not called by checking that the # value has not been cached assert "inferred_freq" not in getattr(result, "_cache", {}) def test_infer_from_tdi_mismatch(self): # GH#23539 # fast-path for invalidating a frequency if the passed data already # has one and it does not match the `freq` input tdi = pd.timedelta_range('1 second', periods=100, freq='1s') msg = ("Inferred frequency .* from passed values does " "not conform to passed frequency") with pytest.raises(ValueError, match=msg): TimedeltaIndex(tdi, freq='D') with pytest.raises(ValueError, match=msg): # GH#23789 TimedeltaArray(tdi, freq='D') def test_dt64_data_invalid(self): # GH#23539 # passing tz-aware DatetimeIndex raises, naive or ndarray[datetime64] # does not yet, but will in the future dti = pd.date_range('2016-01-01', periods=3) msg = "cannot be converted to timedelta64" with pytest.raises(TypeError, match=msg): TimedeltaIndex(dti.tz_localize('Europe/Brussels')) with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(dti) with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(np.asarray(dti)) def test_float64_ns_rounded(self): # GH#23539 without specifying a unit, floats are regarded as nanos, # and fractional portions are truncated tdi = TimedeltaIndex([2.3, 9.7]) expected = TimedeltaIndex([2, 9]) tm.assert_index_equal(tdi, expected) # integral floats are non-lossy tdi = TimedeltaIndex([2.0, 9.0]) expected = TimedeltaIndex([2, 9]) tm.assert_index_equal(tdi, expected) # NaNs get converted to NaT tdi = TimedeltaIndex([2.0, np.nan]) expected = TimedeltaIndex([pd.Timedelta(nanoseconds=2), pd.NaT]) tm.assert_index_equal(tdi, expected) def test_float64_unit_conversion(self): # GH#23539 tdi = TimedeltaIndex([1.5, 2.25], unit='D') expected = TimedeltaIndex([Timedelta(days=1.5), Timedelta(days=2.25)]) tm.assert_index_equal(tdi, expected) def test_construction_base_constructor(self): arr = [pd.Timedelta('1 days'), pd.NaT, pd.Timedelta('3 days')] tm.assert_index_equal(pd.Index(arr), pd.TimedeltaIndex(arr)) tm.assert_index_equal(pd.Index(np.array(arr)), pd.TimedeltaIndex(np.array(arr))) arr = [np.nan, pd.NaT, pd.Timedelta('1 days')] tm.assert_index_equal(pd.Index(arr), pd.TimedeltaIndex(arr)) tm.assert_index_equal(pd.Index(np.array(arr)), pd.TimedeltaIndex(np.array(arr))) def test_constructor(self): expected = TimedeltaIndex(['1 days', '1 days 00:00:05', '2 days', '2 days 00:00:02', '0 days 00:00:03']) result = TimedeltaIndex(['1 days', '1 days, 00:00:05', np.timedelta64( 
2, 'D'), timedelta(days=2, seconds=2), pd.offsets.Second(3)]) tm.assert_index_equal(result, expected) # unicode result = TimedeltaIndex(['1 days', '1 days, 00:00:05', np.timedelta64( 2, 'D'), timedelta(days=2, seconds=2), pd.offsets.Second(3)]) expected = TimedeltaIndex(['0 days 00:00:00', '0 days 00:00:01', '0 days 00:00:02']) tm.assert_index_equal(TimedeltaIndex(range(3), unit='s'), expected) expected = TimedeltaIndex(['0 days 00:00:00', '0 days 00:00:05', '0 days 00:00:09']) tm.assert_index_equal(TimedeltaIndex([0, 5, 9], unit='s'), expected) expected = TimedeltaIndex( ['0 days 00:00:00.400', '0 days 00:00:00.450', '0 days 00:00:01.200']) tm.assert_index_equal(TimedeltaIndex([400, 450, 1200], unit='ms'), expected) def test_constructor_iso(self): # GH #21877 expected = timedelta_range('1s', periods=9, freq='s') durations = ['P0DT0H0M{}S'.format(i) for i in range(1, 10)] result = to_timedelta(durations) tm.assert_index_equal(result, expected) def test_constructor_coverage(self): rng = timedelta_range('1 days', periods=10.5) exp = timedelta_range('1 days', periods=10) tm.assert_index_equal(rng, exp) msg = 'periods must be a number, got foo' with pytest.raises(TypeError, match=msg): timedelta_range(start='1 days', periods='foo', freq='D') with pytest.raises(ValueError): with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(start='1 days', end='10 days') with pytest.raises(TypeError): TimedeltaIndex('1 days') # generator expression gen = (timedelta(i) for i in range(10)) result = TimedeltaIndex(gen) expected = TimedeltaIndex([timedelta(i) for i in range(10)]) tm.assert_index_equal(result, expected) # NumPy string array strings = np.array(['1 days', '2 days', '3 days']) result = TimedeltaIndex(strings) expected = to_timedelta([1, 2, 3], unit='d') tm.assert_index_equal(result, expected) from_ints = TimedeltaIndex(expected.asi8) tm.assert_index_equal(from_ints, expected) # non-conforming freq msg = ("Inferred frequency None from passed values does not conform to" " passed frequency D") with pytest.raises(ValueError, match=msg): TimedeltaIndex(['1 days', '2 days', '4 days'], freq='D') msg = ("Of the four parameters: start, end, periods, and freq, exactly" " three must be specified") with pytest.raises(ValueError, match=msg): timedelta_range(periods=10, freq='D') def test_constructor_name(self): idx = timedelta_range(start='1 days', periods=1, freq='D', name='TEST') assert idx.name == 'TEST' # GH10025 idx2 = TimedeltaIndex(idx, name='something else') assert idx2.name == 'something else' def test_constructor_no_precision_warns(self): # GH-24753, GH-24739 expected = pd.TimedeltaIndex(['2000'], dtype='timedelta64[ns]') # we set the stacklevel for DatetimeIndex with tm.assert_produces_warning(FutureWarning): result = pd.TimedeltaIndex(['2000'], dtype='timedelta64') tm.assert_index_equal(result, expected) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = pd.Index(['2000'], dtype='timedelta64') tm.assert_index_equal(result, expected) def test_constructor_wrong_precision_raises(self): with pytest.raises(ValueError): pd.TimedeltaIndex(['2000'], dtype='timedelta64[us]')
repo_name: cbertinato/pandas
test_path: pandas/tests/indexes/timedeltas/test_construction.py
code_path: pandas/tests/indexes/common.py
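The float-handling rules asserted by the construction tests above, restated as one runnable sketch (values lifted from the tests; this reflects the behavior of the pandas version these tests target):

import numpy as np
import pandas as pd

# No unit: floats are read as nanoseconds and fractional parts truncate.
tdi = pd.TimedeltaIndex([2.3, 9.7])
assert list(tdi.asi8) == [2, 9]

# Explicit unit: fractional values convert losslessly.
tdi = pd.TimedeltaIndex([1.5, 2.25], unit='D')
assert tdi[0] == pd.Timedelta(days=1.5)

# NaN maps to NaT.
assert pd.TimedeltaIndex([2.0, np.nan])[1] is pd.NaT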
import numpy as np from pandas._libs import algos as libalgos, index as libindex import pandas.util.testing as tm class TestNumericEngine: def test_is_monotonic(self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype num = 1000 arr = np.array([1] * num + [2] * num + [3] * num, dtype=dtype) # monotonic increasing engine = engine_type(lambda: arr, len(arr)) assert engine.is_monotonic_increasing is True assert engine.is_monotonic_decreasing is False # monotonic decreasing engine = engine_type(lambda: arr[::-1], len(arr)) assert engine.is_monotonic_increasing is False assert engine.is_monotonic_decreasing is True # neither monotonic increasing or decreasing arr = np.array([1] * num + [2] * num + [1] * num, dtype=dtype) engine = engine_type(lambda: arr[::-1], len(arr)) assert engine.is_monotonic_increasing is False assert engine.is_monotonic_decreasing is False def test_is_unique(self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype # unique arr = np.array([1, 3, 2], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) assert engine.is_unique is True # not unique arr = np.array([1, 2, 1], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) assert engine.is_unique is False def test_get_loc(self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype # unique arr = np.array([1, 2, 3], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) assert engine.get_loc(2) == 1 # monotonic num = 1000 arr = np.array([1] * num + [2] * num + [3] * num, dtype=dtype) engine = engine_type(lambda: arr, len(arr)) assert engine.get_loc(2) == slice(1000, 2000) # not monotonic arr = np.array([1, 2, 3] * num, dtype=dtype) engine = engine_type(lambda: arr, len(arr)) expected = np.array([False, True, False] * num, dtype=bool) result = engine.get_loc(2) assert (result == expected).all() def test_get_backfill_indexer( self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype arr = np.array([1, 5, 10], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) new = np.arange(12, dtype=dtype) result = engine.get_backfill_indexer(new) expected = libalgos.backfill(arr, new) tm.assert_numpy_array_equal(result, expected) def test_get_pad_indexer( self, numeric_indexing_engine_type_and_dtype): engine_type, dtype = numeric_indexing_engine_type_and_dtype arr = np.array([1, 5, 10], dtype=dtype) engine = engine_type(lambda: arr, len(arr)) new = np.arange(12, dtype=dtype) result = engine.get_pad_indexer(new) expected = libalgos.pad(arr, new) tm.assert_numpy_array_equal(result, expected) class TestObjectEngine: engine_type = libindex.ObjectEngine dtype = np.object_ values = list('abc') def test_is_monotonic(self): num = 1000 arr = np.array(['a'] * num + ['a'] * num + ['c'] * num, dtype=self.dtype) # monotonic increasing engine = self.engine_type(lambda: arr, len(arr)) assert engine.is_monotonic_increasing is True assert engine.is_monotonic_decreasing is False # monotonic decreasing engine = self.engine_type(lambda: arr[::-1], len(arr)) assert engine.is_monotonic_increasing is False assert engine.is_monotonic_decreasing is True # neither monotonic increasing or decreasing arr = np.array(['a'] * num + ['b'] * num + ['a'] * num, dtype=self.dtype) engine = self.engine_type(lambda: arr[::-1], len(arr)) assert engine.is_monotonic_increasing is False assert engine.is_monotonic_decreasing is False def test_is_unique(self): # unique arr = 
np.array(self.values, dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) assert engine.is_unique is True # not unique arr = np.array(['a', 'b', 'a'], dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) assert engine.is_unique is False def test_get_loc(self): # unique arr = np.array(self.values, dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) assert engine.get_loc('b') == 1 # monotonic num = 1000 arr = np.array(['a'] * num + ['b'] * num + ['c'] * num, dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) assert engine.get_loc('b') == slice(1000, 2000) # not monotonic arr = np.array(self.values * num, dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) expected = np.array([False, True, False] * num, dtype=bool) result = engine.get_loc('b') assert (result == expected).all() def test_get_backfill_indexer(self): arr = np.array(['a', 'e', 'j'], dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) new = np.array(list('abcdefghij'), dtype=self.dtype) result = engine.get_backfill_indexer(new) expected = libalgos.backfill["object"](arr, new) tm.assert_numpy_array_equal(result, expected) def test_get_pad_indexer(self): arr = np.array(['a', 'e', 'j'], dtype=self.dtype) engine = self.engine_type(lambda: arr, len(arr)) new = np.array(list('abcdefghij'), dtype=self.dtype) result = engine.get_pad_indexer(new) expected = libalgos.pad["object"](arr, new) tm.assert_numpy_array_equal(result, expected)
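The pad/backfill expectations in the engine tests above come straight from the libalgos helpers; the snippet below simply calls them the same way the numeric tests do, to show the indexer shapes (internal pandas._libs API, illustrative only):

import numpy as np
from pandas._libs import algos as libalgos

arr = np.array([1, 5, 10], dtype=np.int64)
new = np.arange(12, dtype=np.int64)

# pad: position of the last old value <= each new value (-1 where none exists).
print(libalgos.pad(arr, new))
# backfill: position of the first old value >= each new value (-1 where none exists).
print(libalgos.backfill(arr, new))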
from datetime import timedelta import numpy as np import pytest import pandas as pd from pandas import Timedelta, TimedeltaIndex, timedelta_range, to_timedelta from pandas.core.arrays import TimedeltaArray import pandas.util.testing as tm class TestTimedeltaIndex: def test_verify_integrity_deprecated(self): # GH#23919 with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(['1 Day'], verify_integrity=False) def test_range_kwargs_deprecated(self): # GH#23919 with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(start='1 Day', end='3 Days', freq='D') def test_int64_nocopy(self): # GH#23539 check that a copy isn't made when we pass int64 data # and copy=False arr = np.arange(10, dtype=np.int64) tdi = TimedeltaIndex(arr, copy=False) assert tdi._data._data.base is arr def test_infer_from_tdi(self): # GH#23539 # fast-path for inferring a frequency if the passed data already # has one tdi = pd.timedelta_range('1 second', periods=10**7, freq='1s') result = pd.TimedeltaIndex(tdi, freq='infer') assert result.freq == tdi.freq # check that inferred_freq was not called by checking that the # value has not been cached assert "inferred_freq" not in getattr(result, "_cache", {}) def test_infer_from_tdi_mismatch(self): # GH#23539 # fast-path for invalidating a frequency if the passed data already # has one and it does not match the `freq` input tdi = pd.timedelta_range('1 second', periods=100, freq='1s') msg = ("Inferred frequency .* from passed values does " "not conform to passed frequency") with pytest.raises(ValueError, match=msg): TimedeltaIndex(tdi, freq='D') with pytest.raises(ValueError, match=msg): # GH#23789 TimedeltaArray(tdi, freq='D') def test_dt64_data_invalid(self): # GH#23539 # passing tz-aware DatetimeIndex raises, naive or ndarray[datetime64] # does not yet, but will in the future dti = pd.date_range('2016-01-01', periods=3) msg = "cannot be converted to timedelta64" with pytest.raises(TypeError, match=msg): TimedeltaIndex(dti.tz_localize('Europe/Brussels')) with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(dti) with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(np.asarray(dti)) def test_float64_ns_rounded(self): # GH#23539 without specifying a unit, floats are regarded as nanos, # and fractional portions are truncated tdi = TimedeltaIndex([2.3, 9.7]) expected = TimedeltaIndex([2, 9]) tm.assert_index_equal(tdi, expected) # integral floats are non-lossy tdi = TimedeltaIndex([2.0, 9.0]) expected = TimedeltaIndex([2, 9]) tm.assert_index_equal(tdi, expected) # NaNs get converted to NaT tdi = TimedeltaIndex([2.0, np.nan]) expected = TimedeltaIndex([pd.Timedelta(nanoseconds=2), pd.NaT]) tm.assert_index_equal(tdi, expected) def test_float64_unit_conversion(self): # GH#23539 tdi = TimedeltaIndex([1.5, 2.25], unit='D') expected = TimedeltaIndex([Timedelta(days=1.5), Timedelta(days=2.25)]) tm.assert_index_equal(tdi, expected) def test_construction_base_constructor(self): arr = [pd.Timedelta('1 days'), pd.NaT, pd.Timedelta('3 days')] tm.assert_index_equal(pd.Index(arr), pd.TimedeltaIndex(arr)) tm.assert_index_equal(pd.Index(np.array(arr)), pd.TimedeltaIndex(np.array(arr))) arr = [np.nan, pd.NaT, pd.Timedelta('1 days')] tm.assert_index_equal(pd.Index(arr), pd.TimedeltaIndex(arr)) tm.assert_index_equal(pd.Index(np.array(arr)), pd.TimedeltaIndex(np.array(arr))) def test_constructor(self): expected = TimedeltaIndex(['1 days', '1 days 00:00:05', '2 days', '2 days 00:00:02', '0 days 00:00:03']) result = TimedeltaIndex(['1 days', '1 days, 00:00:05', np.timedelta64( 
2, 'D'), timedelta(days=2, seconds=2), pd.offsets.Second(3)]) tm.assert_index_equal(result, expected) # unicode result = TimedeltaIndex(['1 days', '1 days, 00:00:05', np.timedelta64( 2, 'D'), timedelta(days=2, seconds=2), pd.offsets.Second(3)]) expected = TimedeltaIndex(['0 days 00:00:00', '0 days 00:00:01', '0 days 00:00:02']) tm.assert_index_equal(TimedeltaIndex(range(3), unit='s'), expected) expected = TimedeltaIndex(['0 days 00:00:00', '0 days 00:00:05', '0 days 00:00:09']) tm.assert_index_equal(TimedeltaIndex([0, 5, 9], unit='s'), expected) expected = TimedeltaIndex( ['0 days 00:00:00.400', '0 days 00:00:00.450', '0 days 00:00:01.200']) tm.assert_index_equal(TimedeltaIndex([400, 450, 1200], unit='ms'), expected) def test_constructor_iso(self): # GH #21877 expected = timedelta_range('1s', periods=9, freq='s') durations = ['P0DT0H0M{}S'.format(i) for i in range(1, 10)] result = to_timedelta(durations) tm.assert_index_equal(result, expected) def test_constructor_coverage(self): rng = timedelta_range('1 days', periods=10.5) exp = timedelta_range('1 days', periods=10) tm.assert_index_equal(rng, exp) msg = 'periods must be a number, got foo' with pytest.raises(TypeError, match=msg): timedelta_range(start='1 days', periods='foo', freq='D') with pytest.raises(ValueError): with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(start='1 days', end='10 days') with pytest.raises(TypeError): TimedeltaIndex('1 days') # generator expression gen = (timedelta(i) for i in range(10)) result = TimedeltaIndex(gen) expected = TimedeltaIndex([timedelta(i) for i in range(10)]) tm.assert_index_equal(result, expected) # NumPy string array strings = np.array(['1 days', '2 days', '3 days']) result = TimedeltaIndex(strings) expected = to_timedelta([1, 2, 3], unit='d') tm.assert_index_equal(result, expected) from_ints = TimedeltaIndex(expected.asi8) tm.assert_index_equal(from_ints, expected) # non-conforming freq msg = ("Inferred frequency None from passed values does not conform to" " passed frequency D") with pytest.raises(ValueError, match=msg): TimedeltaIndex(['1 days', '2 days', '4 days'], freq='D') msg = ("Of the four parameters: start, end, periods, and freq, exactly" " three must be specified") with pytest.raises(ValueError, match=msg): timedelta_range(periods=10, freq='D') def test_constructor_name(self): idx = timedelta_range(start='1 days', periods=1, freq='D', name='TEST') assert idx.name == 'TEST' # GH10025 idx2 = TimedeltaIndex(idx, name='something else') assert idx2.name == 'something else' def test_constructor_no_precision_warns(self): # GH-24753, GH-24739 expected = pd.TimedeltaIndex(['2000'], dtype='timedelta64[ns]') # we set the stacklevel for DatetimeIndex with tm.assert_produces_warning(FutureWarning): result = pd.TimedeltaIndex(['2000'], dtype='timedelta64') tm.assert_index_equal(result, expected) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = pd.Index(['2000'], dtype='timedelta64') tm.assert_index_equal(result, expected) def test_constructor_wrong_precision_raises(self): with pytest.raises(ValueError): pd.TimedeltaIndex(['2000'], dtype='timedelta64[us]')
repo_name: cbertinato/pandas
test_path: pandas/tests/indexes/timedeltas/test_construction.py
code_path: pandas/tests/indexing/test_indexing_engines.py
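A sketch of the three return shapes engine.get_loc can take, per the tests above. It constructs pandas._libs.index.Int64Engine directly, which is internal API in this pandas era, so treat it as illustrative rather than a supported interface:

import numpy as np
from pandas._libs import index as libindex

# Unique values: a plain integer position.
arr = np.array([1, 2, 3], dtype=np.int64)
assert libindex.Int64Engine(lambda: arr, len(arr)).get_loc(2) == 1

# Monotonic with repeats: a slice covering the run.
arr = np.array([1] * 3 + [2] * 3 + [3] * 3, dtype=np.int64)
assert libindex.Int64Engine(lambda: arr, len(arr)).get_loc(2) == slice(3, 6)

# Non-monotonic repeats: a boolean mask.
arr = np.array([1, 2, 3] * 3, dtype=np.int64)
mask = libindex.Int64Engine(lambda: arr, len(arr)).get_loc(2)
assert (mask == np.array([False, True, False] * 3)).all()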
""" Arithmetic operations for PandasObjects This is not a public API. """ import datetime import operator import textwrap from typing import Dict, Optional import warnings import numpy as np from pandas._libs import algos as libalgos, lib, ops as libops from pandas.errors import NullFrequencyError from pandas.util._decorators import Appender from pandas.core.dtypes.cast import ( construct_1d_object_array_from_listlike, find_common_type, maybe_upcast_putmask) from pandas.core.dtypes.common import ( ensure_object, is_bool_dtype, is_categorical_dtype, is_datetime64_dtype, is_datetime64tz_dtype, is_datetimelike_v_numeric, is_extension_array_dtype, is_integer_dtype, is_list_like, is_object_dtype, is_period_dtype, is_scalar, is_timedelta64_dtype, needs_i8_conversion) from pandas.core.dtypes.generic import ( ABCDataFrame, ABCIndex, ABCIndexClass, ABCSeries, ABCSparseArray, ABCSparseSeries) from pandas.core.dtypes.missing import isna, notna import pandas as pd import pandas.core.common as com import pandas.core.missing as missing # ----------------------------------------------------------------------------- # Ops Wrapping Utilities def get_op_result_name(left, right): """ Find the appropriate name to pin to an operation result. This result should always be either an Index or a Series. Parameters ---------- left : {Series, Index} right : object Returns ------- name : object Usually a string """ # `left` is always a pd.Series when called from within ops if isinstance(right, (ABCSeries, pd.Index)): name = _maybe_match_name(left, right) else: name = left.name return name def _maybe_match_name(a, b): """ Try to find a name to attach to the result of an operation between a and b. If only one of these has a `name` attribute, return that name. Otherwise return a consensus name if they match of None if they have different names. Parameters ---------- a : object b : object Returns ------- name : str or None See Also -------- pandas.core.common.consensus_name_attr """ a_has = hasattr(a, 'name') b_has = hasattr(b, 'name') if a_has and b_has: if a.name == b.name: return a.name else: # TODO: what if they both have np.nan for their names? return None elif a_has: return a.name elif b_has: return b.name return None def maybe_upcast_for_op(obj): """ Cast non-pandas objects to pandas types to unify behavior of arithmetic and comparison operations. Parameters ---------- obj: object Returns ------- out : object Notes ----- Be careful to call this *after* determining the `name` attribute to be attached to the result of the arithmetic operation. """ if type(obj) is datetime.timedelta: # GH#22390 cast up to Timedelta to rely on Timedelta # implementation; otherwise operation against numeric-dtype # raises TypeError return pd.Timedelta(obj) elif isinstance(obj, np.timedelta64) and not isna(obj): # In particular non-nanosecond timedelta64 needs to be cast to # nanoseconds, or else we get undesired behavior like # np.timedelta64(3, 'D') / 2 == np.timedelta64(1, 'D') # The isna check is to avoid casting timedelta64("NaT"), which would # return NaT and incorrectly be treated as a datetime-NaT. 
return pd.Timedelta(obj) elif isinstance(obj, np.ndarray) and is_timedelta64_dtype(obj): # GH#22390 Unfortunately we need to special-case right-hand # timedelta64 dtypes because numpy casts integer dtypes to # timedelta64 when operating with timedelta64 return pd.TimedeltaIndex(obj) return obj # ----------------------------------------------------------------------------- # Reversed Operations not available in the stdlib operator module. # Defining these instead of using lambdas allows us to reference them by name. def radd(left, right): return right + left def rsub(left, right): return right - left def rmul(left, right): return right * left def rdiv(left, right): return right / left def rtruediv(left, right): return right / left def rfloordiv(left, right): return right // left def rmod(left, right): # check if right is a string as % is the string # formatting operation; this is a TypeError # otherwise perform the op if isinstance(right, str): raise TypeError("{typ} cannot perform the operation mod".format( typ=type(left).__name__)) return right % left def rdivmod(left, right): return divmod(right, left) def rpow(left, right): return right ** left def rand_(left, right): return operator.and_(right, left) def ror_(left, right): return operator.or_(right, left) def rxor(left, right): return operator.xor(right, left) # ----------------------------------------------------------------------------- def make_invalid_op(name): """ Return a binary method that always raises a TypeError. Parameters ---------- name : str Returns ------- invalid_op : function """ def invalid_op(self, other=None): raise TypeError("cannot perform {name} with this index type: " "{typ}".format(name=name, typ=type(self).__name__)) invalid_op.__name__ = name return invalid_op def _gen_eval_kwargs(name): """ Find the keyword arguments to pass to numexpr for the given operation. Parameters ---------- name : str Returns ------- eval_kwargs : dict Examples -------- >>> _gen_eval_kwargs("__add__") {} >>> _gen_eval_kwargs("rtruediv") {'reversed': True, 'truediv': True} """ kwargs = {} # Series appear to only pass __add__, __radd__, ... # but DataFrame gets both these dunder names _and_ non-dunder names # add, radd, ... name = name.replace('__', '') if name.startswith('r'): if name not in ['radd', 'rand', 'ror', 'rxor']: # Exclude commutative operations kwargs['reversed'] = True if name in ['truediv', 'rtruediv']: kwargs['truediv'] = True if name in ['ne']: kwargs['masker'] = True return kwargs def _gen_fill_zeros(name): """ Find the appropriate fill value to use when filling in undefined values in the results of the given operation caused by operating on (generally dividing by) zero. Parameters ---------- name : str Returns ------- fill_value : {None, np.nan, np.inf} """ name = name.strip('__') if 'div' in name: # truediv, floordiv, div, and reversed variants fill_value = np.inf elif 'mod' in name: # mod, rmod fill_value = np.nan else: fill_value = None return fill_value def _get_frame_op_default_axis(name): """ Only DataFrame cares about default_axis, specifically: special methods have default_axis=None and flex methods have default_axis='columns'. Parameters ---------- name : str Returns ------- default_axis: str or None """ if name.replace('__r', '__') in ['__and__', '__or__', '__xor__']: # bool methods return 'columns' elif name.startswith('__'): # __add__, __mul__, ... return None else: # add, mul, ... return 'columns' def _get_opstr(op, cls): """ Find the operation string, if any, to pass to numexpr for this operation. 
Parameters ---------- op : binary operator cls : class Returns ------- op_str : string or None """ # numexpr is available for non-sparse classes subtyp = getattr(cls, '_subtyp', '') use_numexpr = 'sparse' not in subtyp if not use_numexpr: # if we're not using numexpr, then don't pass a str_rep return None return {operator.add: '+', radd: '+', operator.mul: '*', rmul: '*', operator.sub: '-', rsub: '-', operator.truediv: '/', rtruediv: '/', operator.floordiv: '//', rfloordiv: '//', operator.mod: None, # TODO: Why None for mod but '%' for rmod? rmod: '%', operator.pow: '**', rpow: '**', operator.eq: '==', operator.ne: '!=', operator.le: '<=', operator.lt: '<', operator.ge: '>=', operator.gt: '>', operator.and_: '&', rand_: '&', operator.or_: '|', ror_: '|', operator.xor: '^', rxor: '^', divmod: None, rdivmod: None}[op] def _get_op_name(op, special): """ Find the name to attach to this method according to conventions for special and non-special methods. Parameters ---------- op : binary operator special : bool Returns ------- op_name : str """ opname = op.__name__.strip('_') if special: opname = '__{opname}__'.format(opname=opname) return opname # ----------------------------------------------------------------------------- # Docstring Generation and Templates _add_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.add(b, fill_value=0) a 2.0 b 1.0 c 1.0 d 1.0 e NaN dtype: float64 """ _sub_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.subtract(b, fill_value=0) a 0.0 b 1.0 c 1.0 d -1.0 e NaN dtype: float64 """ _mul_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.multiply(b, fill_value=0) a 1.0 b 0.0 c 0.0 d 0.0 e NaN dtype: float64 """ _div_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.divide(b, fill_value=0) a 1.0 b inf c inf d 0.0 e NaN dtype: float64 """ _floordiv_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.floordiv(b, fill_value=0) a 1.0 b NaN c NaN d 0.0 e NaN dtype: float64 """ _mod_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.mod(b, fill_value=0) a 0.0 b NaN c NaN d 0.0 e NaN dtype: float64 """ _pow_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 
1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.pow(b, fill_value=0) a 1.0 b 1.0 c 1.0 d 0.0 e NaN dtype: float64 """ _op_descriptions = { # Arithmetic Operators 'add': {'op': '+', 'desc': 'Addition', 'reverse': 'radd', 'series_examples': _add_example_SERIES}, 'sub': {'op': '-', 'desc': 'Subtraction', 'reverse': 'rsub', 'series_examples': _sub_example_SERIES}, 'mul': {'op': '*', 'desc': 'Multiplication', 'reverse': 'rmul', 'series_examples': _mul_example_SERIES, 'df_examples': None}, 'mod': {'op': '%', 'desc': 'Modulo', 'reverse': 'rmod', 'series_examples': _mod_example_SERIES}, 'pow': {'op': '**', 'desc': 'Exponential power', 'reverse': 'rpow', 'series_examples': _pow_example_SERIES, 'df_examples': None}, 'truediv': {'op': '/', 'desc': 'Floating division', 'reverse': 'rtruediv', 'series_examples': _div_example_SERIES, 'df_examples': None}, 'floordiv': {'op': '//', 'desc': 'Integer division', 'reverse': 'rfloordiv', 'series_examples': _floordiv_example_SERIES, 'df_examples': None}, 'divmod': {'op': 'divmod', 'desc': 'Integer division and modulo', 'reverse': 'rdivmod', 'series_examples': None, 'df_examples': None}, # Comparison Operators 'eq': {'op': '==', 'desc': 'Equal to', 'reverse': None, 'series_examples': None}, 'ne': {'op': '!=', 'desc': 'Not equal to', 'reverse': None, 'series_examples': None}, 'lt': {'op': '<', 'desc': 'Less than', 'reverse': None, 'series_examples': None}, 'le': {'op': '<=', 'desc': 'Less than or equal to', 'reverse': None, 'series_examples': None}, 'gt': {'op': '>', 'desc': 'Greater than', 'reverse': None, 'series_examples': None}, 'ge': {'op': '>=', 'desc': 'Greater than or equal to', 'reverse': None, 'series_examples': None} } # type: Dict[str, Dict[str, Optional[str]]] _op_names = list(_op_descriptions.keys()) for key in _op_names: reverse_op = _op_descriptions[key]['reverse'] if reverse_op is not None: _op_descriptions[reverse_op] = _op_descriptions[key].copy() _op_descriptions[reverse_op]['reverse'] = key _flex_doc_SERIES = """ Return {desc} of series and other, element-wise (binary operator `{op_name}`). Equivalent to ``{equiv}``, but with support to substitute a fill_value for missing data in one of the inputs. Parameters ---------- other : Series or scalar value fill_value : None or float value, default None (NaN) Fill existing missing (NaN) values, and any new element needed for successful Series alignment, with this value before computation. If data in both corresponding Series locations is missing the result will be missing. level : int or name Broadcast across a level, matching Index values on the passed MultiIndex level. Returns ------- Series The result of the operation. See Also -------- Series.{reverse} """ _arith_doc_FRAME = """ Binary operator %s with support to substitute a fill_value for missing data in one of the inputs Parameters ---------- other : Series, DataFrame, or constant axis : {0, 1, 'index', 'columns'} For Series input, axis to match Series index on fill_value : None or float value, default None Fill existing missing (NaN) values, and any new element needed for successful DataFrame alignment, with this value before computation. 
If data in both corresponding DataFrame locations is missing the result will be missing level : int or name Broadcast across a level, matching Index values on the passed MultiIndex level Returns ------- result : DataFrame Notes ----- Mismatched indices will be unioned together """ _flex_doc_FRAME = """ Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`). Equivalent to ``{equiv}``, but with support to substitute a fill_value for missing data in one of the inputs. With reverse version, `{reverse}`. Among flexible wrappers (`add`, `sub`, `mul`, `div`, `mod`, `pow`) to arithmetic operators: `+`, `-`, `*`, `/`, `//`, `%`, `**`. Parameters ---------- other : scalar, sequence, Series, or DataFrame Any single or multiple element data structure, or list-like object. axis : {{0 or 'index', 1 or 'columns'}} Whether to compare by the index (0 or 'index') or columns (1 or 'columns'). For Series input, axis to match Series index on. level : int or label Broadcast across a level, matching Index values on the passed MultiIndex level. fill_value : float or None, default None Fill existing missing (NaN) values, and any new element needed for successful DataFrame alignment, with this value before computation. If data in both corresponding DataFrame locations is missing the result will be missing. Returns ------- DataFrame Result of the arithmetic operation. See Also -------- DataFrame.add : Add DataFrames. DataFrame.sub : Subtract DataFrames. DataFrame.mul : Multiply DataFrames. DataFrame.div : Divide DataFrames (float division). DataFrame.truediv : Divide DataFrames (float division). DataFrame.floordiv : Divide DataFrames (integer division). DataFrame.mod : Calculate modulo (remainder after division). DataFrame.pow : Calculate exponential power. Notes ----- Mismatched indices will be unioned together. Examples -------- >>> df = pd.DataFrame({{'angles': [0, 3, 4], ... 'degrees': [360, 180, 360]}}, ... index=['circle', 'triangle', 'rectangle']) >>> df angles degrees circle 0 360 triangle 3 180 rectangle 4 360 Add a scalar with operator version which return the same results. >>> df + 1 angles degrees circle 1 361 triangle 4 181 rectangle 5 361 >>> df.add(1) angles degrees circle 1 361 triangle 4 181 rectangle 5 361 Divide by constant with reverse version. >>> df.div(10) angles degrees circle 0.0 36.0 triangle 0.3 18.0 rectangle 0.4 36.0 >>> df.rdiv(10) angles degrees circle inf 0.027778 triangle 3.333333 0.055556 rectangle 2.500000 0.027778 Subtract a list and Series by axis with operator version. >>> df - [1, 2] angles degrees circle -1 358 triangle 2 178 rectangle 3 358 >>> df.sub([1, 2], axis='columns') angles degrees circle -1 358 triangle 2 178 rectangle 3 358 >>> df.sub(pd.Series([1, 1, 1], index=['circle', 'triangle', 'rectangle']), ... axis='index') angles degrees circle -1 359 triangle 2 179 rectangle 3 359 Multiply a DataFrame of different shape with operator version. >>> other = pd.DataFrame({{'angles': [0, 3, 4]}}, ... index=['circle', 'triangle', 'rectangle']) >>> other angles circle 0 triangle 3 rectangle 4 >>> df * other angles degrees circle 0 NaN triangle 9 NaN rectangle 16 NaN >>> df.mul(other, fill_value=0) angles degrees circle 0 0.0 triangle 9 0.0 rectangle 16 0.0 Divide by a MultiIndex by level. >>> df_multindex = pd.DataFrame({{'angles': [0, 3, 4, 4, 5, 6], ... 'degrees': [360, 180, 360, 360, 540, 720]}}, ... index=[['A', 'A', 'A', 'B', 'B', 'B'], ... ['circle', 'triangle', 'rectangle', ... 
'square', 'pentagon', 'hexagon']]) >>> df_multindex angles degrees A circle 0 360 triangle 3 180 rectangle 4 360 B square 4 360 pentagon 5 540 hexagon 6 720 >>> df.div(df_multindex, level=1, fill_value=0) angles degrees A circle NaN 1.0 triangle 1.0 1.0 rectangle 1.0 1.0 B square 0.0 0.0 pentagon 0.0 0.0 hexagon 0.0 0.0 """ _flex_comp_doc_FRAME = """ Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`). Among flexible wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) to comparison operators. Equivalent to `==`, `=!`, `<=`, `<`, `>=`, `>` with support to choose axis (rows or columns) and level for comparison. Parameters ---------- other : scalar, sequence, Series, or DataFrame Any single or multiple element data structure, or list-like object. axis : {{0 or 'index', 1 or 'columns'}}, default 'columns' Whether to compare by the index (0 or 'index') or columns (1 or 'columns'). level : int or label Broadcast across a level, matching Index values on the passed MultiIndex level. Returns ------- DataFrame of bool Result of the comparison. See Also -------- DataFrame.eq : Compare DataFrames for equality elementwise. DataFrame.ne : Compare DataFrames for inequality elementwise. DataFrame.le : Compare DataFrames for less than inequality or equality elementwise. DataFrame.lt : Compare DataFrames for strictly less than inequality elementwise. DataFrame.ge : Compare DataFrames for greater than inequality or equality elementwise. DataFrame.gt : Compare DataFrames for strictly greater than inequality elementwise. Notes ----- Mismatched indices will be unioned together. `NaN` values are considered different (i.e. `NaN` != `NaN`). Examples -------- >>> df = pd.DataFrame({{'cost': [250, 150, 100], ... 'revenue': [100, 250, 300]}}, ... index=['A', 'B', 'C']) >>> df cost revenue A 250 100 B 150 250 C 100 300 Comparison with a scalar, using either the operator or method: >>> df == 100 cost revenue A False True B False False C True False >>> df.eq(100) cost revenue A False True B False False C True False When `other` is a :class:`Series`, the columns of a DataFrame are aligned with the index of `other` and broadcast: >>> df != pd.Series([100, 250], index=["cost", "revenue"]) cost revenue A True True B True False C False True Use the method to control the broadcast axis: >>> df.ne(pd.Series([100, 300], index=["A", "D"]), axis='index') cost revenue A True False B True True C True True D True True When comparing to an arbitrary sequence, the number of columns must match the number elements in `other`: >>> df == [250, 100] cost revenue A True True B False False C False False Use the method to control the axis: >>> df.eq([250, 250, 100], axis='index') cost revenue A True False B False True C True False Compare to a DataFrame of different shape. >>> other = pd.DataFrame({{'revenue': [300, 250, 100, 150]}}, ... index=['A', 'B', 'C', 'D']) >>> other revenue A 300 B 250 C 100 D 150 >>> df.gt(other) cost revenue A False False B False False C False True D False False Compare to a MultiIndex by level. >>> df_multindex = pd.DataFrame({{'cost': [250, 150, 100, 150, 300, 220], ... 'revenue': [100, 250, 300, 200, 175, 225]}}, ... index=[['Q1', 'Q1', 'Q1', 'Q2', 'Q2', 'Q2'], ... 
['A', 'B', 'C', 'A', 'B', 'C']]) >>> df_multindex cost revenue Q1 A 250 100 B 150 250 C 100 300 Q2 A 150 200 B 300 175 C 220 225 >>> df.le(df_multindex, level=1) cost revenue Q1 A True True B True True C True True Q2 A False True B True False C True False """ def _make_flex_doc(op_name, typ): """ Make the appropriate substitutions for the given operation and class-typ into either _flex_doc_SERIES or _flex_doc_FRAME to return the docstring to attach to a generated method. Parameters ---------- op_name : str {'__add__', '__sub__', ... '__eq__', '__ne__', ...} typ : str {series, 'dataframe']} Returns ------- doc : str """ op_name = op_name.replace('__', '') op_desc = _op_descriptions[op_name] if op_name.startswith('r'): equiv = 'other ' + op_desc['op'] + ' ' + typ else: equiv = typ + ' ' + op_desc['op'] + ' other' if typ == 'series': base_doc = _flex_doc_SERIES doc_no_examples = base_doc.format( desc=op_desc['desc'], op_name=op_name, equiv=equiv, reverse=op_desc['reverse'] ) if op_desc['series_examples']: doc = doc_no_examples + op_desc['series_examples'] else: doc = doc_no_examples elif typ == 'dataframe': base_doc = _flex_doc_FRAME doc = base_doc.format( desc=op_desc['desc'], op_name=op_name, equiv=equiv, reverse=op_desc['reverse'] ) else: raise AssertionError('Invalid typ argument.') return doc # ----------------------------------------------------------------------------- # Masking NA values and fallbacks for operations numpy does not support def fill_binop(left, right, fill_value): """ If a non-None fill_value is given, replace null entries in left and right with this value, but only in positions where _one_ of left/right is null, not both. Parameters ---------- left : array-like right : array-like fill_value : object Returns ------- left : array-like right : array-like Notes ----- Makes copies if fill_value is not None """ # TODO: can we make a no-copy implementation? if fill_value is not None: left_mask = isna(left) right_mask = isna(right) left = left.copy() right = right.copy() # one but not both mask = left_mask ^ right_mask left[left_mask & mask] = fill_value right[right_mask & mask] = fill_value return left, right def mask_cmp_op(x, y, op): """ Apply the function `op` to only non-null points in x and y. Parameters ---------- x : array-like y : array-like op : binary operation Returns ------- result : ndarray[bool] """ xrav = x.ravel() result = np.empty(x.size, dtype=bool) if isinstance(y, (np.ndarray, ABCSeries)): yrav = y.ravel() mask = notna(xrav) & notna(yrav) result[mask] = op(np.array(list(xrav[mask])), np.array(list(yrav[mask]))) else: mask = notna(xrav) result[mask] = op(np.array(list(xrav[mask])), y) if op == operator.ne: # pragma: no cover np.putmask(result, ~mask, True) else: np.putmask(result, ~mask, False) result = result.reshape(x.shape) return result def masked_arith_op(x, y, op): """ If the given arithmetic operation fails, attempt it again on only the non-null elements of the input array(s). Parameters ---------- x : np.ndarray y : np.ndarray, Series, Index op : binary operator """ # For Series `x` is 1D so ravel() is a no-op; calling it anyway makes # the logic valid for both Series and DataFrame ops. xrav = x.ravel() assert isinstance(x, (np.ndarray, ABCSeries)), type(x) if isinstance(y, (np.ndarray, ABCSeries, ABCIndexClass)): dtype = find_common_type([x.dtype, y.dtype]) result = np.empty(x.size, dtype=dtype) # PeriodIndex.ravel() returns int64 dtype, so we have # to work around that case. 
See GH#19956 yrav = y if is_period_dtype(y) else y.ravel() mask = notna(xrav) & notna(yrav) if yrav.shape != mask.shape: # FIXME: GH#5284, GH#5035, GH#19448 # Without specifically raising here we get mismatched # errors in Py3 (TypeError) vs Py2 (ValueError) # Note: Only = an issue in DataFrame case raise ValueError('Cannot broadcast operands together.') if mask.any(): with np.errstate(all='ignore'): result[mask] = op(xrav[mask], com.values_from_object(yrav[mask])) else: assert is_scalar(y), type(y) assert isinstance(x, np.ndarray), type(x) # mask is only meaningful for x result = np.empty(x.size, dtype=x.dtype) mask = notna(xrav) # 1 ** np.nan is 1. So we have to unmask those. if op == pow: mask = np.where(x == 1, False, mask) elif op == rpow: mask = np.where(y == 1, False, mask) if mask.any(): with np.errstate(all='ignore'): result[mask] = op(xrav[mask], y) result, changed = maybe_upcast_putmask(result, ~mask, np.nan) result = result.reshape(x.shape) # 2D compat return result def invalid_comparison(left, right, op): """ If a comparison has mismatched types and is not necessarily meaningful, follow python3 conventions by: - returning all-False for equality - returning all-True for inequality - raising TypeError otherwise Parameters ---------- left : array-like right : scalar, array-like op : operator.{eq, ne, lt, le, gt} Raises ------ TypeError : on inequality comparisons """ if op is operator.eq: res_values = np.zeros(left.shape, dtype=bool) elif op is operator.ne: res_values = np.ones(left.shape, dtype=bool) else: raise TypeError("Invalid comparison between dtype={dtype} and {typ}" .format(dtype=left.dtype, typ=type(right).__name__)) return res_values # ----------------------------------------------------------------------------- # Dispatch logic def should_series_dispatch(left, right, op): """ Identify cases where a DataFrame operation should dispatch to its Series counterpart. Parameters ---------- left : DataFrame right : DataFrame op : binary operator Returns ------- override : bool """ if left._is_mixed_type or right._is_mixed_type: return True if not len(left.columns) or not len(right.columns): # ensure obj.dtypes[0] exists for each obj return False ldtype = left.dtypes.iloc[0] rdtype = right.dtypes.iloc[0] if ((is_timedelta64_dtype(ldtype) and is_integer_dtype(rdtype)) or (is_timedelta64_dtype(rdtype) and is_integer_dtype(ldtype))): # numpy integer dtypes as timedelta64 dtypes in this scenario return True if is_datetime64_dtype(ldtype) and is_object_dtype(rdtype): # in particular case where right is an array of DateOffsets return True return False def dispatch_to_series(left, right, func, str_rep=None, axis=None): """ Evaluate the frame operation func(left, right) by evaluating column-by-column, dispatching to the Series implementation. Parameters ---------- left : DataFrame right : scalar or DataFrame func : arithmetic or comparison operator str_rep : str or None, default None axis : {None, 0, 1, "index", "columns"} Returns ------- DataFrame """ # Note: we use iloc to access columns for compat with cases # with non-unique columns. 
import pandas.core.computation.expressions as expressions right = lib.item_from_zerodim(right) if lib.is_scalar(right) or np.ndim(right) == 0: def column_op(a, b): return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))} elif isinstance(right, ABCDataFrame): assert right._indexed_same(left) def column_op(a, b): return {i: func(a.iloc[:, i], b.iloc[:, i]) for i in range(len(a.columns))} elif isinstance(right, ABCSeries) and axis == "columns": # We only get here if called via left._combine_match_columns, # in which case we specifically want to operate row-by-row assert right.index.equals(left.columns) def column_op(a, b): return {i: func(a.iloc[:, i], b.iloc[i]) for i in range(len(a.columns))} elif isinstance(right, ABCSeries): assert right.index.equals(left.index) # Handle other cases later def column_op(a, b): return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))} else: # Remaining cases have less-obvious dispatch rules raise NotImplementedError(right) new_data = expressions.evaluate(column_op, str_rep, left, right) result = left._constructor(new_data, index=left.index, copy=False) # Pin columns instead of passing to constructor for compat with # non-unique columns case result.columns = left.columns return result def dispatch_to_index_op(op, left, right, index_class): """ Wrap Series left in the given index_class to delegate the operation op to the index implementation. DatetimeIndex and TimedeltaIndex perform type checking, timezone handling, overflow checks, etc. Parameters ---------- op : binary operator (operator.add, operator.sub, ...) left : Series right : object index_class : DatetimeIndex or TimedeltaIndex Returns ------- result : object, usually DatetimeIndex, TimedeltaIndex, or Series """ left_idx = index_class(left) # avoid accidentally allowing integer add/sub. For datetime64[tz] dtypes, # left_idx may inherit a freq from a cached DatetimeIndex. # See discussion in GH#19147. if getattr(left_idx, 'freq', None) is not None: left_idx = left_idx._shallow_copy(freq=None) try: result = op(left_idx, right) except NullFrequencyError: # DatetimeIndex and TimedeltaIndex with freq == None raise ValueError # on add/sub of integers (or int-like). We re-raise as a TypeError. raise TypeError('incompatible type for a datetime/timedelta ' 'operation [{name}]'.format(name=op.__name__)) return result def dispatch_to_extension_op(op, left, right): """ Assume that left or right is a Series backed by an ExtensionArray, apply the operator defined by op. """ # The op calls will raise TypeError if the op is not defined # on the ExtensionArray # unbox Series and Index to arrays if isinstance(left, (ABCSeries, ABCIndexClass)): new_left = left._values else: new_left = left if isinstance(right, (ABCSeries, ABCIndexClass)): new_right = right._values else: new_right = right res_values = op(new_left, new_right) res_name = get_op_result_name(left, right) if op.__name__ in ['divmod', 'rdivmod']: return _construct_divmod_result( left, res_values, left.index, res_name) return _construct_result(left, res_values, left.index, res_name) # ----------------------------------------------------------------------------- # Functions that add arithmetic methods to objects, given arithmetic factory # methods def _get_method_wrappers(cls): """ Find the appropriate operation-wrappers to use when defining flex/special arithmetic, boolean, and comparison operations with the given class. 
Parameters ---------- cls : class Returns ------- arith_flex : function or None comp_flex : function or None arith_special : function comp_special : function bool_special : function Notes ----- None is only returned for SparseArray """ if issubclass(cls, ABCSparseSeries): # Be sure to catch this before ABCSeries and ABCSparseArray, # as they will both come see SparseSeries as a subclass arith_flex = _flex_method_SERIES comp_flex = _flex_method_SERIES arith_special = _arith_method_SPARSE_SERIES comp_special = _arith_method_SPARSE_SERIES bool_special = _bool_method_SERIES # TODO: I don't think the functions defined by bool_method are tested elif issubclass(cls, ABCSeries): # Just Series; SparseSeries is caught above arith_flex = _flex_method_SERIES comp_flex = _flex_method_SERIES arith_special = _arith_method_SERIES comp_special = _comp_method_SERIES bool_special = _bool_method_SERIES elif issubclass(cls, ABCSparseArray): arith_flex = None comp_flex = None arith_special = _arith_method_SPARSE_ARRAY comp_special = _arith_method_SPARSE_ARRAY bool_special = _arith_method_SPARSE_ARRAY elif issubclass(cls, ABCDataFrame): # Same for DataFrame and SparseDataFrame arith_flex = _arith_method_FRAME comp_flex = _flex_comp_method_FRAME arith_special = _arith_method_FRAME comp_special = _comp_method_FRAME bool_special = _arith_method_FRAME return arith_flex, comp_flex, arith_special, comp_special, bool_special def _create_methods(cls, arith_method, comp_method, bool_method, special): # creates actual methods based upon arithmetic, comp and bool method # constructors. have_divmod = issubclass(cls, ABCSeries) # divmod is available for Series and SparseSeries # yapf: disable new_methods = dict( add=arith_method(cls, operator.add, special), radd=arith_method(cls, radd, special), sub=arith_method(cls, operator.sub, special), mul=arith_method(cls, operator.mul, special), truediv=arith_method(cls, operator.truediv, special), floordiv=arith_method(cls, operator.floordiv, special), # Causes a floating point exception in the tests when numexpr enabled, # so for now no speedup mod=arith_method(cls, operator.mod, special), pow=arith_method(cls, operator.pow, special), # not entirely sure why this is necessary, but previously was included # so it's here to maintain compatibility rmul=arith_method(cls, rmul, special), rsub=arith_method(cls, rsub, special), rtruediv=arith_method(cls, rtruediv, special), rfloordiv=arith_method(cls, rfloordiv, special), rpow=arith_method(cls, rpow, special), rmod=arith_method(cls, rmod, special)) # yapf: enable new_methods['div'] = new_methods['truediv'] new_methods['rdiv'] = new_methods['rtruediv'] if have_divmod: # divmod doesn't have an op that is supported by numexpr new_methods['divmod'] = arith_method(cls, divmod, special) new_methods['rdivmod'] = arith_method(cls, rdivmod, special) new_methods.update(dict( eq=comp_method(cls, operator.eq, special), ne=comp_method(cls, operator.ne, special), lt=comp_method(cls, operator.lt, special), gt=comp_method(cls, operator.gt, special), le=comp_method(cls, operator.le, special), ge=comp_method(cls, operator.ge, special))) if bool_method: new_methods.update( dict(and_=bool_method(cls, operator.and_, special), or_=bool_method(cls, operator.or_, special), # For some reason ``^`` wasn't used in original. 
xor=bool_method(cls, operator.xor, special), rand_=bool_method(cls, rand_, special), ror_=bool_method(cls, ror_, special), rxor=bool_method(cls, rxor, special))) if special: dunderize = lambda x: '__{name}__'.format(name=x.strip('_')) else: dunderize = lambda x: x new_methods = {dunderize(k): v for k, v in new_methods.items()} return new_methods def add_methods(cls, new_methods): for name, method in new_methods.items(): # For most methods, if we find that the class already has a method # of the same name, it is OK to over-write it. The exception is # inplace methods (__iadd__, __isub__, ...) for SparseArray, which # retain the np.ndarray versions. force = not (issubclass(cls, ABCSparseArray) and name.startswith('__i')) if force or name not in cls.__dict__: setattr(cls, name, method) # ---------------------------------------------------------------------- # Arithmetic def add_special_arithmetic_methods(cls): """ Adds the full suite of special arithmetic methods (``__add__``, ``__sub__``, etc.) to the class. Parameters ---------- cls : class special methods will be defined and pinned to this class """ _, _, arith_method, comp_method, bool_method = _get_method_wrappers(cls) new_methods = _create_methods(cls, arith_method, comp_method, bool_method, special=True) # inplace operators (I feel like these should get passed an `inplace=True` # or just be removed def _wrap_inplace_method(method): """ return an inplace wrapper for this method """ def f(self, other): result = method(self, other) # this makes sure that we are aligned like the input # we are updating inplace so we want to ignore is_copy self._update_inplace(result.reindex_like(self, copy=False)._data, verify_is_copy=False) return self f.__name__ = "__i{name}__".format(name=method.__name__.strip("__")) return f new_methods.update( dict(__iadd__=_wrap_inplace_method(new_methods["__add__"]), __isub__=_wrap_inplace_method(new_methods["__sub__"]), __imul__=_wrap_inplace_method(new_methods["__mul__"]), __itruediv__=_wrap_inplace_method(new_methods["__truediv__"]), __ifloordiv__=_wrap_inplace_method(new_methods["__floordiv__"]), __imod__=_wrap_inplace_method(new_methods["__mod__"]), __ipow__=_wrap_inplace_method(new_methods["__pow__"]))) new_methods.update( dict(__iand__=_wrap_inplace_method(new_methods["__and__"]), __ior__=_wrap_inplace_method(new_methods["__or__"]), __ixor__=_wrap_inplace_method(new_methods["__xor__"]))) add_methods(cls, new_methods=new_methods) def add_flex_arithmetic_methods(cls): """ Adds the full suite of flex arithmetic methods (``pow``, ``mul``, ``add``) to the class. 
    Parameters
    ----------
    cls : class
        flex methods will be defined and pinned to this class
    """
    flex_arith_method, flex_comp_method, _, _, _ = _get_method_wrappers(cls)
    new_methods = _create_methods(cls, flex_arith_method,
                                  flex_comp_method, bool_method=None,
                                  special=False)
    new_methods.update(dict(multiply=new_methods['mul'],
                            subtract=new_methods['sub'],
                            divide=new_methods['div']))
    # opt out of bool flex methods for now
    assert not any(kname in new_methods
                   for kname in ('ror_', 'rxor', 'rand_'))

    add_methods(cls, new_methods=new_methods)


# -----------------------------------------------------------------------------
# Series


def _align_method_SERIES(left, right, align_asobject=False):
    """ align lhs and rhs Series """

    # ToDo: Different from _align_method_FRAME, list, tuple and ndarray
    # are not coerced here
    # because Series has inconsistencies described in #13637

    if isinstance(right, ABCSeries):
        # avoid repeated alignment
        if not left.index.equals(right.index):

            if align_asobject:
                # to keep original value's dtype for bool ops
                left = left.astype(object)
                right = right.astype(object)

            left, right = left.align(right, copy=False)

    return left, right


def _construct_result(left, result, index, name, dtype=None):
    """
    If the raw op result has a non-None name (e.g. it is an Index object)
    and the name argument is None, then passing name to the constructor will
    not be enough; we still need to override the name attribute.
    """
    out = left._constructor(result, index=index, dtype=dtype)
    out = out.__finalize__(left)
    out.name = name
    return out


def _construct_divmod_result(left, result, index, name, dtype=None):
    """divmod returns a tuple of like indexed series instead of a single
    series.
    """
    return (
        _construct_result(left, result[0], index=index, name=name,
                          dtype=dtype),
        _construct_result(left, result[1], index=index, name=name,
                          dtype=dtype),
    )


def _arith_method_SERIES(cls, op, special):
    """
    Wrapper function for Series arithmetic operations, to avoid
    code duplication.
    """
    str_rep = _get_opstr(op, cls)
    op_name = _get_op_name(op, special)
    eval_kwargs = _gen_eval_kwargs(op_name)
    fill_zeros = _gen_fill_zeros(op_name)
    construct_result = (_construct_divmod_result
                        if op in [divmod, rdivmod] else _construct_result)

    def na_op(x, y):
        """
        Return the result of evaluating op on the passed in values.

        If native types are not compatible, try coercion to object dtype.

        Parameters
        ----------
        x : array-like
        y : array-like or scalar

        Returns
        -------
        array-like

        Raises
        ------
        TypeError : invalid operation
        """
        import pandas.core.computation.expressions as expressions
        try:
            result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs)
        except TypeError:
            result = masked_arith_op(x, y, op)
        except Exception:  # TODO: more specific?
            if is_object_dtype(x):
                return libalgos.arrmap_object(x,
                                              lambda val: op(val, y))
            raise

        result = missing.fill_zeros(result, x, y, op_name, fill_zeros)
        return result

    def wrapper(left, right):
        if isinstance(right, ABCDataFrame):
            return NotImplemented

        left, right = _align_method_SERIES(left, right)
        res_name = get_op_result_name(left, right)
        right = maybe_upcast_for_op(right)

        if is_categorical_dtype(left):
            raise TypeError("{typ} cannot perform the operation "
                            "{op}".format(typ=type(left).__name__,
                                          op=str_rep))

        elif is_datetime64_dtype(left) or is_datetime64tz_dtype(left):
            # Give dispatch_to_index_op a chance for tests like
            # test_dt64_series_add_intlike, which the index dispatching handles
            # specifically.
            result = dispatch_to_index_op(op, left, right, pd.DatetimeIndex)
            return construct_result(left, result,
                                    index=left.index, name=res_name,
                                    dtype=result.dtype)

        elif (is_extension_array_dtype(left) or
                (is_extension_array_dtype(right) and not is_scalar(right))):
            # GH#22378 disallow scalar to exclude e.g. "category", "Int64"
            return dispatch_to_extension_op(op, left, right)

        elif is_timedelta64_dtype(left):
            result = dispatch_to_index_op(op, left, right, pd.TimedeltaIndex)
            return construct_result(left, result,
                                    index=left.index, name=res_name)

        elif is_timedelta64_dtype(right):
            # We should only get here with non-scalar or timedelta64('NaT')
            # values for right
            # Note: we cannot use dispatch_to_index_op because
            # that may incorrectly raise TypeError when we
            # should get NullFrequencyError
            result = op(pd.Index(left), right)
            return construct_result(left, result,
                                    index=left.index, name=res_name,
                                    dtype=result.dtype)

        lvalues = left.values
        rvalues = right
        if isinstance(rvalues, ABCSeries):
            rvalues = rvalues.values

        with np.errstate(all='ignore'):
            result = na_op(lvalues, rvalues)
        return construct_result(left, result,
                                index=left.index, name=res_name, dtype=None)

    wrapper.__name__ = op_name
    return wrapper


def _comp_method_OBJECT_ARRAY(op, x, y):
    if isinstance(y, list):
        y = construct_1d_object_array_from_listlike(y)
    if isinstance(y, (np.ndarray, ABCSeries, ABCIndex)):
        if not is_object_dtype(y.dtype):
            y = y.astype(np.object_)

        if isinstance(y, (ABCSeries, ABCIndex)):
            y = y.values

        result = libops.vec_compare(x, y, op)
    else:
        result = libops.scalar_compare(x, y, op)
    return result


def _comp_method_SERIES(cls, op, special):
    """
    Wrapper function for Series arithmetic operations, to avoid
    code duplication.
    """
    op_name = _get_op_name(op, special)
    masker = _gen_eval_kwargs(op_name).get('masker', False)

    def na_op(x, y):
        # TODO:
        # should have guarantees on what x, y can be type-wise
        # Extension Dtypes are not called here

        # Checking that cases that were once handled here are no longer
        # reachable.
        assert not (is_categorical_dtype(y) and not is_scalar(y))

        if is_object_dtype(x.dtype):
            result = _comp_method_OBJECT_ARRAY(op, x, y)

        elif is_datetimelike_v_numeric(x, y):
            return invalid_comparison(x, y, op)

        else:

            # we want to compare like types
            # we only want to convert to integer like if
            # we are not NotImplemented, otherwise
            # we would allow datetime64 (but viewed as i8) against
            # integer comparisons

            # we have a datetime/timedelta and may need to convert
            assert not needs_i8_conversion(x)
            mask = None
            if not is_scalar(y) and needs_i8_conversion(y):
                mask = isna(x) | isna(y)
                y = y.view('i8')
                x = x.view('i8')

            method = getattr(x, op_name, None)
            if method is not None:
                with np.errstate(all='ignore'):
                    result = method(y)
                if result is NotImplemented:
                    return invalid_comparison(x, y, op)
            else:
                result = op(x, y)

            if mask is not None and mask.any():
                result[mask] = masker

        return result

    def wrapper(self, other, axis=None):
        # Validate the axis parameter
        if axis is not None:
            self._get_axis_number(axis)

        res_name = get_op_result_name(self, other)

        if isinstance(other, list):
            # TODO: same for tuples?
            other = np.asarray(other)

        if isinstance(other, ABCDataFrame):  # pragma: no cover
            # Defer to DataFrame implementation; fail early
            return NotImplemented

        elif isinstance(other, ABCSeries) and not self._indexed_same(other):
            raise ValueError("Can only compare identically-labeled "
                             "Series objects")

        elif is_categorical_dtype(self):
            # Dispatch to Categorical implementation; pd.CategoricalIndex
            # behavior is non-canonical GH#19513
            res_values = dispatch_to_index_op(op, self, other, pd.Categorical)
            return self._constructor(res_values, index=self.index,
                                     name=res_name)

        elif is_datetime64_dtype(self) or is_datetime64tz_dtype(self):
            # Dispatch to DatetimeIndex to ensure identical
            # Series/Index behavior
            if (isinstance(other, datetime.date) and
                    not isinstance(other, datetime.datetime)):
                # https://github.com/pandas-dev/pandas/issues/21152
                # Compatibility for difference between Series comparison w/
                # datetime and date
                msg = (
                    "Comparing Series of datetimes with 'datetime.date'. "
                    "Currently, the 'datetime.date' is coerced to a "
                    "datetime. In the future pandas will not coerce, "
                    "and {future}. "
                    "To retain the current behavior, "
                    "convert the 'datetime.date' to a datetime with "
                    "'pd.Timestamp'."
                )

                if op in {operator.lt, operator.le,
                          operator.gt, operator.ge}:
                    future = "a TypeError will be raised"
                else:
                    future = (
                        "the values will not compare equal to the "
                        "'datetime.date'"
                    )
                msg = '\n'.join(textwrap.wrap(msg.format(future=future)))
                warnings.warn(msg, FutureWarning, stacklevel=2)
                other = pd.Timestamp(other)

            res_values = dispatch_to_index_op(op, self, other,
                                              pd.DatetimeIndex)

            return self._constructor(res_values, index=self.index,
                                     name=res_name)

        elif is_timedelta64_dtype(self):
            res_values = dispatch_to_index_op(op, self, other,
                                              pd.TimedeltaIndex)
            return self._constructor(res_values, index=self.index,
                                     name=res_name)

        elif (is_extension_array_dtype(self) or
              (is_extension_array_dtype(other) and not is_scalar(other))):
            # Note: the `not is_scalar(other)` condition rules out
            # e.g. other == "category"
            return dispatch_to_extension_op(op, self, other)

        elif isinstance(other, ABCSeries):
            # By this point we have checked that self._indexed_same(other)
            res_values = na_op(self.values, other.values)
            # rename is needed in case res_name is None and res_values.name
            # is not.
            return self._constructor(res_values, index=self.index,
                                     name=res_name).rename(res_name)

        elif isinstance(other, (np.ndarray, pd.Index)):
            # do not check length of zerodim array
            # as it will broadcast
            if other.ndim != 0 and len(self) != len(other):
                raise ValueError('Lengths must match to compare')

            res_values = na_op(self.values, np.asarray(other))
            result = self._constructor(res_values, index=self.index)
            # rename is needed in case res_name is None and self.name
            # is not.
return result.__finalize__(self).rename(res_name) elif is_scalar(other) and isna(other): # numpy does not like comparisons vs None if op is operator.ne: res_values = np.ones(len(self), dtype=bool) else: res_values = np.zeros(len(self), dtype=bool) return self._constructor(res_values, index=self.index, name=res_name, dtype='bool') else: values = self.get_values() with np.errstate(all='ignore'): res = na_op(values, other) if is_scalar(res): raise TypeError('Could not compare {typ} type with Series' .format(typ=type(other))) # always return a full value series here res_values = com.values_from_object(res) return self._constructor(res_values, index=self.index, name=res_name, dtype='bool') wrapper.__name__ = op_name return wrapper def _bool_method_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def na_op(x, y): try: result = op(x, y) except TypeError: assert not isinstance(y, (list, ABCSeries, ABCIndexClass)) if isinstance(y, np.ndarray): # bool-bool dtype operations should be OK, should not get here assert not (is_bool_dtype(x) and is_bool_dtype(y)) x = ensure_object(x) y = ensure_object(y) result = libops.vec_binop(x, y, op) else: # let null fall thru assert lib.is_scalar(y) if not isna(y): y = bool(y) try: result = libops.scalar_binop(x, y, op) except (TypeError, ValueError, AttributeError, OverflowError, NotImplementedError): raise TypeError("cannot compare a dtyped [{dtype}] array " "with a scalar of type [{typ}]" .format(dtype=x.dtype, typ=type(y).__name__)) return result fill_int = lambda x: x.fillna(0) fill_bool = lambda x: x.fillna(False).astype(bool) def wrapper(self, other): is_self_int_dtype = is_integer_dtype(self.dtype) self, other = _align_method_SERIES(self, other, align_asobject=True) res_name = get_op_result_name(self, other) if isinstance(other, ABCDataFrame): # Defer to DataFrame implementation; fail early return NotImplemented elif isinstance(other, (ABCSeries, ABCIndexClass)): is_other_int_dtype = is_integer_dtype(other.dtype) other = fill_int(other) if is_other_int_dtype else fill_bool(other) ovalues = other.values finalizer = lambda x: x else: # scalars, list, tuple, np.array is_other_int_dtype = is_integer_dtype(np.asarray(other)) if is_list_like(other) and not isinstance(other, np.ndarray): # TODO: Can we do this before the is_integer_dtype check? # could the is_integer_dtype check be checking the wrong # thing? e.g. other = [[0, 1], [2, 3], [4, 5]]? other = construct_1d_object_array_from_listlike(other) ovalues = other finalizer = lambda x: x.__finalize__(self) # For int vs int `^`, `|`, `&` are bitwise operators and return # integer dtypes. 
Otherwise these are boolean ops filler = (fill_int if is_self_int_dtype and is_other_int_dtype else fill_bool) res_values = na_op(self.values, ovalues) unfilled = self._constructor(res_values, index=self.index, name=res_name) filled = filler(unfilled) return finalizer(filled) wrapper.__name__ = op_name return wrapper def _flex_method_SERIES(cls, op, special): name = _get_op_name(op, special) doc = _make_flex_doc(name, 'series') @Appender(doc) def flex_wrapper(self, other, level=None, fill_value=None, axis=0): # validate axis if axis is not None: self._get_axis_number(axis) if isinstance(other, ABCSeries): return self._binop(other, op, level=level, fill_value=fill_value) elif isinstance(other, (np.ndarray, list, tuple)): if len(other) != len(self): raise ValueError('Lengths must be equal') other = self._constructor(other, self.index) return self._binop(other, op, level=level, fill_value=fill_value) else: if fill_value is not None: self = self.fillna(fill_value) return self._constructor(op(self, other), self.index).__finalize__(self) flex_wrapper.__name__ = name return flex_wrapper # ----------------------------------------------------------------------------- # DataFrame def _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None): """ Apply binary operator `func` to self, other using alignment and fill conventions determined by the fill_value, axis, and level kwargs. Parameters ---------- self : DataFrame other : Series func : binary operator fill_value : object, default None axis : {0, 1, 'columns', 'index', None}, default None level : int or None, default None Returns ------- result : DataFrame """ if fill_value is not None: raise NotImplementedError("fill_value {fill} not supported." .format(fill=fill_value)) if axis is not None: axis = self._get_axis_number(axis) if axis == 0: return self._combine_match_index(other, func, level=level) else: return self._combine_match_columns(other, func, level=level) else: if not len(other): return self * np.nan if not len(self): # Ambiguous case, use _series so works with DataFrame return self._constructor(data=self._series, index=self.index, columns=self.columns) # default axis is columns return self._combine_match_columns(other, func, level=level) def _align_method_FRAME(left, right, axis): """ convert rhs to meet lhs dims if input is list, tuple or np.ndarray """ def to_series(right): msg = ('Unable to coerce to Series, length must be {req_len}: ' 'given {given_len}') if axis is not None and left._get_axis_name(axis) == 'index': if len(left.index) != len(right): raise ValueError(msg.format(req_len=len(left.index), given_len=len(right))) right = left._constructor_sliced(right, index=left.index) else: if len(left.columns) != len(right): raise ValueError(msg.format(req_len=len(left.columns), given_len=len(right))) right = left._constructor_sliced(right, index=left.columns) return right if isinstance(right, np.ndarray): if right.ndim == 1: right = to_series(right) elif right.ndim == 2: if right.shape == left.shape: right = left._constructor(right, index=left.index, columns=left.columns) elif right.shape[0] == left.shape[0] and right.shape[1] == 1: # Broadcast across columns right = np.broadcast_to(right, left.shape) right = left._constructor(right, index=left.index, columns=left.columns) elif right.shape[1] == left.shape[1] and right.shape[0] == 1: # Broadcast along rows right = to_series(right[0, :]) else: raise ValueError("Unable to coerce to DataFrame, shape " "must be {req_shape}: given {given_shape}" 
.format(req_shape=left.shape, given_shape=right.shape)) elif right.ndim > 2: raise ValueError('Unable to coerce to Series/DataFrame, dim ' 'must be <= 2: {dim}'.format(dim=right.shape)) elif (is_list_like(right) and not isinstance(right, (ABCSeries, ABCDataFrame))): # GH17901 right = to_series(right) return right def _arith_method_FRAME(cls, op, special): str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) eval_kwargs = _gen_eval_kwargs(op_name) fill_zeros = _gen_fill_zeros(op_name) default_axis = _get_frame_op_default_axis(op_name) def na_op(x, y): import pandas.core.computation.expressions as expressions try: result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs) except TypeError: result = masked_arith_op(x, y, op) result = missing.fill_zeros(result, x, y, op_name, fill_zeros) return result if op_name in _op_descriptions: # i.e. include "add" but not "__add__" doc = _make_flex_doc(op_name, 'dataframe') else: doc = _arith_doc_FRAME % op_name @Appender(doc) def f(self, other, axis=default_axis, level=None, fill_value=None): other = _align_method_FRAME(self, other, axis) if isinstance(other, ABCDataFrame): # Another DataFrame pass_op = op if should_series_dispatch(self, other, op) else na_op return self._combine_frame(other, pass_op, fill_value, level) elif isinstance(other, ABCSeries): # For these values of `axis`, we end up dispatching to Series op, # so do not want the masked op. pass_op = op if axis in [0, "columns", None] else na_op return _combine_series_frame(self, other, pass_op, fill_value=fill_value, axis=axis, level=level) else: if fill_value is not None: self = self.fillna(fill_value) assert np.ndim(other) == 0 return self._combine_const(other, op) f.__name__ = op_name return f def _flex_comp_method_FRAME(cls, op, special): str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) default_axis = _get_frame_op_default_axis(op_name) def na_op(x, y): try: with np.errstate(invalid='ignore'): result = op(x, y) except TypeError: result = mask_cmp_op(x, y, op) return result doc = _flex_comp_doc_FRAME.format(op_name=op_name, desc=_op_descriptions[op_name]['desc']) @Appender(doc) def f(self, other, axis=default_axis, level=None): other = _align_method_FRAME(self, other, axis) if isinstance(other, ABCDataFrame): # Another DataFrame if not self._indexed_same(other): self, other = self.align(other, 'outer', level=level, copy=False) return dispatch_to_series(self, other, na_op, str_rep) elif isinstance(other, ABCSeries): return _combine_series_frame(self, other, na_op, fill_value=None, axis=axis, level=level) else: assert np.ndim(other) == 0, other return self._combine_const(other, na_op) f.__name__ = op_name return f def _comp_method_FRAME(cls, func, special): str_rep = _get_opstr(func, cls) op_name = _get_op_name(func, special) @Appender('Wrapper for comparison method {name}'.format(name=op_name)) def f(self, other): other = _align_method_FRAME(self, other, axis=None) if isinstance(other, ABCDataFrame): # Another DataFrame if not self._indexed_same(other): raise ValueError('Can only compare identically-labeled ' 'DataFrame objects') return dispatch_to_series(self, other, func, str_rep) elif isinstance(other, ABCSeries): return _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None) else: # straight boolean comparisons we want to allow all columns # (regardless of dtype to pass thru) See #4537 for discussion. 
res = self._combine_const(other, func) return res.fillna(True).astype(bool) f.__name__ = op_name return f # ----------------------------------------------------------------------------- # Sparse def _cast_sparse_series_op(left, right, opname): """ For SparseSeries operation, coerce to float64 if the result is expected to have NaN or inf values Parameters ---------- left : SparseArray right : SparseArray opname : str Returns ------- left : SparseArray right : SparseArray """ from pandas.core.sparse.api import SparseDtype opname = opname.strip('_') # TODO: This should be moved to the array? if is_integer_dtype(left) and is_integer_dtype(right): # series coerces to float64 if result should have NaN/inf if opname in ('floordiv', 'mod') and (right.to_dense() == 0).any(): left = left.astype(SparseDtype(np.float64, left.fill_value)) right = right.astype(SparseDtype(np.float64, right.fill_value)) elif opname in ('rfloordiv', 'rmod') and (left.to_dense() == 0).any(): left = left.astype(SparseDtype(np.float64, left.fill_value)) right = right.astype(SparseDtype(np.float64, right.fill_value)) return left, right def _arith_method_SPARSE_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def wrapper(self, other): if isinstance(other, ABCDataFrame): return NotImplemented elif isinstance(other, ABCSeries): if not isinstance(other, ABCSparseSeries): other = other.to_sparse(fill_value=self.fill_value) return _sparse_series_op(self, other, op, op_name) elif is_scalar(other): with np.errstate(all='ignore'): new_values = op(self.values, other) return self._constructor(new_values, index=self.index, name=self.name) else: # pragma: no cover raise TypeError('operation with {other} not supported' .format(other=type(other))) wrapper.__name__ = op_name return wrapper def _sparse_series_op(left, right, op, name): left, right = left.align(right, join='outer', copy=False) new_index = left.index new_name = get_op_result_name(left, right) from pandas.core.arrays.sparse import _sparse_array_op lvalues, rvalues = _cast_sparse_series_op(left.values, right.values, name) result = _sparse_array_op(lvalues, rvalues, op, name) return left._constructor(result, index=new_index, name=new_name) def _arith_method_SPARSE_ARRAY(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def wrapper(self, other): from pandas.core.arrays.sparse.array import ( SparseArray, _sparse_array_op, _wrap_result, _get_fill) if isinstance(other, np.ndarray): if len(self) != len(other): raise AssertionError("length mismatch: {self} vs. {other}" .format(self=len(self), other=len(other))) if not isinstance(other, SparseArray): dtype = getattr(other, 'dtype', None) other = SparseArray(other, fill_value=self.fill_value, dtype=dtype) return _sparse_array_op(self, other, op, op_name) elif is_scalar(other): with np.errstate(all='ignore'): fill = op(_get_fill(self), np.asarray(other)) result = op(self.sp_values, other) return _wrap_result(op_name, result, self.sp_index, fill) else: # pragma: no cover raise TypeError('operation with {other} not supported' .format(other=type(other))) wrapper.__name__ = op_name return wrapper
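# --- Editor's sketch (not part of the module above) ---------------------------
# Hedged illustration of the python3-style semantics that invalid_comparison
# implements: mismatched-type equality yields all-False, inequality all-True,
# and ordering comparisons raise TypeError.  Assumes invalid_comparison from
# the module above is in scope.
import operator

import numpy as np

left = np.arange(3)  # any array-like with .shape and .dtype works here
assert invalid_comparison(left, object(), operator.eq).tolist() == [False] * 3
assert invalid_comparison(left, object(), operator.ne).tolist() == [True] * 3
try:
    invalid_comparison(left, object(), operator.lt)
except TypeError:
    pass  # ordering comparisons between mismatched types raise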
from datetime import timedelta import numpy as np import pytest import pandas as pd from pandas import Timedelta, TimedeltaIndex, timedelta_range, to_timedelta from pandas.core.arrays import TimedeltaArray import pandas.util.testing as tm class TestTimedeltaIndex: def test_verify_integrity_deprecated(self): # GH#23919 with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(['1 Day'], verify_integrity=False) def test_range_kwargs_deprecated(self): # GH#23919 with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(start='1 Day', end='3 Days', freq='D') def test_int64_nocopy(self): # GH#23539 check that a copy isn't made when we pass int64 data # and copy=False arr = np.arange(10, dtype=np.int64) tdi = TimedeltaIndex(arr, copy=False) assert tdi._data._data.base is arr def test_infer_from_tdi(self): # GH#23539 # fast-path for inferring a frequency if the passed data already # has one tdi = pd.timedelta_range('1 second', periods=10**7, freq='1s') result = pd.TimedeltaIndex(tdi, freq='infer') assert result.freq == tdi.freq # check that inferred_freq was not called by checking that the # value has not been cached assert "inferred_freq" not in getattr(result, "_cache", {}) def test_infer_from_tdi_mismatch(self): # GH#23539 # fast-path for invalidating a frequency if the passed data already # has one and it does not match the `freq` input tdi = pd.timedelta_range('1 second', periods=100, freq='1s') msg = ("Inferred frequency .* from passed values does " "not conform to passed frequency") with pytest.raises(ValueError, match=msg): TimedeltaIndex(tdi, freq='D') with pytest.raises(ValueError, match=msg): # GH#23789 TimedeltaArray(tdi, freq='D') def test_dt64_data_invalid(self): # GH#23539 # passing tz-aware DatetimeIndex raises, naive or ndarray[datetime64] # does not yet, but will in the future dti = pd.date_range('2016-01-01', periods=3) msg = "cannot be converted to timedelta64" with pytest.raises(TypeError, match=msg): TimedeltaIndex(dti.tz_localize('Europe/Brussels')) with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(dti) with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(np.asarray(dti)) def test_float64_ns_rounded(self): # GH#23539 without specifying a unit, floats are regarded as nanos, # and fractional portions are truncated tdi = TimedeltaIndex([2.3, 9.7]) expected = TimedeltaIndex([2, 9]) tm.assert_index_equal(tdi, expected) # integral floats are non-lossy tdi = TimedeltaIndex([2.0, 9.0]) expected = TimedeltaIndex([2, 9]) tm.assert_index_equal(tdi, expected) # NaNs get converted to NaT tdi = TimedeltaIndex([2.0, np.nan]) expected = TimedeltaIndex([pd.Timedelta(nanoseconds=2), pd.NaT]) tm.assert_index_equal(tdi, expected) def test_float64_unit_conversion(self): # GH#23539 tdi = TimedeltaIndex([1.5, 2.25], unit='D') expected = TimedeltaIndex([Timedelta(days=1.5), Timedelta(days=2.25)]) tm.assert_index_equal(tdi, expected) def test_construction_base_constructor(self): arr = [pd.Timedelta('1 days'), pd.NaT, pd.Timedelta('3 days')] tm.assert_index_equal(pd.Index(arr), pd.TimedeltaIndex(arr)) tm.assert_index_equal(pd.Index(np.array(arr)), pd.TimedeltaIndex(np.array(arr))) arr = [np.nan, pd.NaT, pd.Timedelta('1 days')] tm.assert_index_equal(pd.Index(arr), pd.TimedeltaIndex(arr)) tm.assert_index_equal(pd.Index(np.array(arr)), pd.TimedeltaIndex(np.array(arr))) def test_constructor(self): expected = TimedeltaIndex(['1 days', '1 days 00:00:05', '2 days', '2 days 00:00:02', '0 days 00:00:03']) result = TimedeltaIndex(['1 days', '1 days, 00:00:05', np.timedelta64( 
2, 'D'), timedelta(days=2, seconds=2), pd.offsets.Second(3)]) tm.assert_index_equal(result, expected) # unicode result = TimedeltaIndex(['1 days', '1 days, 00:00:05', np.timedelta64( 2, 'D'), timedelta(days=2, seconds=2), pd.offsets.Second(3)]) expected = TimedeltaIndex(['0 days 00:00:00', '0 days 00:00:01', '0 days 00:00:02']) tm.assert_index_equal(TimedeltaIndex(range(3), unit='s'), expected) expected = TimedeltaIndex(['0 days 00:00:00', '0 days 00:00:05', '0 days 00:00:09']) tm.assert_index_equal(TimedeltaIndex([0, 5, 9], unit='s'), expected) expected = TimedeltaIndex( ['0 days 00:00:00.400', '0 days 00:00:00.450', '0 days 00:00:01.200']) tm.assert_index_equal(TimedeltaIndex([400, 450, 1200], unit='ms'), expected) def test_constructor_iso(self): # GH #21877 expected = timedelta_range('1s', periods=9, freq='s') durations = ['P0DT0H0M{}S'.format(i) for i in range(1, 10)] result = to_timedelta(durations) tm.assert_index_equal(result, expected) def test_constructor_coverage(self): rng = timedelta_range('1 days', periods=10.5) exp = timedelta_range('1 days', periods=10) tm.assert_index_equal(rng, exp) msg = 'periods must be a number, got foo' with pytest.raises(TypeError, match=msg): timedelta_range(start='1 days', periods='foo', freq='D') with pytest.raises(ValueError): with tm.assert_produces_warning(FutureWarning): TimedeltaIndex(start='1 days', end='10 days') with pytest.raises(TypeError): TimedeltaIndex('1 days') # generator expression gen = (timedelta(i) for i in range(10)) result = TimedeltaIndex(gen) expected = TimedeltaIndex([timedelta(i) for i in range(10)]) tm.assert_index_equal(result, expected) # NumPy string array strings = np.array(['1 days', '2 days', '3 days']) result = TimedeltaIndex(strings) expected = to_timedelta([1, 2, 3], unit='d') tm.assert_index_equal(result, expected) from_ints = TimedeltaIndex(expected.asi8) tm.assert_index_equal(from_ints, expected) # non-conforming freq msg = ("Inferred frequency None from passed values does not conform to" " passed frequency D") with pytest.raises(ValueError, match=msg): TimedeltaIndex(['1 days', '2 days', '4 days'], freq='D') msg = ("Of the four parameters: start, end, periods, and freq, exactly" " three must be specified") with pytest.raises(ValueError, match=msg): timedelta_range(periods=10, freq='D') def test_constructor_name(self): idx = timedelta_range(start='1 days', periods=1, freq='D', name='TEST') assert idx.name == 'TEST' # GH10025 idx2 = TimedeltaIndex(idx, name='something else') assert idx2.name == 'something else' def test_constructor_no_precision_warns(self): # GH-24753, GH-24739 expected = pd.TimedeltaIndex(['2000'], dtype='timedelta64[ns]') # we set the stacklevel for DatetimeIndex with tm.assert_produces_warning(FutureWarning): result = pd.TimedeltaIndex(['2000'], dtype='timedelta64') tm.assert_index_equal(result, expected) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = pd.Index(['2000'], dtype='timedelta64') tm.assert_index_equal(result, expected) def test_constructor_wrong_precision_raises(self): with pytest.raises(ValueError): pd.TimedeltaIndex(['2000'], dtype='timedelta64[us]')
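# --- Editor's sketch (illustrative, mirrors the float-handling tests above) ---
# Without a unit, floats are interpreted as nanoseconds and fractional parts
# are truncated; with an explicit unit the conversion is lossless.  Behavior
# as of the pandas version under test here.
import pandas as pd

print(pd.TimedeltaIndex([2.3, 9.7]))             # truncates to 2ns and 9ns
print(pd.TimedeltaIndex([1.5, 2.25], unit='D'))  # 1 days 12:00:00, 2 days 06:00:00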
cbertinato/pandas
pandas/tests/indexes/timedeltas/test_construction.py
pandas/core/ops.py
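# --- Editor's sketch (illustrative) --------------------------------------------
# The column-by-column strategy used by dispatch_to_series in pandas/core/ops.py
# above, restated in plain pandas: evaluate per column position (iloc, for
# non-unique-column compatibility), rebuild the frame, then pin the columns.
import operator

import pandas as pd

df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
new_data = {i: operator.add(df.iloc[:, i], 10) for i in range(len(df.columns))}
result = pd.DataFrame(new_data, index=df.index, copy=False)
result.columns = df.columns
print(result)  # both columns shifted by 10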
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. from __future__ import unicode_literals from django.conf import settings from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ from enumfields import Enum, EnumIntegerField from filer.fields.image import FilerImageField from jsonfield import JSONField from parler.models import TranslatedFields from shuup.core.fields import CurrencyField, InternalIdentifierField from shuup.core.pricing import TaxfulPrice, TaxlessPrice from shuup.utils.analog import define_log_model from ._base import ChangeProtected, TranslatableShuupModel from ._orders import Order def _get_default_currency(): return settings.SHUUP_HOME_CURRENCY class ShopStatus(Enum): DISABLED = 0 ENABLED = 1 class Labels: DISABLED = _('disabled') ENABLED = _('enabled') @python_2_unicode_compatible class Shop(ChangeProtected, TranslatableShuupModel): protected_fields = ["currency", "prices_include_tax"] change_protect_message = _("The following fields cannot be changed since there are existing orders for this shop") identifier = InternalIdentifierField(unique=True) domain = models.CharField(max_length=128, blank=True, null=True, unique=True, verbose_name=_("domain"), help_text=_( "Your shop domain name. Use this field to configure the URL that is used to visit your site. " "Note: this requires additional configuration through your internet domain registrar." )) status = EnumIntegerField(ShopStatus, default=ShopStatus.DISABLED, verbose_name=_("status"), help_text=_( "Your shop status. Disable your shop if it is no longer in use." )) owner = models.ForeignKey("Contact", blank=True, null=True, on_delete=models.SET_NULL, verbose_name=_("contact")) options = JSONField(blank=True, null=True, verbose_name=_("options")) currency = CurrencyField(default=_get_default_currency, verbose_name=_("currency"), help_text=_( "The primary shop currency. This is the currency used when selling your products." )) prices_include_tax = models.BooleanField(default=True, verbose_name=_("prices include tax"), help_text=_( "This option defines whether product prices entered in admin include taxes. " "Note this behavior can be overridden with contact group pricing." )) logo = FilerImageField(verbose_name=_("logo"), blank=True, null=True, on_delete=models.SET_NULL) maintenance_mode = models.BooleanField(verbose_name=_("maintenance mode"), default=False, help_text=_( "Check this if you would like to make your shop temporarily unavailable while you do some shop maintenance." )) contact_address = models.ForeignKey( "MutableAddress", verbose_name=_("contact address"), blank=True, null=True, on_delete=models.SET_NULL) translations = TranslatedFields( name=models.CharField(max_length=64, verbose_name=_("name"), help_text=_( "The shop name. This name is displayed throughout admin." )), public_name=models.CharField(max_length=64, verbose_name=_("public name"), help_text=_( "The public shop name. This name is displayed in the store front and in any customer email correspondence." )), maintenance_message=models.CharField( max_length=300, blank=True, verbose_name=_("maintenance message"), help_text=_( "The message to display to customers while your shop is in maintenance mode." 
) ) ) def __str__(self): return self.safe_translation_getter("name", default="Shop %d" % self.pk) def create_price(self, value): """ Create a price with given value and settings of this shop. Takes the ``prices_include_tax`` and ``currency`` settings of this Shop into account. :type value: decimal.Decimal|int|str :rtype: shuup.core.pricing.Price """ if self.prices_include_tax: return TaxfulPrice(value, self.currency) else: return TaxlessPrice(value, self.currency) def _are_changes_protected(self): return Order.objects.filter(shop=self).exists() ShopLogEntry = define_log_model(Shop)
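# --- Editor's sketch (not part of the module above) ---------------------------
# create_price picks the price class from the shop's tax mode, so calling code
# can stay agnostic about whether entered prices include tax.  Wrapped in a
# function (and using unsaved instances) so nothing runs at import time;
# assumes Django apps are loaded when it is called.
def _create_price_sketch():
    taxful_shop = Shop(currency="EUR", prices_include_tax=True)
    taxless_shop = Shop(currency="USD", prices_include_tax=False)
    assert isinstance(taxful_shop.create_price("9.90"), TaxfulPrice)
    assert isinstance(taxless_shop.create_price("9.90"), TaxlessPrice)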
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. from collections import Counter import pytest from django.core.exceptions import ImproperlyConfigured from shuup.admin.utils.urls import admin_url, get_model_url, NoModelUrl from shuup.core.models import Product from shuup_tests.admin.utils import admin_only_urls from shuup_tests.utils.faux_users import StaffUser def test_model_url(): with admin_only_urls(): with pytest.raises(NoModelUrl): get_model_url(Counter) # That's silly! p = Product() p.pk = 3 assert get_model_url(p) def test_model_url_with_permissions(): permissions = set(["shuup.add_product", "shuup.delete_product", "shuup.change_product"]) p = Product() p.pk = 3 # If no user is given, don't check for permissions assert get_model_url(p) # If a user is given and no permissions are provided, check for default model permissions user = StaffUser() with pytest.raises(NoModelUrl): assert get_model_url(p, user=user) # If a user is given and permissions are provided, check for those permissions assert get_model_url(p, user=user, required_permissions=()) with pytest.raises(NoModelUrl): assert get_model_url(p, user=user, required_permissions=["shuup.add_product"]) # Confirm that url is returned with correct permissions user.permissions = permissions assert get_model_url(p, user=user) assert get_model_url(p, user=user, required_permissions=permissions) def test_invalid_admin_url(): with pytest.raises(ImproperlyConfigured): admin_url("", "") def test_admin_url_prefix(): assert admin_url("", "foo", prefix="bar")._callback_str == "bar.foo"
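# --- Editor's sketch (illustrative) --------------------------------------------
# The permission flow exercised above, condensed: a staff user starts with no
# model permissions, so get_model_url raises NoModelUrl until the default
# product permissions are granted.  Uses only names imported by this module.
def _permission_flow_sketch():
    product = Product()
    product.pk = 42
    user = StaffUser()
    try:
        get_model_url(product, user=user)
    except NoModelUrl:
        pass  # no permissions granted yet
    user.permissions = set(["shuup.add_product", "shuup.delete_product",
                            "shuup.change_product"])
    assert get_model_url(product, user=user)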
suutari/shoop
shuup_tests/admin/test_urls.py
shuup/core/models/_shops.py
import ply.lex as lex states = ( ('quoted', 'exclusive'), ) reserved = [ 'SELECT', 'VERSION', 'AUTO_INCREMENT', 'CREATE', 'DEFAULT', 'FULL', 'INTEGER', 'KEY', 'NULL', 'PRIMARY', 'SHOW', 'TABLE', 'TABLES', 'VARCHAR', 'NOT', 'DATETIME', 'DATABASE', 'DATABASES', 'USE', 'INT', 'FROM', 'COMMIT', 'WHERE', 'OR', 'AND', 'IS', 'SET', 'AUTOCOMMIT', 'LONGTEXT', 'SMALLINT', 'UNSIGNED', 'BOOL', 'TINYINT', 'UNIQUE', 'NAMES', 'INSERT', 'INTO', 'VALUES', 'DROP', 'LIMIT', 'AS', 'UPDATE', 'COUNT', 'ORDER', 'BY', 'ASC', 'DESC' ] tokens = ['NUMBER', 'STRING', 'STRING_VALUE', 'GREATER_OR_EQ', 'LESS_OR_EQ', 'N_EQ'] + reserved literals = "(),`'.@=><*" t_ignore = ' \t\n' t_quoted_ignore = t_ignore t_NUMBER = r'[0-9]+' t_GREATER_OR_EQ = r'>=' t_LESS_OR_EQ = r'<=' t_N_EQ = r'<>|!=' def t_begin_quoted(t): r"""'""" t.lexer.begin('quoted') def t_quoted_STRING_VALUE(t): r"""[- :.a-zA-Z0-9$/+=_@]+""" if t.value.upper() in reserved: t.type = t.value.upper() t.value = t.value.upper() else: t.type = 'STRING_VALUE' return t def t_quoted_end(t): r"""'""" #t.type = 'STRING_VALUE' #t.value = None t.lexer.begin('INITIAL') #return t def t_STRING(t): r"""[_a-zA-Z0-9]*[_a-zA-Z]+[_a-zA-Z0-9]*""" if t.value.upper() in reserved: t.type = t.value.upper() t.value = t.value.upper() else: t.type = 'STRING' return t def t_error(t): print("Illegal character '%s'" % t.value[0]) t.lexer.skip(1) def t_quoted_error(t): t_error(t) lexer = lex.lex(debug=False)
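# --- Editor's sketch (not part of the module above) ----------------------------
# Tokenizing a statement with the lexer defined above: bare words come back as
# STRING (or as a reserved-word token when they match the keyword list), quoted
# literals come back as STRING_VALUE via the 'quoted' state, and '=' is emitted
# as a literal.
lexer.input("SELECT name FROM t1 WHERE id = '10'")
for tok in lexer:
    print(tok.type, tok.value)
# -> SELECT, STRING(name), FROM, STRING(t1), WHERE, STRING(id), '=',
#    STRING_VALUE(10)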
#!/usr/bin/env python # -*- coding: utf-8 -*- import json import mock import pytest import time from pyetcd import EtcdResult, EtcdNodeExist, EtcdKeyNotFound from pyetcd.client import Client from etcdb import NotSupportedError, Time, Timestamp, DateFromTicks, TimeFromTicks, TimestampFromTicks, Binary, Date, \ _split_version, ProgrammingError, OperationalError, LOCK_WAIT_TIMEOUT from etcdb.connection import Connection from etcdb.cursor import Cursor, ColInfo from etcdb.etcddate import EtcdDate from etcdb.etcdstring import EtcdString from etcdb.etcdtime import EtcdTime from etcdb.etcdtimestamp import EtcdTimestamp from etcdb.sqlparser.sql_tree import SQLTree @pytest.fixture def t_2016_9_21_23_10_3(): return 1474499403.0 def test_EtcdDate(): d = EtcdDate(2016, 9, 11) assert d.year == 2016 assert d.month == 9 assert d.day == 11 def test_EtcdTime(): t = EtcdTime(15, 51, 10) assert t.hour == 15 assert t.minute == 51 assert t.second == 10 def test_EtcdTimestamp(): ts = EtcdTimestamp(2016, 9, 11, 15, 51, 10) assert ts.year == 2016 assert ts.month == 9 assert ts.day == 11 assert ts.hour == 15 assert ts.minute == 51 assert ts.second == 10 def test_EtcdString(): s = EtcdString('foo') assert s._string == 'foo' def test_Date(): d = Date(2016, 9, 11) assert isinstance(d, EtcdDate) assert d.year == 2016 assert d.month == 9 assert d.day == 11 def test_Time(): t = Time(15, 53, 10) assert isinstance(t, EtcdTime) assert t.hour == 15 assert t.minute == 53 assert t.second == 10 def test_Timestamp(): ts = Timestamp(2016, 9, 11, 15, 51, 10) assert isinstance(ts, EtcdTimestamp) assert ts.year == 2016 assert ts.month == 9 assert ts.day == 11 assert ts.hour == 15 assert ts.minute == 51 assert ts.second == 10 def test_DateFromTicks(t_2016_9_21_23_10_3): d = DateFromTicks(t_2016_9_21_23_10_3) assert d.year == time.localtime(t_2016_9_21_23_10_3).tm_year assert d.month == time.localtime(t_2016_9_21_23_10_3).tm_mon assert d.day == time.localtime(t_2016_9_21_23_10_3).tm_mday def test_TimeFromTicks(t_2016_9_21_23_10_3): t = TimeFromTicks(t_2016_9_21_23_10_3) assert t.hour == time.localtime(t_2016_9_21_23_10_3).tm_hour assert t.minute == time.localtime(t_2016_9_21_23_10_3).tm_min assert t.second == time.localtime(t_2016_9_21_23_10_3).tm_sec def test_TimestampFromTicks(t_2016_9_21_23_10_3): d = TimestampFromTicks(t_2016_9_21_23_10_3) assert d.year == time.localtime(t_2016_9_21_23_10_3).tm_year assert d.month == time.localtime(t_2016_9_21_23_10_3).tm_mon assert d.day == time.localtime(t_2016_9_21_23_10_3).tm_mday assert d.hour == time.localtime(t_2016_9_21_23_10_3).tm_hour assert d.minute == time.localtime(t_2016_9_21_23_10_3).tm_min assert d.second == time.localtime(t_2016_9_21_23_10_3).tm_sec def test_Binary(): s = Binary('foo') assert isinstance(s, EtcdString) @pytest.fixture def etcdb_connection(): return Connection(db='foo') @pytest.fixture def cursor(etcdb_connection): return etcdb_connection.cursor() def test_connection(etcdb_connection): assert isinstance(etcdb_connection._client, Client) def test_connection_commit(etcdb_connection): with pytest.raises(NotSupportedError): etcdb_connection.commit() def test_connection_rollback(etcdb_connection): with pytest.raises(NotSupportedError): etcdb_connection.rollback() def test_connection_cursor(etcdb_connection): assert isinstance(etcdb_connection.cursor(), Cursor) @mock.patch.object(Client, 'version') def test_cursor_execute(mock_client, etcdb_connection): cursor = etcdb_connection.cursor() assert cursor.execute('SELECT VERSION()') == 1 def test_split_version(): assert 
_split_version('a.b.c') == ('a', 'b', 'c') @mock.patch.object(Client, 'version') def test_cursor_fetchone(mock_client, etcdb_connection): mock_client.return_value = '2.3.7' cursor = etcdb_connection.cursor() cursor.execute('SELECT VERSION()') assert cursor.fetchone() == ('2.3.7', ) @pytest.mark.parametrize('kwargs,allowed_keys,kwargs_sanitized', [ ( { 'host': '10.10.10.10', 'port': 8888, 'foo': 'bar' }, ['host', 'port'], {'host': '10.10.10.10', 'port': 8888} ) ]) def test_sanitize_kwargs(kwargs, allowed_keys, kwargs_sanitized): assert Connection._santize_pyetcd_kwargs(kwargs, allowed_keys) == kwargs_sanitized @mock.patch.object(Client, 'mkdir') def test_create_database(mock_client, etcdb_connection): cursor = etcdb_connection.cursor() cursor._execute_create_database('foo') mock_client.assert_called_once_with('/foo') @pytest.mark.parametrize('db, payload,result', [ ( 'foo', """ { "action": "get", "node": { "createdIndex": 7, "dir": true, "key": "/foo", "modifiedIndex": 7, "nodes": [ { "createdIndex": 8, "dir": true, "key": "/foo/tbl", "modifiedIndex": 8 }, { "createdIndex": 9, "dir": true, "key": "/foo/tbl1", "modifiedIndex": 9 }, { "createdIndex": 10, "dir": true, "key": "/foo/tbl2", "modifiedIndex": 10 } ] } } """, (('Table',), (('tbl',), ('tbl1',), ('tbl2',))) ), ( 'test', """ { "action": "get", "node": { "createdIndex": 7, "dir": true, "key": "/test", "modifiedIndex": 7, "nodes": [ { "createdIndex": 8, "dir": true, "key": "/test/tbl", "modifiedIndex": 8 }, { "createdIndex": 9, "dir": true, "key": "/test/django_migrations", "modifiedIndex": 9 }, { "createdIndex": 10, "dir": true, "key": "/test/someverylongname", "modifiedIndex": 10 } ] } } """, (('Table',), (('tbl',), ('django_migrations',), ('someverylongname',))) ) ]) @mock.patch.object(Client, 'read') def test_show_tables(mock_client, db, payload, result, cursor): response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) tree = SQLTree() tree.db = db tree.options['full'] = False mock_client.return_value = etcd_result assert cursor._execute_show_tables(tree) == result mock_client.assert_called_once_with('/%s' % db) @pytest.mark.parametrize('payload,result', [ (""" { "action": "get", "node": { "createdIndex": 7, "dir": true, "key": "/foo", "modifiedIndex": 7, "nodes": [ { "createdIndex": 8, "dir": true, "key": "/foo/tbl", "modifiedIndex": 8 }, { "createdIndex": 9, "dir": true, "key": "/foo/tbl1", "modifiedIndex": 9 }, { "createdIndex": 10, "dir": true, "key": "/foo/tbl2", "modifiedIndex": 10 } ] } } """, (('Table', 'Type'), (('tbl', 'BASE TABLE'), ('tbl1', 'BASE TABLE'), ('tbl2', 'BASE TABLE'))) ), ( """{ "action": "get", "node": { "createdIndex": 7, "dir": true, "key": "/foo", "modifiedIndex": 7 } }""", (('Table', 'Type'), ()) ) ]) @mock.patch.object(Client, 'read') def test_show_full_tables(mock_client, payload, result, etcdb_connection): cursor = etcdb_connection.cursor() assert cursor._db == 'foo' response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) tree = SQLTree() tree.db = cursor._db tree.options['full'] = True mock_client.return_value = etcd_result assert cursor._execute_show_tables(tree) == result mock_client.assert_called_once_with('/foo') @mock.patch.object(Client, 'read') def test_show_tables_raises_exception_if_no_db(mock_client, etcdb_connection): cursor = etcdb_connection.cursor() cursor._db = None tree = SQLTree() with pytest.raises(OperationalError): cursor._execute_show_tables(tree) @mock.patch.object(Client, 'write') @mock.patch.object(Client, 
'mkdir') def test_create_table(mock_mkdir, mock_write, cursor): tree = SQLTree() tree.db = 'foo' tree.table = 'bar' tree.fields = { 'id': { 'type': 'INT', 'options': { 'nullable': False, 'primary': True } } } cursor._execute_create_table(tree) mock_mkdir.assert_called_once_with('/foo/bar') mock_write.assert_called_once_with('/foo/bar/_fields', json.dumps(tree.fields)) @pytest.mark.parametrize('rows,result', [ ((('information_schema',), ('mysql',), ('performance_schema',), ('sys',), ('test',)), [ ('information_schema',), ('mysql',), ('performance_schema',), ('sys',), ('test',), None, None]), ((), [None]) ]) def test_fetch_one(rows, result, etcdb_connection): cursor = etcdb_connection.cursor() cursor._rows = rows for i in xrange(len(result)): assert cursor.fetchone() == result[i] @pytest.mark.parametrize('rows,result', [ ((('information_schema',), ('mysql',), ('performance_schema',), ('sys',), ('test',)), [ (('information_schema',), ('mysql',), ('performance_schema',), ('sys',), ('test',)), ()]), ((), [ (), () ]) ]) def test_fetch_all(rows, result, etcdb_connection): cursor = etcdb_connection.cursor() cursor._rows = rows for i in xrange(len(result)): assert cursor.fetchall() == result[i] @pytest.mark.parametrize('payload,result', [ ("""{"action":"get","node":{"dir":true}}""", (('Database', ), ())), ( """ { "action": "get", "node": { "dir": true, "nodes": [ { "createdIndex": 19, "dir": true, "key": "/foo", "modifiedIndex": 19 } ] } } """, (('Database', ), ((u'foo',),)) ), ( """ { "action": "get", "node": { "dir": true, "nodes": [ { "createdIndex": 20, "dir": true, "key": "/foo", "modifiedIndex": 20 }, { "createdIndex": 19, "dir": true, "key": "/bar", "modifiedIndex": 19 } ] } } """, (('Database', ), ((u'foo',), (u'bar',),)) ) ]) @mock.patch.object(Client, 'read') def test_show_databases(mock_client, payload, result, cursor): response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) mock_client.return_value = etcd_result assert cursor._execute_show_databases() == result @mock.patch.object(Client, 'mkdir') def test_dbl_createdb(mock_client, etcdb_connection): mock_client.side_effect = EtcdNodeExist cursor = etcdb_connection.cursor() with pytest.raises(ProgrammingError): cursor._execute_create_database('foo') @mock.patch.object(Client, 'mkdir') def test_dbl_createtbl(mock_client, etcdb_connection): mock_client.side_effect = EtcdNodeExist cursor = etcdb_connection.cursor() tree = SQLTree() with pytest.raises(ProgrammingError): cursor._execute_create_table(tree) def test_use_db(etcdb_connection): cursor = etcdb_connection.cursor() cursor.execute('USE foo') assert cursor._db == 'foo' @mock.patch.object(Client, 'mkdir') def test_create_table_raises_exception_if_no_db(mock_client, etcdb_connection): cursor = etcdb_connection.cursor() cursor._db = None tree = SQLTree() with pytest.raises(OperationalError): cursor._execute_create_table(tree) @pytest.mark.parametrize('rows,n,result', [ ( (('information_schema',), ('mysql',), ('performance_schema',), ('sys',), ('test',)), 2, (('information_schema',), ('mysql',), ) ), ( (), 2, () ) ]) def test_fetch_many(rows, n, result, etcdb_connection): cursor = etcdb_connection.cursor() cursor._rows = rows assert cursor.fetchmany(n) == result def test_select_variable(etcdb_connection): query = "SELECT @@SQL_MODE" cursor = etcdb_connection.cursor() cursor.execute(query) print(cursor._rows) assert cursor.fetchone() == ('STRICT_ALL_TABLES', ) def test_commit(cursor): # Commit does nothing cursor.execute("COMMIT") @mock.patch.object(Client, 
'rmdir') def test_drop_database(mock_client, cursor): cursor.execute("DROP DATABASE foo") mock_client.assert_called_once_with('/foo', recursive=True) def test_syntax_error_raises_exception(cursor): with pytest.raises(ProgrammingError): cursor.execute('foo') @mock.patch.object(Client, 'read') def test_desc_table(mock_client, cursor): etcd_result = mock.Mock() etcd_result.node = { 'key': '/foo/bar/_fields', 'value': '{"id": {"type": "INT", "options": {"nullable": true}}}' } mock_client.return_value = etcd_result cursor._db = 'foo' cursor.execute('DESC bar') mock_client.assert_called_once_with('/foo/bar/_fields') assert cursor.fetchone() == ('id', 'INT', 'YES', '', '', '', ) @mock.patch.object(Client, 'read') def test_desc_table_raises_no_db(mock_client): conn = Connection() c = conn.cursor() with pytest.raises(OperationalError): c.execute('DESC bar') @mock.patch.object(Client, 'read') def test_desc_nonexisting_table_raises_error(mock_read, cursor): mock_read.side_effect = EtcdKeyNotFound with pytest.raises(ProgrammingError): cursor.execute('desc foo') def test_create_table_must_define_pk(cursor): query = "CREATE TABLE t(id int)" with pytest.raises(ProgrammingError): cursor.execute(query) def test_create_table_pk_must_be_not_null(cursor): query = "CREATE TABLE t(id int primary key)" with pytest.raises(ProgrammingError): cursor.execute(query) @mock.patch.object(Client, 'read') def test_get_pk(mock_read, cursor): etcd_result = mock.Mock() etcd_result.node = { 'key': '/foo/bar/_fields', 'value': '{"id": {"type": "INT", "options": {"nullable": false, "primary": true}}}' } mock_read.return_value = etcd_result assert cursor._get_pk('foo', 'bar') == { 'id': { 'type': 'INT', 'options': { 'nullable': False, 'primary': True } } } @mock.patch.object(Client, 'read') def test_get_pk_name(mock_read, cursor): etcd_result = mock.Mock() etcd_result.node = { 'key': '/foo/bar/_fields', 'value': '{"id": {"type": "INT", "options": {"nullable": false, "primary": true}}}' } mock_read.return_value = etcd_result assert cursor._get_pk_name('foo', 'bar') == 'id' @mock.patch.object(Client, 'write') @mock.patch.object(Cursor, '_get_write_lock') @mock.patch.object(Cursor, '_release_write_lock') @mock.patch.object(Cursor, '_get_table_fields') @mock.patch.object(Cursor, '_set_next_auto_inc') def test_insert_table(mock_set_next_auto_inc, mock_get_table_fields, mock_release_write_lock, mock_get_write_lock, mock_write, cursor): mock_get_table_fields.return_value = json.loads(""" { "id": { "options": { "auto_increment": true, "nullable": false, "primary": true }, "type": "INT" }, "name": { "options": { "nullable": true }, "type": "VARCHAR" } }""") cursor._get_pk_name = mock.MagicMock() cursor._get_pk_name.return_value = 'id' cursor.execute("INSERT INTO t3 (id, name) VALUES(1, 'foo')") mock_write.assert_called_once_with('/foo/t3/1', '{"id": "1", "name": "foo"}') mock_get_write_lock.assert_called_once_with('foo', 't3') mock_release_write_lock.assert_called_once_with('foo', 't3') @mock.patch.object(Client, 'write') @mock.patch.object(Cursor, '_get_write_lock') @mock.patch.object(Cursor, '_release_write_lock') @mock.patch.object(Cursor, '_get_table_fields') @mock.patch.object(Cursor, '_get_next_auto_inc') @mock.patch.object(Cursor, '_set_next_auto_inc') def test_insert_table_auto_incremented(mock_set_next_auto_inc, mock_get_next_auto_inc, mock_get_table_fields, mock_release_write_lock, mock_get_write_lock, mock_write, cursor): mock_get_table_fields.return_value = json.loads('{"id": {"type": "INT", "options": {"auto_increment": 
true, "primary": true, "nullable": false}}, "name": {"type": "VARCHAR", "options": {"nullable": true}}}') mock_get_next_auto_inc.return_value = 10 cursor._get_pk_name = mock.MagicMock() cursor._get_pk_name.return_value = 'id' cursor.execute("INSERT INTO t3 (name) VALUES('foo')") mock_write.assert_called_once_with('/foo/t3/10', '{"id": "10", "name": "foo"}') # lock mock_get_write_lock.assert_called_once_with('foo', 't3') mock_release_write_lock.assert_called_once_with('foo', 't3') @pytest.mark.parametrize('db,tbl,pk,payload,result', [ ( 'foo', 'bar', { 'id': { 'type': 'INT', 'options': { 'nullable': False, 'primary': True } } }, """ { "action": "get", "node": { "dir": true, "nodes": [ { "createdIndex": 20, "dir": true, "key": "/foo/bar/1", "modifiedIndex": 20 }, { "createdIndex": 19, "dir": true, "key": "/foo/bar/2", "modifiedIndex": 19 }, { "createdIndex": 19, "dir": true, "key": "/foo/bar/3", "modifiedIndex": 19 } ] } } """, [1, 2, 3] ), ( 'foo', 'bar', { 'id': { 'type': 'INT', 'options': { 'nullable': False, 'primary': True } } }, """ { "action": "get", "node": { "dir": true } } """, [] ), ( 'a', 't1', { 'id': { 'type': 'INT', 'options': { 'nullable': False, 'primary': True } } }, """ { "action": "get", "node": { "createdIndex": 37, "dir": true, "key": "/a/t1", "modifiedIndex": 37, "nodes": [ { "createdIndex": 39, "key": "/a/t1/1", "modifiedIndex": 39, "value": "{\\"id\\": \\"1\\"}" }, { "createdIndex": 40, "key": "/a/t1/2", "modifiedIndex": 40, "value": "{\\"id\\": \\"2\\"}" }, { "createdIndex": 41, "key": "/a/t1/10", "modifiedIndex": 41, "value": "{\\"id\\": \\"10\\"}" } ] } } """, [1, 2, 10] ), ( 'a', 't1', { 'id': { 'type': 'VARCHAR', 'options': { 'nullable': False, 'primary': True } } }, """ { "action": "get", "node": { "createdIndex": 37, "dir": true, "key": "/a/t1", "modifiedIndex": 37, "nodes": [ { "createdIndex": 39, "key": "/a/t1/aaa", "modifiedIndex": 39, "value": "{\\"id\\": \\"aaa\\"}" }, { "createdIndex": 40, "key": "/a/t1/ccc", "modifiedIndex": 40, "value": "{\\"id\\": \\"ccc\\"}" }, { "createdIndex": 41, "key": "/a/t1/bbb", "modifiedIndex": 41, "value": "{\\"id\\": \\"bbb\\"}" } ] } } """, ['aaa', 'bbb', 'ccc'] ) ]) @mock.patch.object(Client, 'read') @mock.patch.object(Cursor, '_get_pk') def test_get_pks_returns_pks(mock_get_pk, mock_read, db, tbl, pk, payload, result, cursor): response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) mock_read.return_value = etcd_result mock_get_pk.return_value = pk assert cursor._get_pks(db, tbl) == result @pytest.mark.parametrize('payload,result', [ ( '{"action":"get","node":{"key":"/a/t1/10","value":"{\\"id\\": \\"10\\"}","modifiedIndex":41,"createdIndex":41}}', ('10',) ) ]) @mock.patch.object(Client, 'read') @mock.patch.object(Cursor, '_get_read_lock') @mock.patch.object(Cursor, '_release_read_lock') def test_select_returns_records(mock_release_read_lock, mock_get_read_lock, mock_read, payload, result, cursor): cursor._get_pks = mock.Mock() cursor._get_pks.return_value = ['10'] response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) mock_read.return_value = etcd_result cursor.execute("SELECT id from bar") assert cursor.fetchone() == result @pytest.mark.parametrize('column_names,rows,new_columns', [ ( ('Table', 'Type'), ( ('t1', 'BASIC TABLE'), ('t1t1t1t1t1', 'BASIC TABLE') ), (ColInfo(name='Table', width=10), ColInfo(name='Type', width=11)), ), ( ('Table', 'Type'), (), (ColInfo(name='Table', width=5), ColInfo(name='Type', width=4)) ), ( ('Table', 'Type'), None, 
(ColInfo(name='Table', width=5), ColInfo(name='Type', width=4)) ), ( ('Table', 'Type'), ( ('t1', 'foo'), ('t1', 'bar') ), (ColInfo(name='Table', width=5), ColInfo(name='Type', width=4)) ) ]) def test_update_columns(cursor, column_names, rows, new_columns): columns = cursor._update_columns(column_names, rows) print(new_columns) for i in xrange(len(column_names)): assert columns[i].width == new_columns[i].width assert columns[i].name == new_columns[i].name @pytest.mark.parametrize('payload,result', [ ( """ {"action": "get", "node": {"createdIndex": 70, "modifiedIndex": 70, "value": "{\\"applied\\": \\"aaa\\", \\"app\\": \\"bbb\\", \\"id\\": \\"1\\", \\"name\\": \\"ccc\\"}", "key": "/test/django_migrations/1"}} """, (('bbb', 'ccc'), ) ) ]) @mock.patch.object(Client, 'read') @mock.patch.object(Cursor, '_get_read_lock') @mock.patch.object(Cursor, '_release_read_lock') def test_select_from_django_migrations(mock_get_read_lock, mock_release_read_lock, mock_read, payload, result, cursor): cursor._get_pks = mock.Mock() cursor._get_pks.return_value = ['10'] response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) mock_read.return_value = etcd_result query = "SELECT `django_migrations`.`app`, `django_migrations`.`name` FROM `django_migrations`" cursor.execute(query) assert cursor._rows == result @mock.patch('etcdb.cursor.time.time') def test_get_lock_raises_lock_wait_timeout(mock_time, cursor): mock_time.side_effect = [1475852136, 1475852136 + LOCK_WAIT_TIMEOUT] with pytest.raises(OperationalError): cursor._get_meta_lock('foo', 'bar') @mock.patch.object(Client, 'update_ttl') @mock.patch.object(Client, 'compare_and_swap') def test_get_meta_lock(mock_cas, mock_update_ttl, cursor): payload = '{"action":"set","node":{"key":"/foo/bar/_lock_meta","value":"","modifiedIndex":625,"createdIndex":625}}' response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) mock_cas.return_value = etcd_result mock_update_ttl.side_effect = EtcdKeyNotFound response = cursor._get_meta_lock('foo', 'bar') mock_cas.assert_called_once_with('/foo/bar/_lock_meta', '', ttl=1, prev_exist=False) assert response.node['key'] == '/foo/bar/_lock_meta' assert response.node['value'] == '' @mock.patch.object(Cursor, '_get_meta_lock') @mock.patch.object(Cursor, '_release_meta_lock') @mock.patch.object(Cursor, '_write_lock_set') @mock.patch.object(Cursor, '_get_active_read_locks') @mock.patch.object(Client, 'write') @mock.patch('etcdb.cursor.uuid') def test_get_read_lock(mock_uuid, mock_write, mock_get_active_read_locks, mock_write_lock_set, mock_release_meta_lock, mock_get_meta_lock, cursor): mock_uuid.uuid4.return_value = 'foo_id' mock_get_active_read_locks.return_value = [] mock_write_lock_set.return_value = False cursor._get_read_lock('foo', 'bar') mock_write.assert_called_once_with('/foo/bar/_lock_read/foo_id', '', ttl=1) mock_get_meta_lock.assert_called_once_with('foo', 'bar') mock_release_meta_lock.assert_called_once_with('foo', 'bar') @mock.patch.object(Cursor, '_get_meta_lock') @mock.patch.object(Cursor, '_release_meta_lock') @mock.patch.object(Client, 'read') @mock.patch.object(Client, 'write') @mock.patch('etcdb.cursor.uuid') def test_get_read_lock_lock_free(mock_uuid, mock_write, mock_read, mock_release_meta_lock, mock_get_meta_lock, cursor): mock_read.side_effect = EtcdKeyNotFound mock_uuid.uuid4.return_value = 'lock_id' assert cursor._get_read_lock('foo', 'bar') == 'lock_id' mock_write.assert_called_once_with('/foo/bar/_lock_read/lock_id', '', ttl=cursor._timeout) 
@mock.patch.object(Client, 'read') def test_write_lock_set_false(mock_read, cursor): mock_read.side_effect = EtcdKeyNotFound assert not cursor._write_lock_set('foo', 'bar') mock_read.assert_called_once_with('/foo/bar/_lock_write') @mock.patch.object(Client, 'read') def test_write_lock_set_true(mock_read, cursor): assert cursor._write_lock_set('foo', 'bar') mock_read.assert_called_once_with('/foo/bar/_lock_write') @mock.patch.object(Client, 'read') def test_wait_until_write_lock_deleted(mock_read, cursor): mock_read.side_effect = [ mock.MagicMock(), EtcdKeyNotFound ] cursor._wait_until_write_lock_deleted('foo', 'bar') mock_read.assert_called_with('/foo/bar/_lock_write', wait=True) assert mock_read.call_count == 2 @pytest.mark.parametrize('payload,active_read_locks', [ ( """ { "action": "get", "node": { "createdIndex": 26468, "dir": true, "key": "/foo/t1/_lock_read", "modifiedIndex": 26468, "nodes": [ { "createdIndex": 26468, "key": "/foo/bar/_lock_read/xxx", "modifiedIndex": 26468, "value": "" } ] } } """, ['xxx'] ), ( '{"action":"get","node":{"key":"/foo/t1/_lock_read","dir":true,"modifiedIndex":26471,"createdIndex":26471}}', [] ), ( """ { "action": "get", "node": { "createdIndex": 26471, "dir": true, "key": "/foo/bar/_lock_read", "modifiedIndex": 26471, "nodes": [ { "createdIndex": 133081, "key": "/foo/bar/_lock_read/lock1", "modifiedIndex": 133081, "value": "" }, { "createdIndex": 133518, "key": "/foo/bar/_lock_read/lock2", "modifiedIndex": 133518, "value": "" } ] } } """, ['lock1', 'lock2'] ) ]) @mock.patch.object(Client, 'read') def test_get_active_read_locks(mock_read, payload, active_read_locks, cursor): response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) mock_read.return_value = etcd_result assert cursor._get_active_read_locks('foo', 'bar') == active_read_locks @mock.patch.object(Client, 'read') def test_get_active_read_locks_empty(mock_read, cursor): mock_read.side_effect = KeyError assert cursor._get_active_read_locks('foo', 'bar') == [] @mock.patch.object(Cursor, '_get_active_read_locks') @mock.patch.object(Client, 'delete') def test_release_read_lock(mock_delete, mock_get_active_read_locks, cursor): mock_get_active_read_locks.return_value = ['foo_id'] cursor._release_read_lock('foo', 'bar', 'foo_id') mock_delete.assert_called_once_with('/foo/bar/_lock_read/foo_id') @mock.patch.object(Cursor, '_get_meta_lock') @mock.patch.object(Cursor, '_release_meta_lock') @mock.patch.object(Cursor, '_write_lock_set') @mock.patch.object(Cursor, '_ensure_no_read_lock') @mock.patch.object(Client, 'write') def test_get_write_lock(mock_write, mock_ensure_no_read_lock, mock_write_lock_set, mock_release, mock_get, cursor): mock_write_lock_set.return_value = False cursor._get_write_lock('foo', 'bar') mock_write.assert_called_once_with('/foo/bar/_lock_write', '', ttl=1) @mock.patch.object(Cursor, '_get_active_read_locks') def test_ensure_no_read_lock_no_active_reads(mock_get_active_read_locks, cursor): mock_get_active_read_locks.return_value = [] cursor._ensure_no_read_lock('foo', 'bar') @mock.patch.object(Cursor, '_get_active_read_locks') @mock.patch.object(Cursor, '_wait_until_read_lock_released') def test_ensure_no_read_lock_with_active_reads(mock_wait_until_read_lock_released, mock_get_active_read_locks, cursor): mock_get_active_read_locks.return_value = ['some lock'] cursor._ensure_no_read_lock('foo', 'bar') mock_wait_until_read_lock_released.assert_called_once_with('foo', 'bar', 'some lock') @mock.patch.object(Cursor, '_get_write_lock') 
@mock.patch.object(Cursor, '_release_write_lock') @mock.patch.object(Cursor, '_get_pk_name') @mock.patch.object(Cursor, '_get_table_fields') @mock.patch.object(Cursor, '_get_next_auto_inc') @mock.patch.object(Client, 'write') def test_lastrowid(mock_write, mock_get_next_auto_inc, mock_get_table_fields, mock_get_pk_name, mock_rwl, mock_gwl, cursor): mock_get_next_auto_inc.return_value = 10 mock_get_table_fields.return_value = json.loads(""" { "id": { "options": { "auto_increment": true, "nullable": false, "primary": true }, "type": "INT" }, "name": { "options": { "nullable": true }, "type": "VARCHAR" } }""") mock_get_pk_name.return_value = 'id' cursor.execute("INSERT INTO t1 (name) VALUES ('foo')") assert cursor.lastrowid == 10 @pytest.mark.parametrize('payload,rowcount', [ ( """ { "action": "get", "node": { "createdIndex": 303785, "dir": true, "key": "/foo/bar", "modifiedIndex": 303785, "nodes": [ { "createdIndex": 303794, "key": "/foo/bar/1", "modifiedIndex": 303794, "value": "{\\"id\\": \\"1\\"}" }, { "createdIndex": 303807, "key": "/foo/bar/2", "modifiedIndex": 303807, "value": "{\\"id\\": \\"2\\"}" }, { "createdIndex": 303819, "key": "/foo/bar/3", "modifiedIndex": 303819, "value": "{\\"id\\": \\"3\\"}" } ] } } """, 3 ), ( """ { "action": "get", "node": { "createdIndex": 303785, "dir": true, "key": "/foo/bar", "modifiedIndex": 303785 } } """, 0 ) ]) @mock.patch.object(Cursor, '_get_read_lock') @mock.patch.object(Cursor, '_release_read_lock') @mock.patch.object(Cursor, '_get_pks') @mock.patch.object(Client, 'read') def test_count_star(mock_read, mock_get_pks, mock_release_read_lock, mock_get_read_lock, payload, rowcount, cursor): response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) mock_read.return_value = etcd_result mock_get_pks.return_value = [] query = "SELECT COUNT(*) AS `__count` FROM `foo_config`" assert cursor.execute(query) == 1 assert cursor.fetchone()[0] == rowcount mock_read.assert_called_once_with('/foo/foo_config') @pytest.mark.parametrize('payload,limit,result', [ ( """ { "action": "get", "node": { "dir": true, "nodes": [ { "createdIndex": 20, "dir": true, "key": "/foo/bar/1", "modifiedIndex": 20 }, { "createdIndex": 19, "dir": true, "key": "/foo/bar/2", "modifiedIndex": 19 }, { "createdIndex": 19, "dir": true, "key": "/foo/bar/3", "modifiedIndex": 19 } ] } } """, 2, [1, 2, 3] ), ( """ { "action": "get", "node": { "dir": true } } """, 1, [] ), ( """ { "action": "get", "node": { "createdIndex": 37, "dir": true, "key": "/foo/bar", "modifiedIndex": 37, "nodes": [ { "createdIndex": 39, "key": "/foo/bar/1", "modifiedIndex": 39, "value": "{\\"id\\": \\"1\\"}" }, { "createdIndex": 40, "key": "/foo/bar/10", "modifiedIndex": 40, "value": "{\\"id\\": \\"10\\"}" }, { "createdIndex": 41, "key": "/foo/bar/20", "modifiedIndex": 41, "value": "{\\"id\\": \\"20\\"}" } ] } } """, 2, [1, 10, 20] ) ]) @mock.patch.object(Client, 'read') @mock.patch.object(Cursor, '_get_pk') def test_get_pks_with_limit(mock_get_pk, mock_read, payload, limit, result, cursor): response = mock.MagicMock() response.content = payload etcd_result = EtcdResult(response) mock_read.return_value = etcd_result tree = SQLTree() tree.limit = limit mock_get_pk.return_value = { 'id': { 'type': 'INT', 'options': { 'nullable': False, 'primary': True } } } assert cursor._get_pks('foo', 'bar', tree=tree) == result
akuzminsky/etcdb
tests/unit/test_etcdb.py
etcdb/sqlparser/lexer.py
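The etcdb tests above repeatedly mock a meta/read/write lock protocol keyed under /db/table/_lock_meta, /db/table/_lock_read/<uuid> and /db/table/_lock_write. What follows is a minimal, self-contained sketch of that key layout, inferred from the mocked Client calls in the tests rather than taken from the library itself; FakeEtcd and both helper functions are hypothetical.

import uuid


class FakeEtcd:
    """Hypothetical stand-in for python-etcd's Client: a dict of key -> value."""

    def __init__(self):
        self.keys = {}

    def write(self, key, value, **kwargs):
        # The real client also takes ttl=...; it is accepted and ignored here.
        self.keys[key] = value

    def delete(self, key):
        del self.keys[key]


def acquire_read_lock(client, db, tbl):
    """Register a reader under a unique key, as the mocked _get_read_lock does."""
    lock_id = str(uuid.uuid4())
    client.write(f"/{db}/{tbl}/_lock_read/{lock_id}", "", ttl=1)
    return lock_id


def release_read_lock(client, db, tbl, lock_id):
    """Drop the reader's key, as the mocked _release_read_lock does."""
    client.delete(f"/{db}/{tbl}/_lock_read/{lock_id}")


client = FakeEtcd()
lock_id = acquire_read_lock(client, "foo", "bar")
assert f"/foo/bar/_lock_read/{lock_id}" in client.keys
release_read_lock(client, "foo", "bar", lock_id)
assert client.keys == {}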
from __future__ import print_function

import abc
import collections
import json
import os
import pydoc
import re
import sys

import pager
import pygments
import six
from dcos import constants, errors, util
from pygments.formatters import Terminal256Formatter
from pygments.lexers import JsonLexer

logger = util.get_logger(__name__)


class Emitter(object):
    """Abstract class for emitting events."""

    @abc.abstractmethod
    def publish(self, event):
        """Publishes an event.

        :param event: event to publish
        :type event: any
        """

        raise NotImplementedError


class FlatEmitter(Emitter):
    """Simple emitter that sends all publish events to the provided handler.
    If no handler is provided then use :py:const:`DEFAULT_HANDLER`.

    :param handler: event handler to call when publish is called
    :type handler: func(event) where event is defined in
                   :py:func:`FlatEmitter.publish`
    """

    def __init__(self, handler=None):
        if handler is None:
            self._handler = DEFAULT_HANDLER
        else:
            self._handler = handler

    def publish(self, event):
        """Publishes an event.

        :param event: event to publish
        :type event: any
        """

        self._handler(event)


def print_handler(event):
    """Default handler for printing event to stdout.

    :param event: event to emit to stdout
    :type event: str, dict, list, or dcos.errors.Error
    """

    pager_command = os.environ.get(constants.DCOS_PAGER_COMMAND_ENV)

    if event is None:
        # Do nothing
        pass

    elif isinstance(event, six.string_types):
        _page(event, pager_command)

    elif isinstance(event, errors.Error):
        print(event.error(), file=sys.stderr)
        sys.stderr.flush()

    elif (isinstance(event, collections.Mapping) or
          isinstance(event, collections.Sequence) or
          isinstance(event, bool) or
          isinstance(event, six.integer_types) or
          isinstance(event, float)):
        # These are all valid JSON types, so treat them differently
        processed_json = _process_json(event, pager_command)
        _page(processed_json, pager_command)

    elif isinstance(event, errors.DCOSException):
        print(event, file=sys.stderr)

    else:
        logger.debug('Printing unknown type: %s, %r.', type(event), event)
        _page(event, pager_command)


def publish_table(emitter, objs, table_fn, json_):
    """Publishes a json representation of `objs` if `json_` is True,
    otherwise, publishes a table representation.

    :param emitter: emitter to use for publishing
    :type emitter: Emitter
    :param objs: objects to print
    :type objs: [object]
    :param table_fn: function used to generate a PrettyTable from `objs`
    :type table_fn: objs -> PrettyTable
    :param json_: whether or not to publish a json representation
    :type json_: bool
    :rtype: None
    """

    if json_:
        emitter.publish(objs)
    else:
        table = table_fn(objs)
        output = str(table)
        if output:
            emitter.publish(output)


def _process_json(event, pager_command):
    """Conditionally highlights the supplied JSON value.

    :param event: event to emit to stdout
    :type event: str, dict, list, or dcos.errors.Error
    :returns: String representation of the supplied JSON value,
              possibly syntax-highlighted.
    :rtype: str
    """

    json_output = json.dumps(event, sort_keys=True, indent=2)

    # Strip trailing whitespace
    json_output = re.sub(r'\s+$', '', json_output, 0, re.M)

    force_colors = False  # TODO(CD): Introduce a --colors flag

    if not sys.stdout.isatty():
        if force_colors:
            return _highlight_json(json_output)
        else:
            return json_output

    supports_colors = not util.is_windows_platform()
    pager_is_set = pager_command is not None
    should_highlight = force_colors or supports_colors and not pager_is_set

    if should_highlight:
        json_output = _highlight_json(json_output)

    return json_output


def _page(output, pager_command=None):
    """Conditionally pipes the supplied output through a pager.

    :param output:
    :type output: object
    :param pager_command:
    :type pager_command: str
    """

    output = str(output)

    if pager_command is None:
        pager_command = 'less -R'

    if not sys.stdout.isatty() or util.is_windows_platform():
        print(output)
        return

    num_lines = output.count('\n')
    exceeds_tty_height = pager.getheight() - 1 < num_lines

    if exceeds_tty_height:
        pydoc.pipepager(output, cmd=pager_command)
    else:
        print(output)


def _highlight_json(json_value):
    """
    :param json_value: JSON value to syntax-highlight
    :type json_value: dict, list, number, string, boolean, or None
    :returns: A string representation of the supplied JSON value,
              highlighted for a terminal that supports ANSI colors.
    :rtype: str
    """

    return pygments.highlight(
        json_value, JsonLexer(), Terminal256Formatter()).strip()


DEFAULT_HANDLER = print_handler
"""The default handler for an emitter: :py:func:`print_handler`."""
import contextlib import json import os import re import threading from dcos import constants import pytest from six.moves.BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer from .common import (app, assert_command, assert_lines, config_set, config_unset, exec_command, list_deployments, popen_tty, show_app, watch_all_deployments, watch_deployment) def test_help(): stdout = b"""Deploy and manage applications on the DCOS Usage: dcos marathon --config-schema dcos marathon --info dcos marathon about dcos marathon app add [<app-resource>] dcos marathon app list [--json] dcos marathon app remove [--force] <app-id> dcos marathon app restart [--force] <app-id> dcos marathon app show [--app-version=<app-version>] <app-id> dcos marathon app start [--force] <app-id> [<instances>] dcos marathon app stop [--force] <app-id> dcos marathon app update [--force] <app-id> [<properties>...] dcos marathon app version list [--max-count=<max-count>] <app-id> dcos marathon deployment list [--json <app-id>] dcos marathon deployment rollback <deployment-id> dcos marathon deployment stop <deployment-id> dcos marathon deployment watch [--max-count=<max-count>] [--interval=<interval>] <deployment-id> dcos marathon task list [--json <app-id>] dcos marathon task show <task-id> dcos marathon group add [<group-resource>] dcos marathon group list [--json] dcos marathon group scale [--force] <group-id> <scale-factor> dcos marathon group show [--group-version=<group-version>] <group-id> dcos marathon group remove [--force] <group-id> dcos marathon group update [--force] <group-id> [<properties>...] Options: -h, --help Show this screen --info Show a short description of this subcommand --json Print json-formatted tasks --version Show version --force This flag disable checks in Marathon during update operations --app-version=<app-version> This flag specifies the application version to use for the command. The application version (<app-version>) can be specified as an absolute value or as relative value. Absolute version values must be in ISO8601 date format. Relative values must be specified as a negative integer and they represent the version from the currently deployed application definition --group-version=<group-version> This flag specifies the group version to use for the command. The group version (<group-version>) can be specified as an absolute value or as relative value. Absolute version values must be in ISO8601 date format. Relative values must be specified as a negative integer and they represent the version from the currently deployed group definition --config-schema Show the configuration schema for the Marathon subcommand --max-count=<max-count> Maximum number of entries to try to fetch and return --interval=<interval> Number of seconds to wait between actions Positional Arguments: <app-id> The application id <app-resource> Path to a file or HTTP(S) URL containing the app's JSON definition. If omitted, the definition is read from stdin. For a detailed description see (https://mesosphere.github.io/ marathon/docs/rest-api.html#post-/v2/apps). <deployment-id> The deployment id <group-id> The group id <group-resource> Path to a file or HTTP(S) URL containing the group's JSON definition. If omitted, the definition is read from stdin. For a detailed description see (https://mesosphere.github.io/ marathon/docs/rest-api.html#post-/v2/groups). <instances> The number of instances to start <properties> Must be of the format <key>=<value>. E.g. cpus=2.0. If omitted, properties are read from stdin. 
<task-id> The task id <scale-factor> The factor to scale an application group by """ assert_command(['dcos', 'marathon', '--help'], stdout=stdout) def test_version(): assert_command(['dcos', 'marathon', '--version'], stdout=b'dcos-marathon version SNAPSHOT\n') def test_info(): assert_command(['dcos', 'marathon', '--info'], stdout=b'Deploy and manage applications on the DCOS\n') def test_about(): returncode, stdout, stderr = exec_command(['dcos', 'marathon', 'about']) assert returncode == 0 assert stderr == b'' result = json.loads(stdout.decode('utf-8')) assert result['name'] == "marathon" @pytest.fixture def missing_env(): env = os.environ.copy() env.update({ constants.PATH_ENV: os.environ[constants.PATH_ENV], constants.DCOS_CONFIG_ENV: os.path.join("tests", "data", "missing_marathon_params.toml") }) return env def test_missing_config(missing_env): assert_command( ['dcos', 'marathon', 'app', 'list'], returncode=1, stderr=(b'Missing required config parameter: "core.dcos_url". ' b'Please run `dcos config set core.dcos_url <value>`.\n'), env=missing_env) def test_empty_list(): _list_apps() def test_add_app(): with _zero_instance_app(): _list_apps('zero-instance-app') def test_add_app_through_http(): with _zero_instance_app_through_http(): _list_apps('zero-instance-app') def test_add_app_bad_resource(): stderr = (b'Can\'t read from resource: bad_resource.\n' b'Please check that it exists.\n') assert_command(['dcos', 'marathon', 'app', 'add', 'bad_resource'], returncode=1, stderr=stderr) def test_add_app_with_filename(): with _zero_instance_app(): _list_apps('zero-instance-app') def test_remove_app(): with _zero_instance_app(): pass _list_apps() def test_add_bad_json_app(): with open('tests/data/marathon/apps/bad.json') as fd: returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'add'], stdin=fd) assert returncode == 1 assert stdout == b'' assert stderr.decode('utf-8').startswith('Error loading JSON: ') def test_add_existing_app(): with _zero_instance_app(): app_path = 'tests/data/marathon/apps/zero_instance_sleep_v2.json' with open(app_path) as fd: stderr = b"Application '/zero-instance-app' already exists\n" assert_command(['dcos', 'marathon', 'app', 'add'], returncode=1, stderr=stderr, stdin=fd) def test_show_app(): with _zero_instance_app(): show_app('zero-instance-app') def test_show_absolute_app_version(): with _zero_instance_app(): _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') result = show_app('zero-instance-app') show_app('zero-instance-app', result['version']) def test_show_relative_app_version(): with _zero_instance_app(): _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') show_app('zero-instance-app', "-1") def test_show_missing_relative_app_version(): with _zero_instance_app(): _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') stderr = b"Application 'zero-instance-app' only has 2 version(s).\n" assert_command(['dcos', 'marathon', 'app', 'show', '--app-version=-2', 'zero-instance-app'], returncode=1, stderr=stderr) def test_show_missing_absolute_app_version(): with _zero_instance_app(): _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'show', '--app-version=2000-02-11T20:39:32.972Z', 'zero-instance-app']) assert returncode == 1 assert stdout == b'' assert stderr.decode('utf-8').startswith( "Error: App 
'/zero-instance-app' does not exist") def test_show_bad_app_version(): with _zero_instance_app(): _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') stderr = (b'Error: Invalid format: "20:39:32.972Z" is malformed at ' b'":39:32.972Z"\n') assert_command( ['dcos', 'marathon', 'app', 'show', '--app-version=20:39:32.972Z', 'zero-instance-app'], returncode=1, stderr=stderr) def test_show_bad_relative_app_version(): with _zero_instance_app(): _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') assert_command( ['dcos', 'marathon', 'app', 'show', '--app-version=2', 'zero-instance-app'], returncode=1, stderr=b"Relative versions must be negative: 2\n") def test_start_missing_app(): assert_command( ['dcos', 'marathon', 'app', 'start', 'missing-id'], returncode=1, stderr=b"Error: App '/missing-id' does not exist\n") def test_start_app(): with _zero_instance_app(): _start_app('zero-instance-app') def test_start_already_started_app(): with _zero_instance_app(): _start_app('zero-instance-app') stdout = (b"Application 'zero-instance-app' already " b"started: 1 instances.\n") assert_command( ['dcos', 'marathon', 'app', 'start', 'zero-instance-app'], returncode=1, stdout=stdout) def test_stop_missing_app(): assert_command(['dcos', 'marathon', 'app', 'stop', 'missing-id'], returncode=1, stderr=b"Error: App '/missing-id' does not exist\n") def test_stop_app(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') watch_deployment(result[0]['id'], 60) returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'stop', 'zero-instance-app']) assert returncode == 0 assert stdout.decode().startswith('Created deployment ') assert stderr == b'' def test_stop_already_stopped_app(): with _zero_instance_app(): stdout = (b"Application 'zero-instance-app' already " b"stopped: 0 instances.\n") assert_command( ['dcos', 'marathon', 'app', 'stop', 'zero-instance-app'], returncode=1, stdout=stdout) def test_update_missing_app(): assert_command(['dcos', 'marathon', 'app', 'update', 'missing-id'], stderr=b"Error: App '/missing-id' does not exist\n", returncode=1) def test_update_missing_field(): with _zero_instance_app(): returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'update', 'zero-instance-app', 'missing="a string"']) assert returncode == 1 assert stdout == b'' assert stderr.decode('utf-8').startswith( "Error: 'missing' is not a valid property. 
" "Possible properties are: ") def test_update_bad_type(): with _zero_instance_app(): returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'update', 'zero-instance-app', 'cpus="a string"']) assert returncode == 1 assert stderr.decode('utf-8').startswith( "Unable to parse 'a string' as a float: could not convert string " "to float: ") assert stdout == b'' def test_update_invalid_request(): returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'update', '{', 'instances']) assert returncode == 1 assert stdout == b'' stderr = stderr.decode() assert stderr.startswith('Error on request') assert stderr.endswith('HTTP 400: Bad Request\n') def test_app_add_invalid_request(): path = os.path.join( 'tests', 'data', 'marathon', 'apps', 'app_add_400.json') returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'add', path]) assert returncode == 1 assert stdout == b'' assert re.match(b"Error on request \[POST .*\]: HTTP 400: Bad Request:", stderr) stderr_end = b"""{ "details": [ { "errors": [ "host is not a valid network type" ], "path": "/container/docker/network" } ], "message": "Invalid JSON" } """ assert stderr.endswith(stderr_end) def test_update_app(): with _zero_instance_app(): returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'update', 'zero-instance-app', 'cpus=1', 'mem=20', "cmd='sleep 100'"]) assert returncode == 0 assert stdout.decode().startswith('Created deployment ') assert stderr == b'' def test_update_app_from_stdin(): with _zero_instance_app(): _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') def test_restarting_stopped_app(): with _zero_instance_app(): stdout = (b"Unable to perform rolling restart of application '" b"/zero-instance-app' because it has no running tasks\n") assert_command( ['dcos', 'marathon', 'app', 'restart', 'zero-instance-app'], returncode=1, stdout=stdout) def test_restarting_missing_app(): assert_command(['dcos', 'marathon', 'app', 'restart', 'missing-id'], returncode=1, stderr=b"Error: App '/missing-id' does not exist\n") def test_restarting_app(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') watch_deployment(result[0]['id'], 60) returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'restart', 'zero-instance-app']) assert returncode == 0 assert stdout.decode().startswith('Created deployment ') assert stderr == b'' def test_list_version_missing_app(): assert_command( ['dcos', 'marathon', 'app', 'version', 'list', 'missing-id'], returncode=1, stderr=b"Error: App '/missing-id' does not exist\n") def test_list_version_negative_max_count(): assert_command(['dcos', 'marathon', 'app', 'version', 'list', 'missing-id', '--max-count=-1'], returncode=1, stderr=b'Maximum count must be a positive number: -1\n') def test_list_version_app(): with _zero_instance_app(): _list_versions('zero-instance-app', 1) _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') _list_versions('zero-instance-app', 2) def test_list_version_max_count(): with _zero_instance_app(): _update_app( 'zero-instance-app', 'tests/data/marathon/apps/update_zero_instance_sleep.json') _list_versions('zero-instance-app', 1, 1) _list_versions('zero-instance-app', 2, 2) _list_versions('zero-instance-app', 2, 3) def test_list_empty_deployment(): list_deployments(0) def test_list_deployment(): with _zero_instance_app(): _start_app('zero-instance-app', 3) list_deployments(1) def 
test_list_deployment_table(): """Simple sanity check for listing deployments with a table output. The more specific testing is done in unit tests. """ with _zero_instance_app(): _start_app('zero-instance-app', 3) assert_lines(['dcos', 'marathon', 'deployment', 'list'], 2) def test_list_deployment_missing_app(): with _zero_instance_app(): _start_app('zero-instance-app') list_deployments(0, 'missing-id') def test_list_deployment_app(): with _zero_instance_app(): _start_app('zero-instance-app', 3) list_deployments(1, 'zero-instance-app') def test_rollback_missing_deployment(): assert_command( ['dcos', 'marathon', 'deployment', 'rollback', 'missing-deployment'], returncode=1, stderr=b'Error: DeploymentPlan missing-deployment does not exist\n') def test_rollback_deployment(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'deployment', 'rollback', result[0]['id']]) result = json.loads(stdout.decode('utf-8')) assert returncode == 0 assert 'deploymentId' in result assert 'version' in result assert stderr == b'' list_deployments(0) def test_stop_deployment(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') assert_command( ['dcos', 'marathon', 'deployment', 'stop', result[0]['id']]) list_deployments(0) def test_watching_missing_deployment(): watch_deployment('missing-deployment', 1) def test_watching_deployment(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') watch_deployment(result[0]['id'], 60) list_deployments(0, 'zero-instance-app') def test_list_empty_task(): _list_tasks(0) def test_list_empty_task_not_running_app(): with _zero_instance_app(): _list_tasks(0) def test_list_tasks(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') watch_deployment(result[0]['id'], 60) _list_tasks(3) def test_list_tasks_table(): with _zero_instance_app(): _start_app('zero-instance-app', 3) watch_all_deployments() assert_lines(['dcos', 'marathon', 'task', 'list'], 4) def test_list_app_tasks(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') watch_deployment(result[0]['id'], 60) _list_tasks(3, 'zero-instance-app') def test_list_missing_app_tasks(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') watch_deployment(result[0]['id'], 60) _list_tasks(0, 'missing-id') def test_show_missing_task(): returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'task', 'show', 'missing-id']) stderr = stderr.decode('utf-8') assert returncode == 1 assert stdout == b'' assert stderr.startswith("Task '") assert stderr.endswith("' does not exist\n") def test_show_task(): with _zero_instance_app(): _start_app('zero-instance-app', 3) result = list_deployments(1, 'zero-instance-app') watch_deployment(result[0]['id'], 60) result = _list_tasks(3, 'zero-instance-app') returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'task', 'show', result[0]['id']]) result = json.loads(stdout.decode('utf-8')) assert returncode == 0 assert result['appId'] == '/zero-instance-app' assert stderr == b'' def test_bad_configuration(): config_set('marathon.url', 'http://localhost:88888') returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'list']) assert returncode == 1 assert stdout 
== b'' assert stderr.startswith( b"URL [http://localhost:88888/v2/info] is unreachable") config_unset('marathon.url') def test_app_locked_error(): with app('tests/data/marathon/apps/sleep_two_instances.json', '/sleep-two-instances'): assert_command( ['dcos', 'marathon', 'app', 'stop', 'sleep-two-instances'], returncode=1, stderr=(b'App or group is locked by one or more deployments. ' b'Override with --force.\n')) def test_app_add_no_tty(): proc, master = popen_tty('dcos marathon app add') stdout, stderr = proc.communicate() os.close(master) print(stdout) print(stderr) assert proc.wait() == 1 assert stdout == b'' assert stderr == (b"We currently don't support reading from the TTY. " b"Please specify an application JSON.\n" b"Usage: dcos marathon app add < app_resource.json\n") def _list_apps(app_id=None): returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'list', '--json']) result = json.loads(stdout.decode('utf-8')) if app_id is None: assert len(result) == 0 else: assert len(result) == 1 assert result[0]['id'] == '/' + app_id assert returncode == 0 assert stderr == b'' return result def _start_app(app_id, instances=None): cmd = ['dcos', 'marathon', 'app', 'start', app_id] if instances is not None: cmd.append(str(instances)) returncode, stdout, stderr = exec_command(cmd) assert returncode == 0 assert stdout.decode().startswith('Created deployment ') assert stderr == b'' def _update_app(app_id, file_path): with open(file_path) as fd: returncode, stdout, stderr = exec_command( ['dcos', 'marathon', 'app', 'update', app_id], stdin=fd) assert returncode == 0 assert stdout.decode().startswith('Created deployment ') assert stderr == b'' def _list_versions(app_id, expected_count, max_count=None): cmd = ['dcos', 'marathon', 'app', 'version', 'list', app_id] if max_count is not None: cmd.append('--max-count={}'.format(max_count)) returncode, stdout, stderr = exec_command(cmd) result = json.loads(stdout.decode('utf-8')) assert returncode == 0 assert isinstance(result, list) assert len(result) == expected_count assert stderr == b'' def _list_tasks(expected_count, app_id=None): cmd = ['dcos', 'marathon', 'task', 'list', '--json'] if app_id is not None: cmd.append(app_id) returncode, stdout, stderr = exec_command(cmd) result = json.loads(stdout.decode('utf-8')) assert returncode == 0 assert len(result) == expected_count assert stderr == b'' return result @contextlib.contextmanager def _zero_instance_app(): with app('tests/data/marathon/apps/zero_instance_sleep.json', 'zero-instance-app'): yield @contextlib.contextmanager def _zero_instance_app_through_http(): class JSONRequestHandler (BaseHTTPRequestHandler): def do_GET(self): self.send_response(200) self.send_header("Content-type", "application/json") self.end_headers() self.wfile.write(open( 'tests/data/marathon/apps/zero_instance_sleep.json', 'rb').read()) host = 'localhost' port = 12345 server = HTTPServer((host, port), JSONRequestHandler) thread = threading.Thread(target=server.serve_forever) thread.setDaemon(True) thread.start() with app('http://{}:{}'.format(host, port), 'zero-instance-app'): try: yield finally: server.shutdown()
genome21/dcos-cli
cli/tests/integrations/test_marathon.py
dcos/emitting.py
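dcos/emitting.py above routes every event through a single FlatEmitter.publish -> handler call. A runnable sketch of that dispatch, with a trimmed copy of FlatEmitter and a list-collecting handler standing in for print_handler (which needs the dcos package and a TTY); the default no-op handler here is also a substitution for the real DEFAULT_HANDLER.

events = []


def collecting_handler(event):
    """Test-friendly handler: record events instead of printing/paging them."""
    events.append(event)


class FlatEmitter:
    """Trimmed copy of dcos.emitting.FlatEmitter, for illustration only."""

    def __init__(self, handler=None):
        # The real class falls back to DEFAULT_HANDLER; a no-op stands in here.
        self._handler = handler or (lambda event: None)

    def publish(self, event):
        self._handler(event)


emitter = FlatEmitter(collecting_handler)
emitter.publish({"status": "ok"})
emitter.publish("plain text")
assert events == [{"status": "ok"}, "plain text"]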
"""Support for Goal Zero Yeti Sensors.""" from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.const import CONF_NAME from . import YetiEntity from .const import BINARY_SENSOR_DICT, DATA_KEY_API, DATA_KEY_COORDINATOR, DOMAIN PARALLEL_UPDATES = 0 async def async_setup_entry(hass, entry, async_add_entities): """Set up the Goal Zero Yeti sensor.""" name = entry.data[CONF_NAME] goalzero_data = hass.data[DOMAIN][entry.entry_id] sensors = [ YetiBinarySensor( goalzero_data[DATA_KEY_API], goalzero_data[DATA_KEY_COORDINATOR], name, sensor_name, entry.entry_id, ) for sensor_name in BINARY_SENSOR_DICT ] async_add_entities(sensors, True) class YetiBinarySensor(YetiEntity, BinarySensorEntity): """Representation of a Goal Zero Yeti sensor.""" def __init__(self, api, coordinator, name, sensor_name, server_unique_id): """Initialize a Goal Zero Yeti sensor.""" super().__init__(api, coordinator, name, server_unique_id) self._condition = sensor_name variable_info = BINARY_SENSOR_DICT[sensor_name] self._condition_name = variable_info[0] self._icon = variable_info[2] self._device_class = variable_info[1] @property def name(self): """Return the name of the sensor.""" return f"{self._name} {self._condition_name}" @property def unique_id(self): """Return the unique id of the sensor.""" return f"{self._server_unique_id}/{self._condition_name}" @property def is_on(self): """Return if the service is on.""" if self.api.data: return self.api.data[self._condition] == 1 return False @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/goalzero/binary_sensor.py
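The sensor class above is table-driven: BINARY_SENSOR_DICT (defined in the integration's const.py, not shown here) maps an API key to a tuple that __init__ unpacks as [0] friendly name, [1] device class, [2] icon, and is_on reports True when the device payload carries a 1 for that key. A minimal, self-contained model of that convention; the "backlight" entry and FakeApi are hypothetical.

BINARY_SENSOR_DICT = {
    "backlight": ("Backlight", None, "mdi:clock-digital"),  # hypothetical entry
}


class FakeApi:
    """Stand-in for the Goal Zero API object; .data mimics the JSON payload."""

    data = {"backlight": 1}


api = FakeApi()
for sensor_name, (condition_name, device_class, icon) in BINARY_SENSOR_DICT.items():
    # Same truth test as YetiBinarySensor.is_on: a payload value of 1 means on.
    is_on = bool(api.data) and api.data[sensor_name] == 1
    print(f"{condition_name}: on={is_on}, class={device_class}, icon={icon}")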
"""Describe group states.""" from homeassistant.components.group import GroupIntegrationRegistry from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import callback from homeassistant.helpers.typing import HomeAssistantType @callback def async_describe_on_off_states( hass: HomeAssistantType, registry: GroupIntegrationRegistry ) -> None: """Describe group on off states.""" registry.on_off_states({STATE_LOCKED}, STATE_UNLOCKED)
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/lock/group.py
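The one-line registration above tells the group integration which lock states count as "on". A reduced, runnable model of what that lookup enables, with plain constants in place of GroupIntegrationRegistry; the classify helper is illustrative, not Home Assistant's actual implementation.

STATE_LOCKED = "locked"
STATE_UNLOCKED = "unlocked"

# What registry.on_off_states({STATE_LOCKED}, STATE_UNLOCKED) records:
ON_STATES = {STATE_LOCKED}   # states counted as "on"
OFF_STATE = STATE_UNLOCKED   # the state used on the "off" side


def classify(state):
    """Reduce a lock entity's state to the group's on/off vocabulary."""
    return "on" if state in ON_STATES else "off"


assert classify("locked") == "on"
assert classify("unlocked") == "off"
assert classify("jammed") == "off"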
"""Home Assistant representation of an UPnP/IGD.""" import asyncio from ipaddress import IPv4Address from typing import List, Mapping from async_upnp_client import UpnpFactory from async_upnp_client.aiohttp import AiohttpSessionRequester from async_upnp_client.profiles.igd import IgdDevice from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.dt as dt_util from .const import ( BYTES_RECEIVED, BYTES_SENT, CONF_LOCAL_IP, DISCOVERY_LOCATION, DISCOVERY_ST, DISCOVERY_UDN, DISCOVERY_USN, DOMAIN, DOMAIN_CONFIG, LOGGER as _LOGGER, PACKETS_RECEIVED, PACKETS_SENT, TIMESTAMP, ) class Device: """Home Assistant representation of an UPnP/IGD.""" def __init__(self, igd_device): """Initialize UPnP/IGD device.""" self._igd_device: IgdDevice = igd_device self._mapped_ports = [] @classmethod async def async_discover(cls, hass: HomeAssistantType) -> List[Mapping]: """Discover UPnP/IGD devices.""" _LOGGER.debug("Discovering UPnP/IGD devices") local_ip = None if DOMAIN in hass.data and DOMAIN_CONFIG in hass.data[DOMAIN]: local_ip = hass.data[DOMAIN][DOMAIN_CONFIG].get(CONF_LOCAL_IP) if local_ip: local_ip = IPv4Address(local_ip) discovery_infos = await IgdDevice.async_search(source_ip=local_ip, timeout=10) # add extra info and store devices devices = [] for discovery_info in discovery_infos: discovery_info[DISCOVERY_UDN] = discovery_info["_udn"] discovery_info[DISCOVERY_ST] = discovery_info["st"] discovery_info[DISCOVERY_LOCATION] = discovery_info["location"] usn = f"{discovery_info[DISCOVERY_UDN]}::{discovery_info[DISCOVERY_ST]}" discovery_info[DISCOVERY_USN] = usn _LOGGER.debug("Discovered device: %s", discovery_info) devices.append(discovery_info) return devices @classmethod async def async_create_device(cls, hass: HomeAssistantType, ssdp_location: str): """Create UPnP/IGD device.""" # build async_upnp_client requester session = async_get_clientsession(hass) requester = AiohttpSessionRequester(session, True, 10) # create async_upnp_client device factory = UpnpFactory(requester, disable_state_variable_validation=True) upnp_device = await factory.async_create_device(ssdp_location) igd_device = IgdDevice(upnp_device, None) return cls(igd_device) @property def udn(self) -> str: """Get the UDN.""" return self._igd_device.udn @property def name(self) -> str: """Get the name.""" return self._igd_device.name @property def manufacturer(self) -> str: """Get the manufacturer.""" return self._igd_device.manufacturer @property def model_name(self) -> str: """Get the model name.""" return self._igd_device.model_name @property def device_type(self) -> str: """Get the device type.""" return self._igd_device.device_type @property def unique_id(self) -> str: """Get the unique id.""" return f"{self.udn}::{self.device_type}" def __str__(self) -> str: """Get string representation.""" return f"IGD Device: {self.name}/{self.udn}" async def async_get_traffic_data(self) -> Mapping[str, any]: """ Get all traffic data in one go. Traffic data consists of: - total bytes sent - total bytes received - total packets sent - total packats received Data is timestamped. 
""" _LOGGER.debug("Getting traffic statistics from device: %s", self) values = await asyncio.gather( self._igd_device.async_get_total_bytes_received(), self._igd_device.async_get_total_bytes_sent(), self._igd_device.async_get_total_packets_received(), self._igd_device.async_get_total_packets_sent(), ) return { TIMESTAMP: dt_util.utcnow(), BYTES_RECEIVED: values[0], BYTES_SENT: values[1], PACKETS_RECEIVED: values[2], PACKETS_SENT: values[3], }
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/upnp/device.py
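Device.async_get_traffic_data above issues its four counter reads concurrently with asyncio.gather and timestamps the batch once, so all four values belong to the same poll. The same pattern reduced to a standalone, runnable sketch; StubIgd and the plain-string keys are substitutions for async_upnp_client's IgdDevice and the integration's constants.

import asyncio
from datetime import datetime, timezone


class StubIgd:
    """Hypothetical stub returning fixed counters in place of a real IGD."""

    async def async_get_total_bytes_received(self):
        return 1024

    async def async_get_total_bytes_sent(self):
        return 2048

    async def async_get_total_packets_received(self):
        return 10

    async def async_get_total_packets_sent(self):
        return 20


async def get_traffic_data(igd):
    # Same shape as Device.async_get_traffic_data: four concurrent reads,
    # one timestamp for the whole batch.
    values = await asyncio.gather(
        igd.async_get_total_bytes_received(),
        igd.async_get_total_bytes_sent(),
        igd.async_get_total_packets_received(),
        igd.async_get_total_packets_sent(),
    )
    return {
        "timestamp": datetime.now(timezone.utc),
        "bytes_received": values[0],
        "bytes_sent": values[1],
        "packets_received": values[2],
        "packets_sent": values[3],
    }


print(asyncio.run(get_traffic_data(StubIgd())))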
"""Config flow for UniFi.""" import socket import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from .const import ( CONF_ALLOW_BANDWIDTH_SENSORS, CONF_ALLOW_UPTIME_SENSORS, CONF_BLOCK_CLIENT, CONF_CONTROLLER, CONF_DETECTION_TIME, CONF_IGNORE_WIRED_BUG, CONF_POE_CLIENTS, CONF_SITE_ID, CONF_SSID_FILTER, CONF_TRACK_CLIENTS, CONF_TRACK_DEVICES, CONF_TRACK_WIRED_CLIENTS, CONTROLLER_ID, DEFAULT_POE_CLIENTS, DOMAIN as UNIFI_DOMAIN, LOGGER, ) from .controller import get_controller from .errors import AlreadyConfigured, AuthenticationRequired, CannotConnect DEFAULT_PORT = 8443 DEFAULT_SITE_ID = "default" DEFAULT_VERIFY_SSL = False @callback def get_controller_id_from_config_entry(config_entry): """Return controller with a matching bridge id.""" return CONTROLLER_ID.format( host=config_entry.data[CONF_CONTROLLER][CONF_HOST], site=config_entry.data[CONF_CONTROLLER][CONF_SITE_ID], ) class UnifiFlowHandler(config_entries.ConfigFlow, domain=UNIFI_DOMAIN): """Handle a UniFi config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return UnifiOptionsFlowHandler(config_entry) def __init__(self): """Initialize the UniFi flow.""" self.config = None self.desc = None self.sites = None async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} if user_input is not None: try: self.config = { CONF_HOST: user_input[CONF_HOST], CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_PORT: user_input.get(CONF_PORT), CONF_VERIFY_SSL: user_input.get(CONF_VERIFY_SSL), CONF_SITE_ID: DEFAULT_SITE_ID, } controller = await get_controller(self.hass, **self.config) self.sites = await controller.sites() return await self.async_step_site() except AuthenticationRequired: errors["base"] = "faulty_credentials" except CannotConnect: errors["base"] = "service_unavailable" except Exception: # pylint: disable=broad-except LOGGER.error( "Unknown error connecting with UniFi Controller at %s", user_input[CONF_HOST], ) return self.async_abort(reason="unknown") host = "" if await async_discover_unifi(self.hass): host = "unifi" return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=host): str, vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool, } ), errors=errors, ) async def async_step_site(self, user_input=None): """Select site to control.""" errors = {} if user_input is not None: try: desc = user_input.get(CONF_SITE_ID, self.desc) for site in self.sites.values(): if desc == site["desc"]: self.config[CONF_SITE_ID] = site["name"] break for entry in self._async_current_entries(): controller = entry.data[CONF_CONTROLLER] if ( controller[CONF_HOST] == self.config[CONF_HOST] and controller[CONF_SITE_ID] == self.config[CONF_SITE_ID] ): raise AlreadyConfigured data = {CONF_CONTROLLER: self.config} return self.async_create_entry(title=desc, data=data) except AlreadyConfigured: return self.async_abort(reason="already_configured") if len(self.sites) == 1: self.desc = next(iter(self.sites.values()))["desc"] return await 
self.async_step_site(user_input={}) sites = [] for site in self.sites.values(): sites.append(site["desc"]) return self.async_show_form( step_id="site", data_schema=vol.Schema({vol.Required(CONF_SITE_ID): vol.In(sites)}), errors=errors, ) class UnifiOptionsFlowHandler(config_entries.OptionsFlow): """Handle Unifi options.""" def __init__(self, config_entry): """Initialize UniFi options flow.""" self.config_entry = config_entry self.options = dict(config_entry.options) self.controller = None async def async_step_init(self, user_input=None): """Manage the UniFi options.""" self.controller = self.hass.data[UNIFI_DOMAIN][self.config_entry.entry_id] self.options[CONF_BLOCK_CLIENT] = self.controller.option_block_clients if self.show_advanced_options: return await self.async_step_device_tracker() return await self.async_step_simple_options() async def async_step_simple_options(self, user_input=None): """For simple Jack.""" if user_input is not None: self.options.update(user_input) return await self._update_options() clients_to_block = {} for client in self.controller.api.clients.values(): clients_to_block[ client.mac ] = f"{client.name or client.hostname} ({client.mac})" return self.async_show_form( step_id="simple_options", data_schema=vol.Schema( { vol.Optional( CONF_TRACK_CLIENTS, default=self.controller.option_track_clients, ): bool, vol.Optional( CONF_TRACK_DEVICES, default=self.controller.option_track_devices, ): bool, vol.Optional( CONF_BLOCK_CLIENT, default=self.options[CONF_BLOCK_CLIENT] ): cv.multi_select(clients_to_block), } ), ) async def async_step_device_tracker(self, user_input=None): """Manage the device tracker options.""" if user_input is not None: self.options.update(user_input) return await self.async_step_client_control() ssids = ( set(self.controller.api.wlans) | { f"{wlan.name}{wlan.name_combine_suffix}" for wlan in self.controller.api.wlans.values() if not wlan.name_combine_enabled } | { wlan["name"] for ap in self.controller.api.devices.values() for wlan in ap.wlan_overrides if "name" in wlan } ) ssid_filter = {ssid: ssid for ssid in sorted(list(ssids))} return self.async_show_form( step_id="device_tracker", data_schema=vol.Schema( { vol.Optional( CONF_TRACK_CLIENTS, default=self.controller.option_track_clients, ): bool, vol.Optional( CONF_TRACK_WIRED_CLIENTS, default=self.controller.option_track_wired_clients, ): bool, vol.Optional( CONF_TRACK_DEVICES, default=self.controller.option_track_devices, ): bool, vol.Optional( CONF_SSID_FILTER, default=self.controller.option_ssid_filter ): cv.multi_select(ssid_filter), vol.Optional( CONF_DETECTION_TIME, default=int( self.controller.option_detection_time.total_seconds() ), ): int, vol.Optional( CONF_IGNORE_WIRED_BUG, default=self.controller.option_ignore_wired_bug, ): bool, } ), ) async def async_step_client_control(self, user_input=None): """Manage configuration of network access controlled clients.""" errors = {} if user_input is not None: self.options.update(user_input) return await self.async_step_statistics_sensors() clients_to_block = {} for client in self.controller.api.clients.values(): clients_to_block[ client.mac ] = f"{client.name or client.hostname} ({client.mac})" return self.async_show_form( step_id="client_control", data_schema=vol.Schema( { vol.Optional( CONF_BLOCK_CLIENT, default=self.options[CONF_BLOCK_CLIENT] ): cv.multi_select(clients_to_block), vol.Optional( CONF_POE_CLIENTS, default=self.options.get(CONF_POE_CLIENTS, DEFAULT_POE_CLIENTS), ): bool, } ), errors=errors, ) async def 
async_step_statistics_sensors(self, user_input=None): """Manage the statistics sensors options.""" if user_input is not None: self.options.update(user_input) return await self._update_options() return self.async_show_form( step_id="statistics_sensors", data_schema=vol.Schema( { vol.Optional( CONF_ALLOW_BANDWIDTH_SENSORS, default=self.controller.option_allow_bandwidth_sensors, ): bool, vol.Optional( CONF_ALLOW_UPTIME_SENSORS, default=self.controller.option_allow_uptime_sensors, ): bool, } ), ) async def _update_options(self): """Update config entry options.""" return self.async_create_entry(title="", data=self.options) async def async_discover_unifi(hass): """Discover UniFi address.""" try: return await hass.async_add_executor_job(socket.gethostbyname, "unifi") except socket.gaierror: return None
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/unifi/config_flow.py
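A minimal sketch, not part of the row above, of the entry-data shape the UniFi flow stores and how get_controller_id_from_config_entry() reads it. The literal key strings, the host value, and the CONTROLLER_ID template are assumptions standing in for the integration's const module.

from types import SimpleNamespace

CONTROLLER_ID = "{host}-{site}"  # assumed template, stands in for .const.CONTROLLER_ID

entry = SimpleNamespace(
    data={
        "controller": {  # CONF_CONTROLLER (assumed key string)
            "host": "unifi.example",  # CONF_HOST, hypothetical address
            "site": "default",  # CONF_SITE_ID, set from DEFAULT_SITE_ID
        }
    }
)

# The (host, site) pair is what async_step_site checks for uniqueness before
# raising AlreadyConfigured, so it doubles as the controller id here.
controller_id = CONTROLLER_ID.format(
    host=entry.data["controller"]["host"],
    site=entry.data["controller"]["site"],
)
print(controller_id)  # "unifi.example-default" under the assumed template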
"""Config flow for Logitech Squeezebox integration.""" import asyncio import logging from pysqueezebox import Server, async_discover import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, HTTP_UNAUTHORIZED, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession # pylint: disable=unused-import from .const import DEFAULT_PORT, DOMAIN _LOGGER = logging.getLogger(__name__) TIMEOUT = 5 def _base_schema(discovery_info=None): """Generate base schema.""" base_schema = {} if discovery_info and CONF_HOST in discovery_info: base_schema.update( { vol.Required( CONF_HOST, description={"suggested_value": discovery_info[CONF_HOST]}, ): str, } ) else: base_schema.update({vol.Required(CONF_HOST): str}) if discovery_info and CONF_PORT in discovery_info: base_schema.update( { vol.Required( CONF_PORT, default=DEFAULT_PORT, description={"suggested_value": discovery_info[CONF_PORT]}, ): int, } ) else: base_schema.update({vol.Required(CONF_PORT, default=DEFAULT_PORT): int}) base_schema.update( {vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str} ) return vol.Schema(base_schema) class SqueezeboxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Logitech Squeezebox.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize an instance of the squeezebox config flow.""" self.data_schema = _base_schema() self.discovery_info = None async def _discover(self, uuid=None): """Discover an unconfigured LMS server.""" self.discovery_info = None discovery_event = asyncio.Event() def _discovery_callback(server): if server.uuid: # ignore already configured uuids for entry in self._async_current_entries(): if entry.unique_id == server.uuid: return self.discovery_info = { CONF_HOST: server.host, CONF_PORT: server.port, "uuid": server.uuid, } _LOGGER.debug("Discovered server: %s", self.discovery_info) discovery_event.set() discovery_task = self.hass.async_create_task( async_discover(_discovery_callback) ) await discovery_event.wait() discovery_task.cancel() # stop searching as soon as we find server # update with suggested values from discovery self.data_schema = _base_schema(self.discovery_info) async def _validate_input(self, data): """ Validate the user input allows us to connect. Retrieve unique id and abort if already configured. 
""" server = Server( async_get_clientsession(self.hass), data[CONF_HOST], data[CONF_PORT], data.get(CONF_USERNAME), data.get(CONF_PASSWORD), ) try: status = await server.async_query("serverstatus") if not status: if server.http_status == HTTP_UNAUTHORIZED: return "invalid_auth" return "cannot_connect" except Exception: # pylint: disable=broad-except return "unknown" if "uuid" in status: await self.async_set_unique_id(status["uuid"]) self._abort_if_unique_id_configured() async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} if user_input and CONF_HOST in user_input: # update with host provided by user self.data_schema = _base_schema(user_input) return await self.async_step_edit() # no host specified, see if we can discover an unconfigured LMS server try: await asyncio.wait_for(self._discover(), timeout=TIMEOUT) return await self.async_step_edit() except asyncio.TimeoutError: errors["base"] = "no_server_found" # display the form return self.async_show_form( step_id="user", data_schema=vol.Schema({vol.Optional(CONF_HOST): str}), errors=errors, ) async def async_step_edit(self, user_input=None): """Edit a discovered or manually inputted server.""" errors = {} if user_input: error = await self._validate_input(user_input) if not error: return self.async_create_entry( title=user_input[CONF_HOST], data=user_input ) errors["base"] = error return self.async_show_form( step_id="edit", data_schema=self.data_schema, errors=errors ) async def async_step_import(self, config): """Import a config flow from configuration.""" error = await self._validate_input(config) if error: return self.async_abort(reason=error) return self.async_create_entry(title=config[CONF_HOST], data=config) async def async_step_discovery(self, discovery_info): """Handle discovery.""" _LOGGER.debug("Reached discovery flow with info: %s", discovery_info) if "uuid" in discovery_info: await self.async_set_unique_id(discovery_info.pop("uuid")) self._abort_if_unique_id_configured() else: # attempt to connect to server and determine uuid. will fail if password required error = await self._validate_input(discovery_info) if error: await self._async_handle_discovery_without_unique_id() # update schema with suggested values from discovery self.data_schema = _base_schema(discovery_info) # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context.update({"title_placeholders": {"host": discovery_info[CONF_HOST]}}) return await self.async_step_edit()
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/squeezebox/config_flow.py
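A small usage sketch for the _base_schema() helper above: with discovery info present, the generated schema carries "suggested_value" hints for the form fields, while validation itself behaves the same either way. The host and port values are invented.

from homeassistant.const import CONF_HOST, CONF_PORT

discovered = {CONF_HOST: "192.168.1.50", CONF_PORT: 9000}  # hypothetical LMS server
schema = _base_schema(discovered)

# The schema still validates plain user input the same way.
validated = schema({CONF_HOST: "192.168.1.50", CONF_PORT: 9000})
assert validated[CONF_PORT] == 9000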
"""Support for PoolSense binary sensors.""" from homeassistant.components.binary_sensor import ( DEVICE_CLASS_PROBLEM, BinarySensorEntity, ) from homeassistant.const import CONF_EMAIL from . import PoolSenseEntity from .const import DOMAIN BINARY_SENSORS = { "pH Status": { "unit": None, "icon": None, "name": "pH Status", "device_class": DEVICE_CLASS_PROBLEM, }, "Chlorine Status": { "unit": None, "icon": None, "name": "Chlorine Status", "device_class": DEVICE_CLASS_PROBLEM, }, } async def async_setup_entry(hass, config_entry, async_add_entities): """Defer sensor setup to the shared sensor module.""" coordinator = hass.data[DOMAIN][config_entry.entry_id] binary_sensors_list = [] for binary_sensor in BINARY_SENSORS: binary_sensors_list.append( PoolSenseBinarySensor( coordinator, config_entry.data[CONF_EMAIL], binary_sensor ) ) async_add_entities(binary_sensors_list, False) class PoolSenseBinarySensor(PoolSenseEntity, BinarySensorEntity): """Representation of PoolSense binary sensors.""" @property def is_on(self): """Return true if the binary sensor is on.""" return self.coordinator.data[self.info_type] == "red" @property def icon(self): """Return the icon.""" return BINARY_SENSORS[self.info_type]["icon"] @property def device_class(self): """Return the class of this device.""" return BINARY_SENSORS[self.info_type]["device_class"] @property def name(self): """Return the name of the binary sensor.""" return f"PoolSense {BINARY_SENSORS[self.info_type]['name']}"
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/poolsense/binary_sensor.py
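An illustration of the is_on logic above against a fabricated coordinator payload: the shared coordinator maps each status key to a colour string, and exactly "red" is treated as a problem.

from types import SimpleNamespace

# Invented sample data; the real values come from the PoolSense coordinator.
coordinator = SimpleNamespace(data={"pH Status": "red", "Chlorine Status": "green"})

for info_type in ("pH Status", "Chlorine Status"):
    problem = coordinator.data[info_type] == "red"  # mirrors PoolSenseBinarySensor.is_on
    print(info_type, "problem" if problem else "ok")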
"""Support for EnOcean switches.""" import voluptuous as vol from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import CONF_ID, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import ToggleEntity from .device import EnOceanEntity CONF_CHANNEL = "channel" DEFAULT_NAME = "EnOcean Switch" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_CHANNEL, default=0): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the EnOcean switch platform.""" channel = config.get(CONF_CHANNEL) dev_id = config.get(CONF_ID) dev_name = config.get(CONF_NAME) add_entities([EnOceanSwitch(dev_id, dev_name, channel)]) class EnOceanSwitch(EnOceanEntity, ToggleEntity): """Representation of an EnOcean switch device.""" def __init__(self, dev_id, dev_name, channel): """Initialize the EnOcean switch device.""" super().__init__(dev_id, dev_name) self._light = None self._on_state = False self._on_state2 = False self.channel = channel @property def is_on(self): """Return whether the switch is on or off.""" return self._on_state @property def name(self): """Return the device name.""" return self.dev_name def turn_on(self, **kwargs): """Turn on the switch.""" optional = [0x03] optional.extend(self.dev_id) optional.extend([0xFF, 0x00]) self.send_command( data=[0xD2, 0x01, self.channel & 0xFF, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00], optional=optional, packet_type=0x01, ) self._on_state = True def turn_off(self, **kwargs): """Turn off the switch.""" optional = [0x03] optional.extend(self.dev_id) optional.extend([0xFF, 0x00]) self.send_command( data=[0xD2, 0x01, self.channel & 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], optional=optional, packet_type=0x01, ) self._on_state = False def value_changed(self, packet): """Update the internal state of the switch.""" if packet.data[0] == 0xA5: # power meter telegram, turn on if > 10 watts packet.parse_eep(0x12, 0x01) if packet.parsed["DT"]["raw_value"] == 1: raw_val = packet.parsed["MR"]["raw_value"] divisor = packet.parsed["DIV"]["raw_value"] watts = raw_val / (10 ** divisor) if watts > 1: self._on_state = True self.schedule_update_ha_state() elif packet.data[0] == 0xD2: # actuator status telegram packet.parse_eep(0x01, 0x01) if packet.parsed["CMD"]["raw_value"] == 4: channel = packet.parsed["IO"]["raw_value"] output = packet.parsed["OV"]["raw_value"] if channel == self.channel: self._on_state = output > 0 self.schedule_update_ha_state()
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/enocean/switch.py
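A quick sketch of the VLD (0xD2) actuator command built by turn_on()/turn_off() above: byte 2 selects the output channel and byte 3 carries the output value, with 0x64 (100%) for on and 0x00 for off. The helper name and channel value here are invented for illustration.

def _actuator_command(channel, output_value):
    """Build the 9-byte data field passed to send_command() (illustrative)."""
    return [0xD2, 0x01, channel & 0xFF, output_value, 0x00, 0x00, 0x00, 0x00, 0x00]

assert _actuator_command(0, 0x64)[3] == 0x64  # turn_on payload
assert _actuator_command(0, 0x00)[3] == 0x00  # turn_off payload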
"""Support for Agent.""" import asyncio from agent import AgentError from agent.a import Agent from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONNECTION, DOMAIN as AGENT_DOMAIN, SERVER_URL ATTRIBUTION = "ispyconnect.com" DEFAULT_BRAND = "Agent DVR by ispyconnect.com" FORWARDS = ["alarm_control_panel", "camera"] async def async_setup(hass, config): """Old way to set up integrations.""" return True async def async_setup_entry(hass, config_entry): """Set up the Agent component.""" hass.data.setdefault(AGENT_DOMAIN, {}) server_origin = config_entry.data[SERVER_URL] agent_client = Agent(server_origin, async_get_clientsession(hass)) try: await agent_client.update() except AgentError as err: await agent_client.close() raise ConfigEntryNotReady from err if not agent_client.is_available: raise ConfigEntryNotReady await agent_client.get_devices() hass.data[AGENT_DOMAIN][config_entry.entry_id] = {CONNECTION: agent_client} device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(AGENT_DOMAIN, agent_client.unique)}, manufacturer="iSpyConnect", name=f"Agent {agent_client.name}", model="Agent DVR", sw_version=agent_client.version, ) for forward in FORWARDS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, forward) ) return True async def async_unload_entry(hass, config_entry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, forward) for forward in FORWARDS ] ) ) await hass.data[AGENT_DOMAIN][config_entry.entry_id][CONNECTION].close() if unload_ok: hass.data[AGENT_DOMAIN].pop(config_entry.entry_id) return unload_ok
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/agent_dvr/__init__.py
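A sketch of the unload pattern used in async_unload_entry() above: the per-platform unloads run concurrently and the entry is only evicted when all of them succeed. The _unload() coroutine is a placeholder for async_forward_entry_unload().

import asyncio

async def _demo():
    async def _unload(platform):
        return True  # placeholder result for async_forward_entry_unload

    results = await asyncio.gather(
        *[_unload(p) for p in ("alarm_control_panel", "camera")]
    )
    print("unload_ok:", all(results))

asyncio.run(_demo())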
"""The cert_expiry component.""" from datetime import datetime, timedelta import logging from typing import Optional from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.typing import HomeAssistantType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DEFAULT_PORT, DOMAIN from .errors import TemporaryFailure, ValidationFailure from .helper import get_cert_expiry_timestamp _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(hours=12) async def async_setup(hass, config): """Platform setup, do nothing.""" return True async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): """Load the saved entities.""" host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] coordinator = CertExpiryDataUpdateCoordinator(hass, host, port) await coordinator.async_refresh() if not coordinator.last_update_success: raise ConfigEntryNotReady hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = coordinator if entry.unique_id is None: hass.config_entries.async_update_entry(entry, unique_id=f"{host}:{port}") hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, "sensor") ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.config_entries.async_forward_entry_unload(entry, "sensor") class CertExpiryDataUpdateCoordinator(DataUpdateCoordinator[datetime]): """Class to manage fetching Cert Expiry data from single endpoint.""" def __init__(self, hass, host, port): """Initialize global Cert Expiry data updater.""" self.host = host self.port = port self.cert_error = None self.is_cert_valid = False display_port = f":{port}" if port != DEFAULT_PORT else "" name = f"{self.host}{display_port}" super().__init__( hass, _LOGGER, name=name, update_interval=SCAN_INTERVAL, ) async def _async_update_data(self) -> Optional[datetime]: """Fetch certificate.""" try: timestamp = await get_cert_expiry_timestamp(self.hass, self.host, self.port) except TemporaryFailure as err: raise UpdateFailed(err.args[0]) from err except ValidationFailure as err: self.cert_error = err self.is_cert_valid = False _LOGGER.error("Certificate validation error: %s [%s]", self.host, err) return None self.cert_error = None self.is_cert_valid = True return timestamp
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/cert_expiry/__init__.py
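A hypothetical consumer sketch for the coordinator above: coordinator.data is the expiry datetime (or None after a validation failure), so a sensor can derive a days-remaining reading from it. The sample timestamp is fabricated.

from datetime import datetime, timedelta, timezone

expiry = datetime.now(timezone.utc) + timedelta(days=42)  # stand-in for coordinator.data
if expiry is not None:
    print("days until expiry:", (expiry - datetime.now(timezone.utc)).days)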
"""Support for consuming values for the Volkszaehler API.""" from datetime import timedelta import logging from volkszaehler import Volkszaehler from volkszaehler.exceptions import VolkszaehlerApiConnectionError import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_HOST, CONF_MONITORED_CONDITIONS, CONF_NAME, CONF_PORT, ENERGY_WATT_HOUR, POWER_WATT, ) from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) CONF_UUID = "uuid" DEFAULT_HOST = "localhost" DEFAULT_NAME = "Volkszaehler" DEFAULT_PORT = 80 MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) SENSOR_TYPES = { "average": ["Average", POWER_WATT, "mdi:power-off"], "consumption": ["Consumption", ENERGY_WATT_HOUR, "mdi:power-plug"], "max": ["Max", POWER_WATT, "mdi:arrow-up"], "min": ["Min", POWER_WATT, "mdi:arrow-down"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_UUID): cv.string, vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_MONITORED_CONDITIONS, default=["average"]): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Volkszaehler sensors.""" host = config[CONF_HOST] name = config[CONF_NAME] port = config[CONF_PORT] uuid = config[CONF_UUID] conditions = config[CONF_MONITORED_CONDITIONS] session = async_get_clientsession(hass) vz_api = VolkszaehlerData( Volkszaehler(hass.loop, session, uuid, host=host, port=port) ) await vz_api.async_update() if vz_api.api.data is None: raise PlatformNotReady dev = [] for condition in conditions: dev.append(VolkszaehlerSensor(vz_api, name, condition)) async_add_entities(dev, True) class VolkszaehlerSensor(Entity): """Implementation of a Volkszaehler sensor.""" def __init__(self, vz_api, name, sensor_type): """Initialize the Volkszaehler sensor.""" self.vz_api = vz_api self._name = name self.type = sensor_type self._state = None @property def name(self): """Return the name of the sensor.""" return "{} {}".format(self._name, SENSOR_TYPES[self.type][0]) @property def icon(self): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][2] @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return SENSOR_TYPES[self.type][1] @property def available(self): """Could the device be accessed during the last update call.""" return self.vz_api.available @property def state(self): """Return the state of the resources.""" return self._state async def async_update(self): """Get the latest data from REST API.""" await self.vz_api.async_update() if self.vz_api.api.data is not None: self._state = round(getattr(self.vz_api.api, self.type), 2) class VolkszaehlerData: """The class for handling the data retrieval from the Volkszaehler API.""" def __init__(self, api): """Initialize the data object.""" self.api = api self.available = True @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from the Volkszaehler REST API.""" try: await self.api.get_data() self.available = True except VolkszaehlerApiConnectionError: _LOGGER.error("Unable to fetch data from the 
Volkszaehler API") self.available = False
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/volkszaehler/sensor.py
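A rough, hand-rolled stand-in for the Throttle semantics the sensor above relies on: calls arriving within MIN_TIME_BETWEEN_UPDATES are skipped, so every entity can call async_update() without flooding the API. This is a simplified illustration, not Home Assistant's actual decorator.

from datetime import datetime, timedelta

class ThrottleSketch:
    """Skip work when invoked again within the interval (illustrative)."""

    def __init__(self, interval=timedelta(minutes=1)):
        self._interval = interval
        self._last_run = None

    def should_run(self):
        now = datetime.utcnow()
        if self._last_run is None or now - self._last_run >= self._interval:
            self._last_run = now
            return True
        return False

throttle = ThrottleSketch()
assert throttle.should_run() is True   # first call goes through
assert throttle.should_run() is False  # immediate retry is skipped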
"""Device tracker constants.""" from datetime import timedelta import logging LOGGER = logging.getLogger(__package__) DOMAIN = "device_tracker" PLATFORM_TYPE_LEGACY = "legacy" PLATFORM_TYPE_ENTITY = "entity_platform" SOURCE_TYPE_GPS = "gps" SOURCE_TYPE_ROUTER = "router" SOURCE_TYPE_BLUETOOTH = "bluetooth" SOURCE_TYPE_BLUETOOTH_LE = "bluetooth_le" CONF_SCAN_INTERVAL = "interval_seconds" SCAN_INTERVAL = timedelta(seconds=12) CONF_TRACK_NEW = "track_new_devices" DEFAULT_TRACK_NEW = True CONF_CONSIDER_HOME = "consider_home" DEFAULT_CONSIDER_HOME = timedelta(seconds=180) CONF_NEW_DEVICE_DEFAULTS = "new_device_defaults" ATTR_ATTRIBUTES = "attributes" ATTR_BATTERY = "battery" ATTR_DEV_ID = "dev_id" ATTR_GPS = "gps" ATTR_HOST_NAME = "host_name" ATTR_LOCATION_NAME = "location_name" ATTR_MAC = "mac" ATTR_SOURCE_TYPE = "source_type" ATTR_CONSIDER_HOME = "consider_home"
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/device_tracker/const.py
"""The Netatmo data handler.""" from collections import deque from datetime import timedelta from functools import partial from itertools import islice import logging from time import time from typing import Deque, Dict, List import pyatmo from homeassistant.config_entries import ConfigEntry from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.event import async_track_time_interval from .const import AUTH, DOMAIN, MANUFACTURER _LOGGER = logging.getLogger(__name__) CAMERA_DATA_CLASS_NAME = "CameraData" WEATHERSTATION_DATA_CLASS_NAME = "WeatherStationData" HOMECOACH_DATA_CLASS_NAME = "HomeCoachData" HOMEDATA_DATA_CLASS_NAME = "HomeData" HOMESTATUS_DATA_CLASS_NAME = "HomeStatus" PUBLICDATA_DATA_CLASS_NAME = "PublicData" NEXT_SCAN = "next_scan" DATA_CLASSES = { WEATHERSTATION_DATA_CLASS_NAME: pyatmo.WeatherStationData, HOMECOACH_DATA_CLASS_NAME: pyatmo.HomeCoachData, CAMERA_DATA_CLASS_NAME: pyatmo.CameraData, HOMEDATA_DATA_CLASS_NAME: pyatmo.HomeData, HOMESTATUS_DATA_CLASS_NAME: pyatmo.HomeStatus, PUBLICDATA_DATA_CLASS_NAME: pyatmo.PublicData, } BATCH_SIZE = 3 DEFAULT_INTERVALS = { HOMEDATA_DATA_CLASS_NAME: 900, HOMESTATUS_DATA_CLASS_NAME: 300, CAMERA_DATA_CLASS_NAME: 900, WEATHERSTATION_DATA_CLASS_NAME: 600, HOMECOACH_DATA_CLASS_NAME: 300, PUBLICDATA_DATA_CLASS_NAME: 600, } SCAN_INTERVAL = 60 class NetatmoDataHandler: """Manages the Netatmo data handling.""" def __init__(self, hass: HomeAssistant, entry: ConfigEntry): """Initialize self.""" self.hass = hass self._auth = hass.data[DOMAIN][entry.entry_id][AUTH] self.listeners: List[CALLBACK_TYPE] = [] self._data_classes: Dict = {} self.data = {} self._queue: Deque = deque() self._webhook: bool = False async def async_setup(self): """Set up the Netatmo data handler.""" async_track_time_interval( self.hass, self.async_update, timedelta(seconds=SCAN_INTERVAL) ) self.listeners.append( async_dispatcher_connect( self.hass, f"signal-{DOMAIN}-webhook-None", self.handle_event, ) ) async def async_update(self, event_time): """ Update device. We do up to BATCH_SIZE calls in one update in order to minimize the calls on the api service. 
""" for data_class in islice(self._queue, 0, BATCH_SIZE): if data_class[NEXT_SCAN] > time(): continue self._data_classes[data_class["name"]][NEXT_SCAN] = ( time() + data_class["interval"] ) await self.async_fetch_data( data_class["class"], data_class["name"], **data_class["kwargs"] ) self._queue.rotate(BATCH_SIZE) async def async_cleanup(self): """Clean up the Netatmo data handler.""" for listener in self.listeners: listener() async def handle_event(self, event): """Handle webhook events.""" if event["data"]["push_type"] == "webhook_activation": _LOGGER.info("%s webhook successfully registered", MANUFACTURER) self._webhook = True elif event["data"]["push_type"] == "NACamera-connection": _LOGGER.debug("%s camera reconnected", MANUFACTURER) self._data_classes[CAMERA_DATA_CLASS_NAME][NEXT_SCAN] = time() async def async_fetch_data(self, data_class, data_class_entry, **kwargs): """Fetch data and notify.""" try: self.data[data_class_entry] = await self.hass.async_add_executor_job( partial(data_class, **kwargs), self._auth, ) for update_callback in self._data_classes[data_class_entry][ "subscriptions" ]: if update_callback: update_callback() except (pyatmo.NoDevice, pyatmo.ApiError) as err: _LOGGER.debug(err) async def register_data_class( self, data_class_name, data_class_entry, update_callback, **kwargs ): """Register data class.""" if data_class_entry in self._data_classes: self._data_classes[data_class_entry]["subscriptions"].append( update_callback ) return self._data_classes[data_class_entry] = { "class": DATA_CLASSES[data_class_name], "name": data_class_entry, "interval": DEFAULT_INTERVALS[data_class_name], NEXT_SCAN: time() + DEFAULT_INTERVALS[data_class_name], "kwargs": kwargs, "subscriptions": [update_callback], } await self.async_fetch_data( DATA_CLASSES[data_class_name], data_class_entry, **kwargs ) self._queue.append(self._data_classes[data_class_entry]) _LOGGER.debug("Data class %s added", data_class_entry) async def unregister_data_class(self, data_class_entry, update_callback): """Unregister data class.""" if update_callback not in self._data_classes[data_class_entry]["subscriptions"]: return self._data_classes[data_class_entry]["subscriptions"].remove(update_callback) if not self._data_classes[data_class_entry].get("subscriptions"): self._queue.remove(self._data_classes[data_class_entry]) self._data_classes.pop(data_class_entry) _LOGGER.debug("Data class %s removed", data_class_entry) @property def webhook(self) -> bool: """Return the webhook state.""" return self._webhook
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/netatmo/data_handler.py
"""Support for Enviro pHAT sensors.""" from datetime import timedelta import importlib import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_DISPLAY_OPTIONS, CONF_NAME, PRESSURE_HPA, TEMP_CELSIUS, VOLT, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "envirophat" CONF_USE_LEDS = "use_leds" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) SENSOR_TYPES = { "light": ["light", " ", "mdi:weather-sunny"], "light_red": ["light_red", " ", "mdi:invert-colors"], "light_green": ["light_green", " ", "mdi:invert-colors"], "light_blue": ["light_blue", " ", "mdi:invert-colors"], "accelerometer_x": ["accelerometer_x", "G", "mdi:earth"], "accelerometer_y": ["accelerometer_y", "G", "mdi:earth"], "accelerometer_z": ["accelerometer_z", "G", "mdi:earth"], "magnetometer_x": ["magnetometer_x", " ", "mdi:magnet"], "magnetometer_y": ["magnetometer_y", " ", "mdi:magnet"], "magnetometer_z": ["magnetometer_z", " ", "mdi:magnet"], "temperature": ["temperature", TEMP_CELSIUS, "mdi:thermometer"], "pressure": ["pressure", PRESSURE_HPA, "mdi:gauge"], "voltage_0": ["voltage_0", VOLT, "mdi:flash"], "voltage_1": ["voltage_1", VOLT, "mdi:flash"], "voltage_2": ["voltage_2", VOLT, "mdi:flash"], "voltage_3": ["voltage_3", VOLT, "mdi:flash"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_DISPLAY_OPTIONS, default=list(SENSOR_TYPES)): [ vol.In(SENSOR_TYPES) ], vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_USE_LEDS, default=False): cv.boolean, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Sense HAT sensor platform.""" try: envirophat = importlib.import_module("envirophat") except OSError: _LOGGER.error("No Enviro pHAT was found") return False data = EnvirophatData(envirophat, config.get(CONF_USE_LEDS)) dev = [] for variable in config[CONF_DISPLAY_OPTIONS]: dev.append(EnvirophatSensor(data, variable)) add_entities(dev, True) class EnvirophatSensor(Entity): """Representation of an Enviro pHAT sensor.""" def __init__(self, data, sensor_types): """Initialize the sensor.""" self.data = data self._name = SENSOR_TYPES[sensor_types][0] self._unit_of_measurement = SENSOR_TYPES[sensor_types][1] self.type = sensor_types self._state = None @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][2] @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit_of_measurement def update(self): """Get the latest data and updates the states.""" self.data.update() if self.type == "light": self._state = self.data.light if self.type == "light_red": self._state = self.data.light_red if self.type == "light_green": self._state = self.data.light_green if self.type == "light_blue": self._state = self.data.light_blue if self.type == "accelerometer_x": self._state = self.data.accelerometer_x if self.type == "accelerometer_y": self._state = self.data.accelerometer_y if self.type == "accelerometer_z": self._state = self.data.accelerometer_z if self.type == "magnetometer_x": self._state = self.data.magnetometer_x if self.type == "magnetometer_y": self._state = self.data.magnetometer_y if 
self.type == "magnetometer_z": self._state = self.data.magnetometer_z if self.type == "temperature": self._state = self.data.temperature if self.type == "pressure": self._state = self.data.pressure if self.type == "voltage_0": self._state = self.data.voltage_0 if self.type == "voltage_1": self._state = self.data.voltage_1 if self.type == "voltage_2": self._state = self.data.voltage_2 if self.type == "voltage_3": self._state = self.data.voltage_3 class EnvirophatData: """Get the latest data and update.""" def __init__(self, envirophat, use_leds): """Initialize the data object.""" self.envirophat = envirophat self.use_leds = use_leds # sensors readings self.light = None self.light_red = None self.light_green = None self.light_blue = None self.accelerometer_x = None self.accelerometer_y = None self.accelerometer_z = None self.magnetometer_x = None self.magnetometer_y = None self.magnetometer_z = None self.temperature = None self.pressure = None self.voltage_0 = None self.voltage_1 = None self.voltage_2 = None self.voltage_3 = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from Enviro pHAT.""" # Light sensor reading: 16-bit integer self.light = self.envirophat.light.light() if self.use_leds: self.envirophat.leds.on() # the three color values scaled against the overall light, 0-255 self.light_red, self.light_green, self.light_blue = self.envirophat.light.rgb() if self.use_leds: self.envirophat.leds.off() # accelerometer readings in G ( self.accelerometer_x, self.accelerometer_y, self.accelerometer_z, ) = self.envirophat.motion.accelerometer() # raw magnetometer reading ( self.magnetometer_x, self.magnetometer_y, self.magnetometer_z, ) = self.envirophat.motion.magnetometer() # temperature resolution of BMP280 sensor: 0.01°C self.temperature = round(self.envirophat.weather.temperature(), 2) # pressure resolution of BMP280 sensor: 0.16 Pa, rounding to 0.1 Pa # with conversion to 100 Pa = 1 hPa self.pressure = round(self.envirophat.weather.pressure() / 100.0, 3) # Voltage sensor, reading between 0-3.3V ( self.voltage_0, self.voltage_1, self.voltage_2, self.voltage_3, ) = self.envirophat.analog.read_all()
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/envirophat/sensor.py
"""Switch platform integration for Numato USB GPIO expanders.""" import logging from numato_gpio import NumatoGpioError from homeassistant.const import ( CONF_DEVICES, CONF_ID, CONF_SWITCHES, DEVICE_DEFAULT_NAME, ) from homeassistant.helpers.entity import ToggleEntity from . import CONF_INVERT_LOGIC, CONF_PORTS, DATA_API, DOMAIN _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the configured Numato USB GPIO switch ports.""" if discovery_info is None: return api = hass.data[DOMAIN][DATA_API] switches = [] devices = hass.data[DOMAIN][CONF_DEVICES] for device in [d for d in devices if CONF_SWITCHES in d]: device_id = device[CONF_ID] platform = device[CONF_SWITCHES] invert_logic = platform[CONF_INVERT_LOGIC] ports = platform[CONF_PORTS] for port, port_name in ports.items(): try: api.setup_output(device_id, port) api.write_output(device_id, port, 1 if invert_logic else 0) except NumatoGpioError as err: _LOGGER.error( "Failed to initialize switch '%s' on Numato device %s port %s: %s", port_name, device_id, port, err, ) continue switches.append( NumatoGpioSwitch( port_name, device_id, port, invert_logic, api, ) ) add_entities(switches, True) class NumatoGpioSwitch(ToggleEntity): """Representation of a Numato USB GPIO switch port.""" def __init__(self, name, device_id, port, invert_logic, api): """Initialize the port.""" self._name = name or DEVICE_DEFAULT_NAME self._device_id = device_id self._port = port self._invert_logic = invert_logic self._state = False self._api = api @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def is_on(self): """Return true if port is turned on.""" return self._state def turn_on(self, **kwargs): """Turn the port on.""" try: self._api.write_output( self._device_id, self._port, 0 if self._invert_logic else 1 ) self._state = True self.schedule_update_ha_state() except NumatoGpioError as err: _LOGGER.error( "Failed to turn on Numato device %s port %s: %s", self._device_id, self._port, err, ) def turn_off(self, **kwargs): """Turn the port off.""" try: self._api.write_output( self._device_id, self._port, 1 if self._invert_logic else 0 ) self._state = False self.schedule_update_ha_state() except NumatoGpioError as err: _LOGGER.error( "Failed to turn off Numato device %s port %s: %s", self._device_id, self._port, err, )
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/numato/switch.py
"""Tracking for bluetooth devices.""" import asyncio import logging from typing import List, Optional, Set, Tuple # pylint: disable=import-error import bluetooth from bt_proximity import BluetoothRSSI import voluptuous as vol from homeassistant.components.device_tracker import PLATFORM_SCHEMA from homeassistant.components.device_tracker.const import ( CONF_SCAN_INTERVAL, CONF_TRACK_NEW, DEFAULT_TRACK_NEW, SCAN_INTERVAL, SOURCE_TYPE_BLUETOOTH, ) from homeassistant.components.device_tracker.legacy import ( YAML_DEVICES, async_load_config, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import HomeAssistantType from .const import DOMAIN, SERVICE_UPDATE _LOGGER = logging.getLogger(__name__) BT_PREFIX = "BT_" CONF_REQUEST_RSSI = "request_rssi" CONF_DEVICE_ID = "device_id" DEFAULT_DEVICE_ID = -1 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_TRACK_NEW): cv.boolean, vol.Optional(CONF_REQUEST_RSSI): cv.boolean, vol.Optional(CONF_DEVICE_ID, default=DEFAULT_DEVICE_ID): vol.All( vol.Coerce(int), vol.Range(min=-1) ), } ) def is_bluetooth_device(device) -> bool: """Check whether a device is a bluetooth device by its mac.""" return device.mac and device.mac[:3].upper() == BT_PREFIX def discover_devices(device_id: int) -> List[Tuple[str, str]]: """Discover Bluetooth devices.""" result = bluetooth.discover_devices( duration=8, lookup_names=True, flush_cache=True, lookup_class=False, device_id=device_id, ) _LOGGER.debug("Bluetooth devices discovered = %d", len(result)) return result async def see_device( hass: HomeAssistantType, async_see, mac: str, device_name: str, rssi=None ) -> None: """Mark a device as seen.""" attributes = {} if rssi is not None: attributes["rssi"] = rssi await async_see( mac=f"{BT_PREFIX}{mac}", host_name=device_name, attributes=attributes, source_type=SOURCE_TYPE_BLUETOOTH, ) async def get_tracking_devices(hass: HomeAssistantType) -> Tuple[Set[str], Set[str]]: """ Load all known devices. We just need the devices so set consider_home and home range to 0 """ yaml_path: str = hass.config.path(YAML_DEVICES) devices = await async_load_config(yaml_path, hass, 0) bluetooth_devices = [device for device in devices if is_bluetooth_device(device)] devices_to_track: Set[str] = { device.mac[3:] for device in bluetooth_devices if device.track } devices_to_not_track: Set[str] = { device.mac[3:] for device in bluetooth_devices if not device.track } return devices_to_track, devices_to_not_track def lookup_name(mac: str) -> Optional[str]: """Lookup a Bluetooth device name.""" _LOGGER.debug("Scanning %s", mac) return bluetooth.lookup_name(mac, timeout=5) async def async_setup_scanner( hass: HomeAssistantType, config: dict, async_see, discovery_info=None ): """Set up the Bluetooth Scanner.""" device_id: int = config[CONF_DEVICE_ID] interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL) request_rssi = config.get(CONF_REQUEST_RSSI, False) update_bluetooth_lock = asyncio.Lock() # If track new devices is true discover new devices on startup. 
track_new: bool = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW) _LOGGER.debug("Tracking new devices is set to %s", track_new) devices_to_track, devices_to_not_track = await get_tracking_devices(hass) if not devices_to_track and not track_new: _LOGGER.debug("No Bluetooth devices to track and not tracking new devices") if request_rssi: _LOGGER.debug("Detecting RSSI for devices") async def perform_bluetooth_update(): """Discover Bluetooth devices and update status.""" _LOGGER.debug("Performing Bluetooth devices discovery and update") tasks = [] try: if track_new: devices = await hass.async_add_executor_job(discover_devices, device_id) for mac, device_name in devices: if mac not in devices_to_track and mac not in devices_to_not_track: devices_to_track.add(mac) for mac in devices_to_track: device_name = await hass.async_add_executor_job(lookup_name, mac) if device_name is None: # Could not lookup device name continue rssi = None if request_rssi: client = BluetoothRSSI(mac) rssi = await hass.async_add_executor_job(client.request_rssi) client.close() tasks.append(see_device(hass, async_see, mac, device_name, rssi)) if tasks: await asyncio.wait(tasks) except bluetooth.BluetoothError: _LOGGER.exception("Error looking up Bluetooth device") async def update_bluetooth(now=None): """Lookup Bluetooth devices and update status.""" # If an update is in progress, we don't do anything if update_bluetooth_lock.locked(): _LOGGER.debug( "Previous execution of update_bluetooth is taking longer than the scheduled update of interval %s", interval, ) return async with update_bluetooth_lock: await perform_bluetooth_update() async def handle_manual_update_bluetooth(call): """Update bluetooth devices on demand.""" await update_bluetooth() hass.async_create_task(update_bluetooth()) async_track_time_interval(hass, update_bluetooth, interval) hass.services.async_register(DOMAIN, SERVICE_UPDATE, handle_manual_update_bluetooth) return True
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/bluetooth_tracker/device_tracker.py
"""Config flow for DialogFlow.""" from homeassistant.helpers import config_entry_flow from .const import DOMAIN config_entry_flow.register_webhook_flow( DOMAIN, "Dialogflow Webhook", { "dialogflow_url": "https://dialogflow.com/docs/fulfillment#webhook", "docs_url": "https://www.home-assistant.io/integrations/dialogflow/", }, )
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/dialogflow/config_flow.py
"""Support for the DirecTV receivers.""" import logging from typing import Callable, List, Optional from directv import DIRECTV from homeassistant.components.media_player import ( DEVICE_CLASS_RECEIVER, MediaPlayerEntity, ) from homeassistant.components.media_player.const import ( MEDIA_TYPE_CHANNEL, MEDIA_TYPE_MOVIE, MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_OFF, STATE_PAUSED, STATE_PLAYING from homeassistant.helpers.typing import HomeAssistantType from homeassistant.util import dt as dt_util from . import DIRECTVEntity from .const import ( ATTR_MEDIA_CURRENTLY_RECORDING, ATTR_MEDIA_RATING, ATTR_MEDIA_RECORDED, ATTR_MEDIA_START_TIME, DOMAIN, ) _LOGGER = logging.getLogger(__name__) KNOWN_MEDIA_TYPES = [MEDIA_TYPE_MOVIE, MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW] SUPPORT_DTV = ( SUPPORT_PAUSE | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PLAY_MEDIA | SUPPORT_STOP | SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK | SUPPORT_PLAY ) SUPPORT_DTV_CLIENT = ( SUPPORT_PAUSE | SUPPORT_PLAY_MEDIA | SUPPORT_STOP | SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK | SUPPORT_PLAY ) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities: Callable[[List, bool], None], ) -> bool: """Set up the DirecTV config entry.""" dtv = hass.data[DOMAIN][entry.entry_id] entities = [] for location in dtv.device.locations: entities.append( DIRECTVMediaPlayer( dtv=dtv, name=str.title(location.name), address=location.address, ) ) async_add_entities(entities, True) class DIRECTVMediaPlayer(DIRECTVEntity, MediaPlayerEntity): """Representation of a DirecTV receiver on the network.""" def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None: """Initialize DirecTV media player.""" super().__init__( dtv=dtv, name=name, address=address, ) self._assumed_state = None self._available = False self._is_recorded = None self._is_standby = True self._last_position = None self._last_update = None self._paused = None self._program = None self._state = None async def async_update(self): """Retrieve latest state.""" self._state = await self.dtv.state(self._address) self._available = self._state.available self._is_standby = self._state.standby self._program = self._state.program if self._is_standby: self._assumed_state = False self._is_recorded = None self._last_position = None self._last_update = None self._paused = None elif self._program is not None: self._paused = self._last_position == self._program.position self._is_recorded = self._program.recorded self._last_position = self._program.position self._last_update = self._state.at self._assumed_state = self._is_recorded @property def device_state_attributes(self): """Return device specific state attributes.""" if self._is_standby: return {} return { ATTR_MEDIA_CURRENTLY_RECORDING: self.media_currently_recording, ATTR_MEDIA_RATING: self.media_rating, ATTR_MEDIA_RECORDED: self.media_recorded, ATTR_MEDIA_START_TIME: self.media_start_time, } @property def name(self): """Return the name of the device.""" return self._name @property def device_class(self) -> Optional[str]: """Return the class of this device.""" return DEVICE_CLASS_RECEIVER @property def unique_id(self): """Return a unique ID to use for this media player.""" if self._address == "0": return self.dtv.device.info.receiver_id return self._address # MediaPlayerEntity 
properties and methods @property def state(self): """Return the state of the device.""" if self._is_standby: return STATE_OFF # For recorded media we can determine if it is paused or not. # For live media we're unable to determine and will always return # playing instead. if self._paused: return STATE_PAUSED return STATE_PLAYING @property def available(self): """Return if able to retrieve information from DVR or not.""" return self._available @property def assumed_state(self): """Return if we assume the state or not.""" return self._assumed_state @property def media_content_id(self): """Return the content ID of current playing media.""" if self._is_standby or self._program is None: return None return self._program.program_id @property def media_content_type(self): """Return the content type of current playing media.""" if self._is_standby or self._program is None: return None if self._program.program_type in KNOWN_MEDIA_TYPES: return self._program.program_type return MEDIA_TYPE_MOVIE @property def media_duration(self): """Return the duration of current playing media in seconds.""" if self._is_standby or self._program is None: return None return self._program.duration @property def media_position(self): """Position of current playing media in seconds.""" if self._is_standby: return None return self._last_position @property def media_position_updated_at(self): """When was the position of the current playing media valid.""" if self._is_standby: return None return self._last_update @property def media_title(self): """Return the title of current playing media.""" if self._is_standby or self._program is None: return None if self.media_content_type == MEDIA_TYPE_MUSIC: return self._program.music_title return self._program.title @property def media_artist(self): """Artist of current playing media, music track only.""" if self._is_standby or self._program is None: return None return self._program.music_artist @property def media_album_name(self): """Album name of current playing media, music track only.""" if self._is_standby or self._program is None: return None return self._program.music_album @property def media_series_title(self): """Return the title of current episode of TV show.""" if self._is_standby or self._program is None: return None return self._program.episode_title @property def media_channel(self): """Return the channel current playing media.""" if self._is_standby or self._program is None: return None return f"{self._program.channel_name} ({self._program.channel})" @property def source(self): """Name of the current input source.""" if self._is_standby or self._program is None: return None return self._program.channel @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_DTV_CLIENT if self._is_client else SUPPORT_DTV @property def media_currently_recording(self): """If the media is currently being recorded or not.""" if self._is_standby or self._program is None: return None return self._program.recording @property def media_rating(self): """TV Rating of the current playing media.""" if self._is_standby or self._program is None: return None return self._program.rating @property def media_recorded(self): """If the media was recorded or live.""" if self._is_standby: return None return self._is_recorded @property def media_start_time(self): """Start time the program aired.""" if self._is_standby or self._program is None: return None return dt_util.as_local(self._program.start_time) async def async_turn_on(self): """Turn on the 
receiver.""" if self._is_client: raise NotImplementedError() _LOGGER.debug("Turn on %s", self._name) await self.dtv.remote("poweron", self._address) async def async_turn_off(self): """Turn off the receiver.""" if self._is_client: raise NotImplementedError() _LOGGER.debug("Turn off %s", self._name) await self.dtv.remote("poweroff", self._address) async def async_media_play(self): """Send play command.""" _LOGGER.debug("Play on %s", self._name) await self.dtv.remote("play", self._address) async def async_media_pause(self): """Send pause command.""" _LOGGER.debug("Pause on %s", self._name) await self.dtv.remote("pause", self._address) async def async_media_stop(self): """Send stop command.""" _LOGGER.debug("Stop on %s", self._name) await self.dtv.remote("stop", self._address) async def async_media_previous_track(self): """Send rewind command.""" _LOGGER.debug("Rewind on %s", self._name) await self.dtv.remote("rew", self._address) async def async_media_next_track(self): """Send fast forward command.""" _LOGGER.debug("Fast forward on %s", self._name) await self.dtv.remote("ffwd", self._address) async def async_play_media(self, media_type, media_id, **kwargs): """Select input source.""" if media_type != MEDIA_TYPE_CHANNEL: _LOGGER.error( "Invalid media type %s. Only %s is supported", media_type, MEDIA_TYPE_CHANNEL, ) return _LOGGER.debug("Changing channel on %s to %s", self._name, media_id) await self.dtv.tune(media_id, self._address)
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/directv/media_player.py
"""Component for interacting with the Yale Smart Alarm System API.""" import logging import voluptuous as vol from yalesmartalarmclient.client import ( YALE_STATE_ARM_FULL, YALE_STATE_ARM_PARTIAL, YALE_STATE_DISARM, AuthenticationError, YaleSmartAlarmClient, ) from homeassistant.components.alarm_control_panel import ( PLATFORM_SCHEMA, AlarmControlPanelEntity, ) from homeassistant.components.alarm_control_panel.const import ( SUPPORT_ALARM_ARM_AWAY, SUPPORT_ALARM_ARM_HOME, ) from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, CONF_USERNAME, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, ) import homeassistant.helpers.config_validation as cv CONF_AREA_ID = "area_id" DEFAULT_NAME = "Yale Smart Alarm" DEFAULT_AREA_ID = "1" _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_AREA_ID, default=DEFAULT_AREA_ID): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the alarm platform.""" name = config[CONF_NAME] username = config[CONF_USERNAME] password = config[CONF_PASSWORD] area_id = config[CONF_AREA_ID] try: client = YaleSmartAlarmClient(username, password, area_id) except AuthenticationError: _LOGGER.error("Authentication failed. Check credentials") return add_entities([YaleAlarmDevice(name, client)], True) class YaleAlarmDevice(AlarmControlPanelEntity): """Represent a Yale Smart Alarm.""" def __init__(self, name, client): """Initialize the Yale Alarm Device.""" self._name = name self._client = client self._state = None self._state_map = { YALE_STATE_DISARM: STATE_ALARM_DISARMED, YALE_STATE_ARM_PARTIAL: STATE_ALARM_ARMED_HOME, YALE_STATE_ARM_FULL: STATE_ALARM_ARMED_AWAY, } @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the state of the device.""" return self._state @property def supported_features(self) -> int: """Return the list of supported features.""" return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY def update(self): """Return the state of the device.""" armed_status = self._client.get_armed_status() self._state = self._state_map.get(armed_status) def alarm_disarm(self, code=None): """Send disarm command.""" self._client.disarm() def alarm_arm_home(self, code=None): """Send arm home command.""" self._client.arm_partial() def alarm_arm_away(self, code=None): """Send arm away command.""" self._client.arm_full()
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/yale_smart_alarm/alarm_control_panel.py
"""Support for LCN climate control.""" import pypck from homeassistant.components.climate import ClimateEntity, const from homeassistant.const import ATTR_TEMPERATURE, CONF_ADDRESS, CONF_UNIT_OF_MEASUREMENT from . import LcnDevice from .const import ( CONF_CONNECTIONS, CONF_LOCKABLE, CONF_MAX_TEMP, CONF_MIN_TEMP, CONF_SETPOINT, CONF_SOURCE, DATA_LCN, ) from .helpers import get_connection async def async_setup_platform( hass, hass_config, async_add_entities, discovery_info=None ): """Set up the LCN climate platform.""" if discovery_info is None: return devices = [] for config in discovery_info: address, connection_id = config[CONF_ADDRESS] addr = pypck.lcn_addr.LcnAddr(*address) connections = hass.data[DATA_LCN][CONF_CONNECTIONS] connection = get_connection(connections, connection_id) address_connection = connection.get_address_conn(addr) devices.append(LcnClimate(config, address_connection)) async_add_entities(devices) class LcnClimate(LcnDevice, ClimateEntity): """Representation of a LCN climate device.""" def __init__(self, config, address_connection): """Initialize of a LCN climate device.""" super().__init__(config, address_connection) self.variable = pypck.lcn_defs.Var[config[CONF_SOURCE]] self.setpoint = pypck.lcn_defs.Var[config[CONF_SETPOINT]] self.unit = pypck.lcn_defs.VarUnit.parse(config[CONF_UNIT_OF_MEASUREMENT]) self.regulator_id = pypck.lcn_defs.Var.to_set_point_id(self.setpoint) self.is_lockable = config[CONF_LOCKABLE] self._max_temp = config[CONF_MAX_TEMP] self._min_temp = config[CONF_MIN_TEMP] self._current_temperature = None self._target_temperature = None self._is_on = None async def async_added_to_hass(self): """Run when entity about to be added to hass.""" await super().async_added_to_hass() await self.address_connection.activate_status_request_handler(self.variable) await self.address_connection.activate_status_request_handler(self.setpoint) @property def supported_features(self): """Return the list of supported features.""" return const.SUPPORT_TARGET_TEMPERATURE @property def temperature_unit(self): """Return the unit of measurement.""" return self.unit.value @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._target_temperature @property def hvac_mode(self): """Return hvac operation ie. heat, cool mode. Need to be one of HVAC_MODE_*. """ if self._is_on: return const.HVAC_MODE_HEAT return const.HVAC_MODE_OFF @property def hvac_modes(self): """Return the list of available hvac operation modes. Need to be a subset of HVAC_MODES. 
""" modes = [const.HVAC_MODE_HEAT] if self.is_lockable: modes.append(const.HVAC_MODE_OFF) return modes @property def max_temp(self): """Return the maximum temperature.""" return self._max_temp @property def min_temp(self): """Return the minimum temperature.""" return self._min_temp async def async_set_hvac_mode(self, hvac_mode): """Set new target hvac mode.""" if hvac_mode == const.HVAC_MODE_HEAT: self._is_on = True self.address_connection.lock_regulator(self.regulator_id, False) elif hvac_mode == const.HVAC_MODE_OFF: self._is_on = False self.address_connection.lock_regulator(self.regulator_id, True) self._target_temperature = None self.async_write_ha_state() async def async_set_temperature(self, **kwargs): """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if temperature is None: return self._target_temperature = temperature self.address_connection.var_abs( self.setpoint, self._target_temperature, self.unit ) self.async_write_ha_state() def input_received(self, input_obj): """Set temperature value when LCN input object is received.""" if not isinstance(input_obj, pypck.inputs.ModStatusVar): return if input_obj.get_var() == self.variable: self._current_temperature = input_obj.get_value().to_var_unit(self.unit) elif input_obj.get_var() == self.setpoint: self._is_on = not input_obj.get_value().is_locked_regulator() if self._is_on: self._target_temperature = input_obj.get_value().to_var_unit(self.unit) self.async_write_ha_state()
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/lcn/climate.py
"""Support for Apple TV media player.""" import logging import pyatv.const as atv_const from homeassistant.components.media_player import MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, ) from homeassistant.const import ( CONF_HOST, CONF_NAME, EVENT_HOMEASSISTANT_STOP, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, ) from homeassistant.core import callback import homeassistant.util.dt as dt_util from . import ATTR_ATV, ATTR_POWER, DATA_APPLE_TV, DATA_ENTITIES _LOGGER = logging.getLogger(__name__) SUPPORT_APPLE_TV = ( SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PLAY_MEDIA | SUPPORT_PAUSE | SUPPORT_PLAY | SUPPORT_SEEK | SUPPORT_STOP | SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Apple TV platform.""" if not discovery_info: return # Manage entity cache for service handler if DATA_ENTITIES not in hass.data: hass.data[DATA_ENTITIES] = [] name = discovery_info[CONF_NAME] host = discovery_info[CONF_HOST] atv = hass.data[DATA_APPLE_TV][host][ATTR_ATV] power = hass.data[DATA_APPLE_TV][host][ATTR_POWER] entity = AppleTvDevice(atv, name, power) @callback def on_hass_stop(event): """Stop push updates when hass stops.""" atv.push_updater.stop() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) if entity not in hass.data[DATA_ENTITIES]: hass.data[DATA_ENTITIES].append(entity) async_add_entities([entity]) class AppleTvDevice(MediaPlayerEntity): """Representation of an Apple TV device.""" def __init__(self, atv, name, power): """Initialize the Apple TV device.""" self.atv = atv self._name = name self._playing = None self._power = power self._power.listeners.append(self) self.atv.push_updater.listener = self async def async_added_to_hass(self): """Handle when an entity is about to be added to Home Assistant.""" self._power.init() @property def name(self): """Return the name of the device.""" return self._name @property def unique_id(self): """Return a unique ID.""" return self.atv.metadata.device_id @property def should_poll(self): """No polling needed.""" return False @property def state(self): """Return the state of the device.""" if not self._power.turned_on: return STATE_OFF if self._playing: state = self._playing.play_state if state in ( atv_const.PLAY_STATE_IDLE, atv_const.PLAY_STATE_NO_MEDIA, atv_const.PLAY_STATE_LOADING, ): return STATE_IDLE if state == atv_const.PLAY_STATE_PLAYING: return STATE_PLAYING if state in ( atv_const.PLAY_STATE_PAUSED, atv_const.PLAY_STATE_FAST_FORWARD, atv_const.PLAY_STATE_FAST_BACKWARD, atv_const.PLAY_STATE_STOPPED, ): # Catch fast forward/backward here so "play" is default action return STATE_PAUSED return STATE_STANDBY # Bad or unknown state? @callback def playstatus_update(self, updater, playing): """Print what is currently playing when it changes.""" self._playing = playing self.async_write_ha_state() @callback def playstatus_error(self, updater, exception): """Inform about an error and restart push updates.""" _LOGGER.warning("A %s error occurred: %s", exception.__class__, exception) # This will wait 10 seconds before restarting push updates. If the # connection continues to fail, it will flood the log (every 10 # seconds) until it succeeds. 
A better approach should probably be # implemented here later. updater.start(initial_delay=10) self._playing = None self.async_write_ha_state() @property def media_content_type(self): """Content type of current playing media.""" if self._playing: media_type = self._playing.media_type if media_type == atv_const.MEDIA_TYPE_VIDEO: return MEDIA_TYPE_VIDEO if media_type == atv_const.MEDIA_TYPE_MUSIC: return MEDIA_TYPE_MUSIC if media_type == atv_const.MEDIA_TYPE_TV: return MEDIA_TYPE_TVSHOW @property def media_duration(self): """Duration of current playing media in seconds.""" if self._playing: return self._playing.total_time @property def media_position(self): """Position of current playing media in seconds.""" if self._playing: return self._playing.position @property def media_position_updated_at(self): """Last valid time of media position.""" state = self.state if state in (STATE_PLAYING, STATE_PAUSED): return dt_util.utcnow() async def async_play_media(self, media_type, media_id, **kwargs): """Send the play_media command to the media player.""" await self.atv.airplay.play_url(media_id) @property def media_image_hash(self): """Hash value for media image.""" state = self.state if self._playing and state not in [STATE_OFF, STATE_IDLE]: return self._playing.hash async def async_get_media_image(self): """Fetch media image of current playing image.""" state = self.state if self._playing and state not in [STATE_OFF, STATE_IDLE]: return (await self.atv.metadata.artwork()), "image/png" return None, None @property def media_title(self): """Title of current playing media.""" if self._playing: if self.state == STATE_IDLE: return "Nothing playing" title = self._playing.title return title if title else "No title" return f"Establishing a connection to {self._name}..." @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_APPLE_TV async def async_turn_on(self): """Turn the media player on.""" self._power.set_power_on(True) async def async_turn_off(self): """Turn the media player off.""" self._playing = None self._power.set_power_on(False) async def async_media_play_pause(self): """Pause media on media player.""" if not self._playing: return state = self.state if state == STATE_PAUSED: await self.atv.remote_control.play() elif state == STATE_PLAYING: await self.atv.remote_control.pause() async def async_media_play(self): """Play media.""" if self._playing: await self.atv.remote_control.play() async def async_media_stop(self): """Stop the media player.""" if self._playing: await self.atv.remote_control.stop() async def async_media_pause(self): """Pause the media player.""" if self._playing: await self.atv.remote_control.pause() async def async_media_next_track(self): """Send next track command.""" if self._playing: await self.atv.remote_control.next() async def async_media_previous_track(self): """Send previous track command.""" if self._playing: await self.atv.remote_control.previous() async def async_media_seek(self, position): """Send seek command.""" if self._playing: await self.atv.remote_control.set_position(position)
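To make the play-state mapping above concrete, here is a hedged sketch that instantiates AppleTvDevice with MagicMock stand-ins for the pyatv connection and the power handler; no real Apple TV is contacted, and the entity class and constants are the ones defined or imported in the module above.

# Hedged usage sketch (illustrative only, not part of the component):
# a fast-forward play state is reported as STATE_PAUSED so that "play"
# remains the default action in the UI.
from unittest.mock import MagicMock

stub_atv = MagicMock()
stub_power = MagicMock()
stub_power.turned_on = True

entity = AppleTvDevice(stub_atv, "Living Room", stub_power)

playing = MagicMock()
playing.play_state = atv_const.PLAY_STATE_FAST_FORWARD
entity._playing = playing

assert entity.state == STATE_PAUSED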
"""The test for the geo rss events sensor platform.""" import pytest from homeassistant.components import sensor import homeassistant.components.geo_rss_events.sensor as geo_rss_events from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import assert_setup_component, async_fire_time_changed URL = "http://geo.rss.local/geo_rss_events.xml" VALID_CONFIG_WITH_CATEGORIES = { sensor.DOMAIN: [ { "platform": "geo_rss_events", geo_rss_events.CONF_URL: URL, geo_rss_events.CONF_CATEGORIES: ["Category 1"], } ] } VALID_CONFIG = { sensor.DOMAIN: [{"platform": "geo_rss_events", geo_rss_events.CONF_URL: URL}] } """Test the GeoRss service updater.""" @pytest.fixture def mock_feed(): """Pytest fixture for homeassistant.components.geo_rss_events.sensor.GenericFeed.""" with patch( "homeassistant.components.geo_rss_events.sensor.GenericFeed" ) as mock_feed: yield mock_feed def _generate_mock_feed_entry( external_id, title, distance_to_home, coordinates, category ): """Construct a mock feed entry for testing purposes.""" feed_entry = MagicMock() feed_entry.external_id = external_id feed_entry.title = title feed_entry.distance_to_home = distance_to_home feed_entry.coordinates = coordinates feed_entry.category = category return feed_entry async def test_setup(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] utcnow = dt_util.utcnow() # Patching 'utcnow' to gain more control over the timed update. with patch("homeassistant.util.dt.utcnow", return_value=utcnow): with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component(hass, sensor.DOMAIN, VALID_CONFIG) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert state is not None assert state.name == "Event Service Any" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", } # Simulate an update - empty data, but successful update, # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 2 # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_any") assert int(state.state) == 0 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Any", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", } async def test_setup_with_categories(hass, mock_feed): """Test the general setup of the platform.""" # Set up some mock feed entries for this test. mock_entry_1 = _generate_mock_feed_entry( "1234", "Title 1", 15.5, (-31.0, 150.0), "Category 1" ) mock_entry_2 = _generate_mock_feed_entry( "2345", "Title 2", 20.5, (-31.1, 150.1), "Category 1" ) mock_feed.return_value.update.return_value = "OK", [mock_entry_1, mock_entry_2] with assert_setup_component(1, sensor.DOMAIN): assert await async_setup_component( hass, sensor.DOMAIN, VALID_CONFIG_WITH_CATEGORIES ) # Artificially trigger update. hass.bus.fire(EVENT_HOMEASSISTANT_START) # Collect events. await hass.async_block_till_done() all_states = hass.states.async_all() assert len(all_states) == 1 state = hass.states.get("sensor.event_service_category_1") assert state is not None assert state.name == "Event Service Category 1" assert int(state.state) == 2 assert state.attributes == { ATTR_FRIENDLY_NAME: "Event Service Category 1", ATTR_UNIT_OF_MEASUREMENT: "Events", ATTR_ICON: "mdi:alert", "Title 1": "16km", "Title 2": "20km", }
mezz64/home-assistant
tests/components/geo_rss_events/test_sensor.py
homeassistant/components/apple_tv/media_player.py