Dataset Viewer (auto-converted to Parquet)

Columns:
- input: string (lengths 53 to 297k)
- output: string (604 distinct values)
- repo_name: string (376 distinct values)
- test_path: string (583 distinct values)
- code_path: string (lengths 7 to 116)
# coding: utf-8 from __future__ import absolute_import, unicode_literals from datetime import datetime, timedelta import json import random import string import time from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey import jwt from six import binary_type, string_types, raise_from, text_type from ..config import API from ..exception import BoxOAuthException from .oauth2 import OAuth2 from ..object.user import User from ..util.compat import NoneType class JWTAuth(OAuth2): """ Responsible for handling JWT Auth for Box Developer Edition. Can authenticate enterprise instances or app users. """ _GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:jwt-bearer' def __init__( self, client_id, client_secret, enterprise_id, jwt_key_id, rsa_private_key_file_sys_path=None, rsa_private_key_passphrase=None, user=None, store_tokens=None, box_device_id='0', box_device_name='', access_token=None, session=None, jwt_algorithm='RS256', rsa_private_key_data=None, **kwargs ): """Extends baseclass method. Must pass exactly one of either `rsa_private_key_file_sys_path` or `rsa_private_key_data`. If both `enterprise_id` and `user` are non-`None`, the `user` takes precedence when `refresh()` is called. This can be overruled with a call to `authenticate_instance()`. :param client_id: Box API key used for identifying the application the user is authenticating with. :type client_id: `unicode` :param client_secret: Box API secret used for making OAuth2 requests. :type client_secret: `unicode` :param enterprise_id: The ID of the Box Developer Edition enterprise. May be `None`, if the caller knows that it will not be authenticating as an enterprise instance / service account. If `user` is passed, this value is not used, unless `authenticate_instance()` is called to clear the user and authenticate as the enterprise instance. :type enterprise_id: `unicode` or `None` :param jwt_key_id: Key ID for the JWT assertion. :type jwt_key_id: `unicode` :param rsa_private_key_file_sys_path: (optional) Path to an RSA private key file, used for signing the JWT assertion. :type rsa_private_key_file_sys_path: `unicode` :param rsa_private_key_passphrase: Passphrase used to unlock the private key. Do not pass a unicode string - this must be bytes. :type rsa_private_key_passphrase: `bytes` or None :param user: (optional) The user to authenticate, expressed as a Box User ID or as a :class:`User` instance. This value is not required. But if it is provided, then the user will be auto-authenticated at the time of the first API call or when calling `authenticate_user()` without any arguments. Should be `None` if the intention is to authenticate as the enterprise instance / service account. If both `enterprise_id` and `user` are non-`None`, the `user` takes precedense when `refresh()` is called. May be one of this application's created App User. Depending on the configured User Access Level, may also be any other App User or Managed User in the enterprise. <https://developer.box.com/en/guides/applications/> <https://developer.box.com/en/guides/authentication/select/> :type user: `unicode` or :class:`User` or `None` :param store_tokens: Optional callback for getting access to tokens for storing them. :type store_tokens: `callable` :param box_device_id: Optional unique ID of this device. Used for applications that want to support device-pinning. 
:type box_device_id: `unicode` :param box_device_name: Optional human readable name for this device. :type box_device_name: `unicode` :param access_token: Access token to use for auth until it expires. :type access_token: `unicode` :param network_layer: If specified, use it to make network requests. If not, the default network implementation will be used. :type network_layer: :class:`Network` :param jwt_algorithm: Which algorithm to use for signing the JWT assertion. Must be one of 'RS256', 'RS384', 'RS512'. :type jwt_algorithm: `unicode` :param rsa_private_key_data: (optional) Contents of RSA private key, used for signing the JWT assertion. Do not pass a unicode string. Can pass a byte string, or a file-like object that returns bytes, or an already-loaded `RSAPrivateKey` object. :type rsa_private_key_data: `bytes` or :class:`io.IOBase` or :class:`RSAPrivateKey` """ user_id = self._normalize_user_id(user) rsa_private_key = self._normalize_rsa_private_key( file_sys_path=rsa_private_key_file_sys_path, data=rsa_private_key_data, passphrase=rsa_private_key_passphrase, ) del rsa_private_key_data del rsa_private_key_file_sys_path super(JWTAuth, self).__init__( client_id, client_secret, store_tokens=store_tokens, box_device_id=box_device_id, box_device_name=box_device_name, access_token=access_token, refresh_token=None, session=session, **kwargs ) self._rsa_private_key = rsa_private_key self._enterprise_id = enterprise_id self._jwt_algorithm = jwt_algorithm self._jwt_key_id = jwt_key_id self._user_id = user_id def _construct_and_send_jwt_auth(self, sub, sub_type, now_time=None): """ Construct the claims used for JWT auth and send a request to get a JWT. Pass an enterprise ID to get an enterprise token (which can be used to provision/deprovision users), or a user ID to get a user token. :param sub: The enterprise ID or user ID to auth. :type sub: `unicode` :param sub_type: Either 'enterprise' or 'user' :type sub_type: `unicode` :param now_time: Optional. The current UTC time is needed in order to construct the expiration time of the JWT claim. If None, `datetime.utcnow()` will be used. :type now_time: `datetime` or None :return: The access token for the enterprise or app user. :rtype: `unicode` """ system_random = random.SystemRandom() jti_length = system_random.randint(16, 128) ascii_alphabet = string.ascii_letters + string.digits ascii_len = len(ascii_alphabet) jti = ''.join(ascii_alphabet[int(system_random.random() * ascii_len)] for _ in range(jti_length)) if now_time is None: now_time = datetime.utcnow() now_plus_30 = now_time + timedelta(seconds=30) assertion = jwt.encode( { 'iss': self._client_id, 'sub': sub, 'box_sub_type': sub_type, 'aud': 'https://api.box.com/oauth2/token', 'jti': jti, 'exp': int((now_plus_30 - datetime(1970, 1, 1)).total_seconds()), }, self._rsa_private_key, algorithm=self._jwt_algorithm, headers={ 'kid': self._jwt_key_id, }, ) data = { 'grant_type': self._GRANT_TYPE, 'client_id': self._client_id, 'client_secret': self._client_secret, 'assertion': assertion, } if self._box_device_id: data['box_device_id'] = self._box_device_id if self._box_device_name: data['box_device_name'] = self._box_device_name return self.send_token_request(data, access_token=None, expect_refresh_token=False)[0] def _auth_with_jwt(self, sub, sub_type): """ Auth with JWT. If authorization fails because the expiration time is out of sync with the Box servers, retry using the time returned in the error response. 
Pass an enterprise ID to get an enterprise token (which can be used to provision/deprovision users), or a user ID to get a user token. :param sub: The enterprise ID or user ID to auth. :type sub: `unicode` :param sub_type: Either 'enterprise' or 'user' :type sub_type: `unicode` :return: The access token for the enterprise or app user. :rtype: `unicode` """ attempt_number = 0 jwt_time = None while True: try: return self._construct_and_send_jwt_auth(sub, sub_type, jwt_time) except BoxOAuthException as ex: network_response = ex.network_response code = network_response.status_code # pylint: disable=maybe-no-member box_datetime = self._get_date_header(network_response) if attempt_number >= API.MAX_RETRY_ATTEMPTS: raise ex if (code == 429 or code >= 500): jwt_time = None elif box_datetime is not None and self._was_exp_claim_rejected_due_to_clock_skew(network_response): jwt_time = box_datetime else: raise ex time_delay = self._session.get_retry_after_time(attempt_number, network_response.headers.get('Retry-After', None)) # pylint: disable=maybe-no-member time.sleep(time_delay) attempt_number += 1 self._logger.debug('Retrying JWT request') @staticmethod def _get_date_header(network_response): """ Get datetime object for Date header, if the Date header is available. :param network_response: The response from the Box API that should include a Date header. :type network_response: :class:`Response` :return: The datetime parsed from the Date header, or None if the header is absent or if it couldn't be parsed. :rtype: `datetime` or `None` """ box_date_header = network_response.headers.get('Date', None) if box_date_header is not None: try: return datetime.strptime(box_date_header, '%a, %d %b %Y %H:%M:%S %Z') except ValueError: pass return None @staticmethod def _was_exp_claim_rejected_due_to_clock_skew(network_response): """ Determine whether the network response indicates that the authorization request was rejected because of the exp claim. This can happen if the current system time is too different from the Box server time. Returns True if the status code is 400, the error code is invalid_grant, and the error description indicates a problem with the exp claim; False, otherwise. :param network_response: :type network_response: :class:`Response` :rtype: `bool` """ status_code = network_response.status_code try: json_response = network_response.json() except ValueError: return False error_code = json_response.get('error', '') error_description = json_response.get('error_description', '') return status_code == 400 and error_code == 'invalid_grant' and 'exp' in error_description def authenticate_user(self, user=None): """ Get an access token for a User. May be one of this application's created App User. Depending on the configured User Access Level, may also be any other App User or Managed User in the enterprise. <https://developer.box.com/en/guides/applications/> <https://developer.box.com/en/guides/authentication/select/> :param user: (optional) The user to authenticate, expressed as a Box User ID or as a :class:`User` instance. If not given, then the most recently provided user ID, if available, will be used. :type user: `unicode` or :class:`User` :raises: :exc:`ValueError` if no user ID was passed and the object is not currently configured with one. :return: The access token for the user. 
:rtype: `unicode` """ sub = self._normalize_user_id(user) or self._user_id if not sub: raise ValueError("authenticate_user: Requires the user ID, but it was not provided.") self._user_id = sub return self._auth_with_jwt(sub, 'user') authenticate_app_user = authenticate_user @classmethod def _normalize_user_id(cls, user): """Get a Box user ID from a selection of supported param types. :param user: An object representing the user or user ID. Currently supported types are `unicode` (which represents the user ID) and :class:`User`. If `None`, returns `None`. :raises: :exc:`TypeError` for unsupported types. :rtype: `unicode` or `None` """ if user is None: return None if isinstance(user, User): return user.object_id if isinstance(user, string_types): return text_type(user) raise TypeError("Got unsupported type {0!r} for user.".format(user.__class__.__name__)) def authenticate_instance(self, enterprise=None): """ Get an access token for a Box Developer Edition enterprise. :param enterprise: The ID of the Box Developer Edition enterprise. Optional if the value was already given to `__init__`, otherwise required. :type enterprise: `unicode` or `None` :raises: :exc:`ValueError` if `None` was passed for the enterprise ID here and in `__init__`, or if the non-`None` value passed here does not match the non-`None` value passed to `__init__`. :return: The access token for the enterprise which can provision/deprovision app users. :rtype: `unicode` """ enterprises = [enterprise, self._enterprise_id] if not any(enterprises): raise ValueError("authenticate_instance: Requires the enterprise ID, but it was not provided.") if all(enterprises) and (enterprise != self._enterprise_id): raise ValueError( "authenticate_instance: Given enterprise ID {given_enterprise!r}, but {auth} already has ID {existing_enterprise!r}" .format(auth=self, given_enterprise=enterprise, existing_enterprise=self._enterprise_id) ) if not self._enterprise_id: self._enterprise_id = enterprise self._user_id = None return self._auth_with_jwt(self._enterprise_id, 'enterprise') def _refresh(self, access_token): """ Base class override. Instead of refreshing an access token using a refresh token, we just issue a new JWT request. 
""" # pylint:disable=unused-argument if self._user_id is None: new_access_token = self.authenticate_instance() else: new_access_token = self.authenticate_user() return new_access_token, None @classmethod def _normalize_rsa_private_key(cls, file_sys_path, data, passphrase=None): if len(list(filter(None, [file_sys_path, data]))) != 1: raise TypeError("must pass exactly one of either rsa_private_key_file_sys_path or rsa_private_key_data") if file_sys_path: with open(file_sys_path, 'rb') as key_file: data = key_file.read() if hasattr(data, 'read') and callable(data.read): data = data.read() if isinstance(data, text_type): try: data = data.encode('ascii') except UnicodeError: raise_from( TypeError("rsa_private_key_data must contain binary data (bytes/str), not a text/unicode string"), None, ) if isinstance(data, binary_type): passphrase = cls._normalize_rsa_private_key_passphrase(passphrase) return serialization.load_pem_private_key( data, password=passphrase, backend=default_backend(), ) if isinstance(data, RSAPrivateKey): return data raise TypeError( 'rsa_private_key_data must be binary data (bytes/str), ' 'a file-like object with a read() method, ' 'or an instance of RSAPrivateKey, ' 'but got {0!r}' .format(data.__class__.__name__) ) @staticmethod def _normalize_rsa_private_key_passphrase(passphrase): if isinstance(passphrase, text_type): try: return passphrase.encode('ascii') except UnicodeError: raise_from( TypeError("rsa_private_key_passphrase must contain binary data (bytes/str), not a text/unicode string"), None, ) if not isinstance(passphrase, (binary_type, NoneType)): raise TypeError( "rsa_private_key_passphrase must contain binary data (bytes/str), got {0!r}" .format(passphrase.__class__.__name__) ) return passphrase @classmethod def from_settings_dictionary(cls, settings_dictionary, **kwargs): """ Create an auth instance as defined by the given settings dictionary. The dictionary should have the structure of the JSON file downloaded from the Box Developer Console. :param settings_dictionary: Dictionary containing settings for configuring app auth. :type settings_dictionary: `dict` :return: Auth instance configured as specified by the config dictionary. :rtype: :class:`JWTAuth` """ if 'boxAppSettings' not in settings_dictionary: raise ValueError('boxAppSettings not present in configuration') return cls( client_id=settings_dictionary['boxAppSettings']['clientID'], client_secret=settings_dictionary['boxAppSettings']['clientSecret'], enterprise_id=settings_dictionary.get('enterpriseID', None), jwt_key_id=settings_dictionary['boxAppSettings']['appAuth'].get('publicKeyID', None), rsa_private_key_data=settings_dictionary['boxAppSettings']['appAuth'].get('privateKey', None), rsa_private_key_passphrase=settings_dictionary['boxAppSettings']['appAuth'].get('passphrase', None), **kwargs ) @classmethod def from_settings_file(cls, settings_file_sys_path, **kwargs): """ Create an auth instance as defined by a JSON file downloaded from the Box Developer Console. See https://developer.box.com/en/guides/authentication/jwt/ for more information. :param settings_file_sys_path: Path to the JSON file containing the configuration. :type settings_file_sys_path: `unicode` :return: Auth instance configured as specified by the JSON file. :rtype: :class:`JWTAuth` """ with open(settings_file_sys_path) as config_file: config_dictionary = json.load(config_file) return cls.from_settings_dictionary(config_dictionary, **kwargs)
# coding: utf-8 from __future__ import absolute_import, unicode_literals from contextlib import contextmanager from datetime import datetime, timedelta import io from itertools import cycle, product import json import random import string from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, generate_private_key as generate_rsa_private_key from cryptography.hazmat.primitives import serialization from mock import Mock, mock_open, patch, sentinel, call import pytest import pytz import requests from six import binary_type, string_types, text_type from boxsdk.auth.jwt_auth import JWTAuth from boxsdk.exception import BoxOAuthException from boxsdk.config import API from boxsdk.object.user import User @pytest.fixture(params=[16, 32, 128]) def jti_length(request): return request.param @pytest.fixture(params=('RS256', 'RS512')) def jwt_algorithm(request): return request.param @pytest.fixture(scope='module') def jwt_key_id(): return 'jwt_key_id_1' @pytest.fixture(scope='module') def rsa_private_key_object(): return generate_rsa_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) @pytest.fixture(params=(None, b'strong_password')) def rsa_passphrase(request): return request.param @pytest.fixture def rsa_private_key_bytes(rsa_private_key_object, rsa_passphrase): encryption = serialization.BestAvailableEncryption(rsa_passphrase) if rsa_passphrase else serialization.NoEncryption() return rsa_private_key_object.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=encryption, ) @pytest.fixture(scope='function') def successful_token_response(successful_token_mock, successful_token_json_response): # pylint:disable=redefined-outer-name response = successful_token_json_response.copy() del response['refresh_token'] successful_token_mock.json = Mock(return_value=response) successful_token_mock.ok = True successful_token_mock.content = json.dumps(response) successful_token_mock.status_code = 200 return successful_token_mock @pytest.mark.parametrize(('key_file', 'key_data'), [(None, None), ('fake sys path', 'fake key data')]) @pytest.mark.parametrize('rsa_passphrase', [None]) def test_jwt_auth_init_raises_type_error_unless_exactly_one_of_rsa_private_key_file_or_data_is_given(key_file, key_data, rsa_private_key_bytes): kwargs = dict( rsa_private_key_data=rsa_private_key_bytes, client_id=None, client_secret=None, jwt_key_id=None, enterprise_id=None, ) JWTAuth(**kwargs) kwargs.update(rsa_private_key_file_sys_path=key_file, rsa_private_key_data=key_data) with pytest.raises(TypeError): JWTAuth(**kwargs) @pytest.mark.parametrize('key_data', [object(), u'ƒøø']) @pytest.mark.parametrize('rsa_passphrase', [None]) def test_jwt_auth_init_raises_type_error_if_rsa_private_key_data_has_unexpected_type(key_data, rsa_private_key_bytes): kwargs = dict( rsa_private_key_data=rsa_private_key_bytes, client_id=None, client_secret=None, jwt_key_id=None, enterprise_id=None, ) JWTAuth(**kwargs) kwargs.update(rsa_private_key_data=key_data) with pytest.raises(TypeError): JWTAuth(**kwargs) @pytest.mark.parametrize('rsa_private_key_data_type', [io.BytesIO, text_type, binary_type, RSAPrivateKey]) def test_jwt_auth_init_accepts_rsa_private_key_data(rsa_private_key_bytes, rsa_passphrase, rsa_private_key_data_type): if rsa_private_key_data_type is text_type: rsa_private_key_data = text_type(rsa_private_key_bytes.decode('ascii')) elif rsa_private_key_data_type is RSAPrivateKey: 
rsa_private_key_data = serialization.load_pem_private_key( rsa_private_key_bytes, password=rsa_passphrase, backend=default_backend(), ) else: rsa_private_key_data = rsa_private_key_data_type(rsa_private_key_bytes) JWTAuth( rsa_private_key_data=rsa_private_key_data, rsa_private_key_passphrase=rsa_passphrase, client_id=None, client_secret=None, jwt_key_id=None, enterprise_id=None, ) @pytest.fixture(params=[False, True]) def pass_private_key_by_path(request): """For jwt_auth_init_mocks, whether to pass the private key via sys_path (True) or pass the data directly (False).""" return request.param @pytest.fixture def jwt_auth_init_mocks( mock_box_session, successful_token_response, jwt_algorithm, jwt_key_id, rsa_passphrase, rsa_private_key_bytes, pass_private_key_by_path, ): # pylint:disable=redefined-outer-name @contextmanager def _jwt_auth_init_mocks(**kwargs): assert_authed = kwargs.pop('assert_authed', True) fake_client_id = 'fake_client_id' fake_client_secret = 'fake_client_secret' assertion = Mock() data = { 'grant_type': JWTAuth._GRANT_TYPE, # pylint:disable=protected-access 'client_id': fake_client_id, 'client_secret': fake_client_secret, 'assertion': assertion, 'box_device_id': '0', 'box_device_name': 'my_awesome_device', } mock_box_session.request.return_value = successful_token_response with patch('boxsdk.auth.jwt_auth.open', mock_open(read_data=rsa_private_key_bytes), create=True) as jwt_auth_open: with patch('cryptography.hazmat.primitives.serialization.load_pem_private_key') as load_pem_private_key: oauth = JWTAuth( client_id=fake_client_id, client_secret=fake_client_secret, rsa_private_key_file_sys_path=(sentinel.rsa_path if pass_private_key_by_path else None), rsa_private_key_data=(None if pass_private_key_by_path else rsa_private_key_bytes), rsa_private_key_passphrase=rsa_passphrase, session=mock_box_session, box_device_name='my_awesome_device', jwt_algorithm=jwt_algorithm, jwt_key_id=jwt_key_id, enterprise_id=kwargs.pop('enterprise_id', None), **kwargs ) if pass_private_key_by_path: jwt_auth_open.assert_called_once_with(sentinel.rsa_path, 'rb') jwt_auth_open.return_value.read.assert_called_once_with() # pylint:disable=no-member else: jwt_auth_open.assert_not_called() load_pem_private_key.assert_called_once_with( rsa_private_key_bytes, password=rsa_passphrase, backend=default_backend(), ) yield oauth, assertion, fake_client_id, load_pem_private_key.return_value if assert_authed: mock_box_session.request.assert_called_once_with( 'POST', '{0}/token'.format(API.OAUTH2_API_URL), data=data, headers={'content-type': 'application/x-www-form-urlencoded'}, access_token=None, ) assert oauth.access_token == successful_token_response.json()['access_token'] return _jwt_auth_init_mocks def test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor(jwt_auth_init_and_auth_mocks): user = 'fake_user_id' with jwt_auth_init_and_auth_mocks(sub=user, sub_type='user', enterprise_id='fake_enterprise_id', user=user) as oauth: oauth.refresh(None) @pytest.mark.parametrize('jwt_auth_method_name', ['authenticate_user', 'authenticate_instance']) def test_authenticate_raises_value_error_if_sub_was_never_given(jwt_auth_init_mocks, jwt_auth_method_name): with jwt_auth_init_mocks(assert_authed=False) as params: auth = params[0] authenticate_method = getattr(auth, jwt_auth_method_name) with pytest.raises(ValueError): authenticate_method() def test_jwt_auth_constructor_raises_type_error_if_user_is_unsupported_type(jwt_auth_init_mocks): with pytest.raises(TypeError): with 
jwt_auth_init_mocks(user=object()): assert False def test_authenticate_user_raises_type_error_if_user_is_unsupported_type(jwt_auth_init_mocks): with jwt_auth_init_mocks(assert_authed=False) as params: auth = params[0] with pytest.raises(TypeError): auth.authenticate_user(object()) @pytest.mark.parametrize('user_id_for_init', [None, 'fake_user_id_1']) def test_authenticate_user_saves_user_id_for_future_calls(jwt_auth_init_and_auth_mocks, user_id_for_init, jwt_encode): def assert_jwt_encode_call_args(user_id): assert jwt_encode.call_args[0][0]['sub'] == user_id assert jwt_encode.call_args[0][0]['box_sub_type'] == 'user' jwt_encode.call_args = None with jwt_auth_init_and_auth_mocks(sub=None, sub_type=None, assert_authed=False, user=user_id_for_init) as auth: for new_user_id in ['fake_user_id_2', 'fake_user_id_3']: auth.authenticate_user(new_user_id) assert_jwt_encode_call_args(new_user_id) auth.authenticate_user() assert_jwt_encode_call_args(new_user_id) def test_authenticate_instance_raises_value_error_if_different_enterprise_id_is_given(jwt_auth_init_mocks): with jwt_auth_init_mocks(enterprise_id='fake_enterprise_id_1', assert_authed=False) as params: auth = params[0] with pytest.raises(ValueError): auth.authenticate_instance('fake_enterprise_id_2') def test_authenticate_instance_saves_enterprise_id_for_future_calls(jwt_auth_init_and_auth_mocks): enterprise_id = 'fake_enterprise_id' with jwt_auth_init_and_auth_mocks(sub=enterprise_id, sub_type='enterprise', assert_authed=False) as auth: auth.authenticate_instance(enterprise_id) auth.authenticate_instance() auth.authenticate_instance(enterprise_id) with pytest.raises(ValueError): auth.authenticate_instance('fake_enterprise_id_2') @pytest.yield_fixture def jwt_encode(): with patch('jwt.encode') as patched_jwt_encode: yield patched_jwt_encode @pytest.fixture def jwt_auth_auth_mocks(jti_length, jwt_algorithm, jwt_key_id, jwt_encode): @contextmanager def _jwt_auth_auth_mocks(sub, sub_type, oauth, assertion, client_id, secret, assert_authed=True): # pylint:disable=redefined-outer-name with patch('boxsdk.auth.jwt_auth.datetime') as mock_datetime: with patch('boxsdk.auth.jwt_auth.random.SystemRandom') as mock_system_random: jwt_encode.return_value = assertion mock_datetime.utcnow.return_value = datetime(2015, 7, 6, 12, 1, 2) mock_datetime.return_value = datetime(1970, 1, 1) now_plus_30 = mock_datetime.utcnow.return_value + timedelta(seconds=30) exp = int((now_plus_30 - datetime(1970, 1, 1)).total_seconds()) system_random = mock_system_random.return_value system_random.randint.return_value = jti_length random_choices = [random.random() for _ in range(jti_length)] # Use cycle so that we can do auth more than once inside the context manager. 
system_random.random.side_effect = cycle(random_choices) ascii_alphabet = string.ascii_letters + string.digits ascii_len = len(ascii_alphabet) jti = ''.join(ascii_alphabet[int(r * ascii_len)] for r in random_choices) yield oauth if assert_authed: system_random.randint.assert_called_once_with(16, 128) assert len(system_random.random.mock_calls) == jti_length jwt_encode.assert_called_once_with({ 'iss': client_id, 'sub': sub, 'box_sub_type': sub_type, 'aud': 'https://api.box.com/oauth2/token', 'jti': jti, 'exp': exp, }, secret, algorithm=jwt_algorithm, headers={'kid': jwt_key_id}) return _jwt_auth_auth_mocks @pytest.fixture def jwt_auth_init_and_auth_mocks(jwt_auth_init_mocks, jwt_auth_auth_mocks): @contextmanager def _jwt_auth_init_and_auth_mocks(sub, sub_type, *jwt_auth_init_mocks_args, **jwt_auth_init_mocks_kwargs): assert_authed = jwt_auth_init_mocks_kwargs.pop('assert_authed', True) with jwt_auth_init_mocks(*jwt_auth_init_mocks_args, assert_authed=assert_authed, **jwt_auth_init_mocks_kwargs) as params: with jwt_auth_auth_mocks(sub, sub_type, *params, assert_authed=assert_authed) as oauth: yield oauth return _jwt_auth_init_and_auth_mocks @pytest.mark.parametrize( ('user', 'pass_in_init'), list(product([str('fake_user_id'), text_type('fake_user_id'), User(None, 'fake_user_id')], [False, True])), ) def test_authenticate_user_sends_post_request_with_correct_params(jwt_auth_init_and_auth_mocks, user, pass_in_init): # pylint:disable=redefined-outer-name if isinstance(user, User): user_id = user.object_id elif isinstance(user, string_types): user_id = user else: raise NotImplementedError init_kwargs = {} authenticate_params = [] if pass_in_init: init_kwargs['user'] = user else: authenticate_params.append(user) with jwt_auth_init_and_auth_mocks(user_id, 'user', **init_kwargs) as oauth: oauth.authenticate_user(*authenticate_params) @pytest.mark.parametrize(('pass_in_init', 'pass_in_auth'), [(True, False), (False, True), (True, True)]) def test_authenticate_instance_sends_post_request_with_correct_params(jwt_auth_init_and_auth_mocks, pass_in_init, pass_in_auth): # pylint:disable=redefined-outer-name enterprise_id = 'fake_enterprise_id' init_kwargs = {} auth_params = [] if pass_in_init: init_kwargs['enterprise_id'] = enterprise_id if pass_in_auth: auth_params.append(enterprise_id) with jwt_auth_init_and_auth_mocks(enterprise_id, 'enterprise', **init_kwargs) as oauth: oauth.authenticate_instance(*auth_params) def test_refresh_app_user_sends_post_request_with_correct_params(jwt_auth_init_and_auth_mocks): # pylint:disable=redefined-outer-name fake_user_id = 'fake_user_id' with jwt_auth_init_and_auth_mocks(fake_user_id, 'user', user=fake_user_id) as oauth: oauth.refresh(None) def test_refresh_instance_sends_post_request_with_correct_params(jwt_auth_init_and_auth_mocks): # pylint:disable=redefined-outer-name enterprise_id = 'fake_enterprise_id' with jwt_auth_init_and_auth_mocks(enterprise_id, 'enterprise', enterprise_id=enterprise_id) as oauth: oauth.refresh(None) @pytest.fixture() def jwt_subclass_that_just_stores_params(): class StoreParamJWTAuth(JWTAuth): def __init__(self, **kwargs): self.kwargs = kwargs super(StoreParamJWTAuth, self).__init__(**kwargs) return StoreParamJWTAuth @pytest.fixture def fake_client_id(): return 'fake_client_id' @pytest.fixture def fake_client_secret(): return 'fake_client_secret' @pytest.fixture def fake_enterprise_id(): return 'fake_enterprise_id' @pytest.fixture def app_config_json_content( fake_client_id, fake_client_secret, fake_enterprise_id, jwt_key_id, 
rsa_private_key_bytes, rsa_passphrase, ): template = r""" {{ "boxAppSettings": {{ "clientID": "{client_id}", "clientSecret": "{client_secret}", "appAuth": {{ "publicKeyID": "{jwt_key_id}", "privateKey": "{private_key}", "passphrase": {passphrase} }} }}, "enterpriseID": {enterprise_id} }}""" return template.format( client_id=fake_client_id, client_secret=fake_client_secret, jwt_key_id=jwt_key_id, private_key=rsa_private_key_bytes.replace(b"\n", b"\\n").decode(), passphrase=json.dumps(rsa_passphrase and rsa_passphrase.decode()), enterprise_id=json.dumps(fake_enterprise_id), ) @pytest.fixture() def assert_jwt_kwargs_expected( fake_client_id, fake_client_secret, fake_enterprise_id, jwt_key_id, rsa_private_key_bytes, rsa_passphrase, ): def _assert_jwt_kwargs_expected(jwt_auth): assert jwt_auth.kwargs['client_id'] == fake_client_id assert jwt_auth.kwargs['client_secret'] == fake_client_secret assert jwt_auth.kwargs['enterprise_id'] == fake_enterprise_id assert jwt_auth.kwargs['jwt_key_id'] == jwt_key_id assert jwt_auth.kwargs['rsa_private_key_data'] == rsa_private_key_bytes.decode() assert jwt_auth.kwargs['rsa_private_key_passphrase'] == (rsa_passphrase and rsa_passphrase.decode()) return _assert_jwt_kwargs_expected def test_from_config_file( jwt_subclass_that_just_stores_params, app_config_json_content, assert_jwt_kwargs_expected, ): # pylint:disable=redefined-outer-name with patch('boxsdk.auth.jwt_auth.open', mock_open(read_data=app_config_json_content), create=True): jwt_auth_from_config_file = jwt_subclass_that_just_stores_params.from_settings_file('fake_config_file_sys_path') assert_jwt_kwargs_expected(jwt_auth_from_config_file) def test_from_settings_dictionary( jwt_subclass_that_just_stores_params, app_config_json_content, assert_jwt_kwargs_expected, ): jwt_auth_from_dictionary = jwt_subclass_that_just_stores_params.from_settings_dictionary(json.loads(app_config_json_content)) assert_jwt_kwargs_expected(jwt_auth_from_dictionary) @pytest.fixture def expect_auth_retry(status_code, error_description, include_date_header, error_code): return status_code == 400 and 'exp' in error_description and include_date_header and error_code == 'invalid_grant' @pytest.fixture def box_datetime(): return datetime.now(tz=pytz.utc) - timedelta(100) @pytest.fixture def unsuccessful_jwt_response(box_datetime, status_code, error_description, include_date_header, error_code): headers = {'Date': box_datetime.strftime('%a, %d %b %Y %H:%M:%S %Z')} if include_date_header else {} unsuccessful_response = Mock(requests.Response(), headers=headers) unsuccessful_response.json.return_value = {'error_description': error_description, 'error': error_code} unsuccessful_response.status_code = status_code unsuccessful_response.ok = False return unsuccessful_response @pytest.mark.parametrize('jwt_algorithm', ('RS512',)) @pytest.mark.parametrize('rsa_passphrase', (None,)) @pytest.mark.parametrize('pass_private_key_by_path', (False,)) @pytest.mark.parametrize('status_code', (400, 401)) @pytest.mark.parametrize('error_description', ('invalid box_sub_type claim', 'invalid kid', "check the 'exp' claim")) @pytest.mark.parametrize('error_code', ('invalid_grant', 'bad_request')) @pytest.mark.parametrize('include_date_header', (True, False)) def test_auth_retry_for_invalid_exp_claim( jwt_auth_init_mocks, expect_auth_retry, unsuccessful_jwt_response, box_datetime, ): # pylint:disable=redefined-outer-name enterprise_id = 'fake_enterprise_id' with jwt_auth_init_mocks(assert_authed=False) as params: auth = params[0] with patch.object(auth, 
'_construct_and_send_jwt_auth') as mock_send_jwt: mock_send_jwt.side_effect = [BoxOAuthException(400, network_response=unsuccessful_jwt_response), 'jwt_token'] if not expect_auth_retry: with pytest.raises(BoxOAuthException): auth.authenticate_instance(enterprise_id) else: auth.authenticate_instance(enterprise_id) expected_calls = [call(enterprise_id, 'enterprise', None)] if expect_auth_retry: expected_calls.append(call(enterprise_id, 'enterprise', box_datetime.replace(microsecond=0, tzinfo=None))) assert len(mock_send_jwt.mock_calls) == len(expected_calls) mock_send_jwt.assert_has_calls(expected_calls) @pytest.mark.parametrize('jwt_algorithm', ('RS512',)) @pytest.mark.parametrize('rsa_passphrase', (None,)) @pytest.mark.parametrize('pass_private_key_by_path', (False,)) @pytest.mark.parametrize('status_code', (429,)) @pytest.mark.parametrize('error_description', ('Request rate limit exceeded',)) @pytest.mark.parametrize('error_code', ('rate_limit_exceeded',)) @pytest.mark.parametrize('include_date_header', (False,)) def test_auth_retry_for_rate_limit_error( jwt_auth_init_mocks, unsuccessful_jwt_response, ): # pylint:disable=redefined-outer-name enterprise_id = 'fake_enterprise_id' with jwt_auth_init_mocks(assert_authed=False) as params: auth = params[0] with patch.object(auth, '_construct_and_send_jwt_auth') as mock_send_jwt: side_effect = [] expected_calls = [] # Retries multiple times, but less than max retries. Then succeeds when it gets a token. for _ in range(API.MAX_RETRY_ATTEMPTS - 2): side_effect.append(BoxOAuthException(429, network_response=unsuccessful_jwt_response)) expected_calls.append(call(enterprise_id, 'enterprise', None)) side_effect.append('jwt_token') expected_calls.append(call(enterprise_id, 'enterprise', None)) mock_send_jwt.side_effect = side_effect auth.authenticate_instance(enterprise_id) assert len(mock_send_jwt.mock_calls) == len(expected_calls) mock_send_jwt.assert_has_calls(expected_calls) @pytest.mark.parametrize('jwt_algorithm', ('RS512',)) @pytest.mark.parametrize('rsa_passphrase', (None,)) @pytest.mark.parametrize('pass_private_key_by_path', (False,)) @pytest.mark.parametrize('status_code', (429,)) @pytest.mark.parametrize('error_description', ('Request rate limit exceeded',)) @pytest.mark.parametrize('error_code', ('rate_limit_exceeded',)) @pytest.mark.parametrize('include_date_header', (False,)) def test_auth_max_retries_for_rate_limit_error( jwt_auth_init_mocks, unsuccessful_jwt_response, ): # pylint:disable=redefined-outer-name enterprise_id = 'fake_enterprise_id' with jwt_auth_init_mocks(assert_authed=False) as params: auth = params[0] with patch.object(auth, '_construct_and_send_jwt_auth') as mock_send_jwt: side_effect = [] expected_calls = [] # Retries max number of times, then throws the error for _ in range(API.MAX_RETRY_ATTEMPTS + 1): side_effect.append(BoxOAuthException(429, network_response=unsuccessful_jwt_response)) expected_calls.append(call(enterprise_id, 'enterprise', None)) mock_send_jwt.side_effect = side_effect with pytest.raises(BoxOAuthException) as error: auth.authenticate_instance(enterprise_id) assert error.value.status == 429 assert len(mock_send_jwt.mock_calls) == len(expected_calls) mock_send_jwt.assert_has_calls(expected_calls) @pytest.mark.parametrize('jwt_algorithm', ('RS512',)) @pytest.mark.parametrize('rsa_passphrase', (None,)) @pytest.mark.parametrize('pass_private_key_by_path', (False,)) @pytest.mark.parametrize('status_code', (500,)) @pytest.mark.parametrize('error_description', ('Internal Server Error',)) 
@pytest.mark.parametrize('error_code', ('internal_server_error',)) @pytest.mark.parametrize('include_date_header', (False,)) def test_auth_retry_for_internal_server_error( jwt_auth_init_mocks, unsuccessful_jwt_response, ): # pylint:disable=redefined-outer-name enterprise_id = 'fake_enterprise_id' with jwt_auth_init_mocks(assert_authed=False) as params: auth = params[0] with patch.object(auth, '_construct_and_send_jwt_auth') as mock_send_jwt: side_effect = [] expected_calls = [] # Retries multiple times, but less than max retries. Then succeeds when it gets a token. for _ in range(API.MAX_RETRY_ATTEMPTS - 2): side_effect.append(BoxOAuthException(500, network_response=unsuccessful_jwt_response)) expected_calls.append(call(enterprise_id, 'enterprise', None)) side_effect.append('jwt_token') expected_calls.append(call(enterprise_id, 'enterprise', None)) mock_send_jwt.side_effect = side_effect auth.authenticate_instance(enterprise_id) assert len(mock_send_jwt.mock_calls) == len(expected_calls) mock_send_jwt.assert_has_calls(expected_calls) @pytest.mark.parametrize('jwt_algorithm', ('RS512',)) @pytest.mark.parametrize('rsa_passphrase', (None,)) @pytest.mark.parametrize('pass_private_key_by_path', (False,)) @pytest.mark.parametrize('status_code', (500,)) @pytest.mark.parametrize('error_description', ('Internal Server Error',)) @pytest.mark.parametrize('error_code', ('internal_server_error',)) @pytest.mark.parametrize('include_date_header', (False,)) def test_auth_max_retries_for_internal_server_error( jwt_auth_init_mocks, unsuccessful_jwt_response, ): # pylint:disable=redefined-outer-name enterprise_id = 'fake_enterprise_id' with jwt_auth_init_mocks(assert_authed=False) as params: auth = params[0] with patch.object(auth, '_construct_and_send_jwt_auth') as mock_send_jwt: side_effect = [] expected_calls = [] # Retries max number of times, then throws the error for _ in range(API.MAX_RETRY_ATTEMPTS + 1): side_effect.append(BoxOAuthException(500, network_response=unsuccessful_jwt_response)) expected_calls.append(call(enterprise_id, 'enterprise', None)) mock_send_jwt.side_effect = side_effect with pytest.raises(BoxOAuthException) as error: auth.authenticate_instance(enterprise_id) assert error.value.status == 500 assert len(mock_send_jwt.mock_calls) == len(expected_calls) mock_send_jwt.assert_has_calls(expected_calls)
box/box-python-sdk
test/unit/auth/test_jwt_auth.py
boxsdk/auth/jwt_auth.py
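For orientation on this row, here is a minimal, hedged sketch of how the JWTAuth class above is typically driven; it is not part of the dataset row, and the config path and user ID below are placeholders.

from boxsdk.auth.jwt_auth import JWTAuth

# Build auth from the JSON config downloaded from the Box Developer Console,
# exactly what from_settings_file() expects (hypothetical path).
auth = JWTAuth.from_settings_file('/path/to/box_config.json')

# Service-account token for the enterprise configured in the JSON file...
enterprise_token = auth.authenticate_instance()

# ...or a token for a specific app user (fake ID shown here).
user_token = auth.authenticate_user('12345678')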
from typing import List, Iterator, Union from collections import Counter from KINCluster.core.item import Item from KINCluster.lib.tokenizer import tokenizer from KINCluster import settings import numpy as np from gensim.models import Doc2Vec from gensim.models.doc2vec import TaggedDocument from scipy.cluster import hierarchy as hcluster class Cluster: def __init__(self, **kwargs): """hyperparameters :alpha = learning rate :min_alph = minimum learning reate :window = max value of vector :size = vector size :tokenizer = lambda document: str -> list or words: List[str] """ def getattrs(module): keys = [k for k in dir(module) if not k.startswith('__')] return {key: getattr(module, key) for key in keys} if 'settings' not in kwargs: self.settings = getattrs(settings) else: self.settings = kwargs['settings'] alpha = kwargs.get("alpha", self.settings['LEARNING_RATE']) min_alpha = kwargs.get("min_alpha", self.settings['LEARNING_RATE_MIN']) window = kwargs.get("window", self.settings['WINDOW']) size = kwargs.get("size", self.settings['SIZE']) self.trate = kwargs.get("trate", self.settings['TRANING_RATE']) self.epoch = kwargs.get("epoch", self.settings['EPOCH']) self.thresh = kwargs.get("thresh", self.settings['THRESHOLD']) self.tokenizer = tokenizer.s[kwargs.get("tokenizer", self.settings['TOKENIZER'])] self.model = Doc2Vec(alpha=alpha, min_alpha=min_alpha, window=window, size=size) self._items = [] self._counters = [] self._vectors = [] self._clusters = [] self._dumps = [] def put_item(self, item: Item): self._items.append(item) def __vocabs(self) -> Iterator[TaggedDocument]: for idx, item in enumerate(self._items): token = self.tokenizer(repr(item)) self._counters.append(Counter(token)) yield TaggedDocument(token, ['line_%s' % idx]) def __documents(self) -> Iterator[TaggedDocument]: for idx, item in enumerate(self._items): yield TaggedDocument(self.tokenizer(str(item)), ['line_%s' % idx]) def __cluster(self, method, metric, criterion) -> np.ndarray: return hcluster.fclusterdata(self._vectors, self.thresh, method=method, metric=metric, criterion=criterion) def cluster(self): # COMMENT: Top keyword 만 잘라서 분류해보기 """cluster process : build vocab, using repr of item : train items, using str of item : get _vectors and _clusters """ self.model.build_vocab(self.__vocabs()) documents = list(self.__documents()) for _ in range(self.epoch): self.model.train(documents) self.model.alpha *= self.trate self.model.min_alpha = self.model.alpha self._vectors = np.array(self.model.docvecs) self._clusters = self.__cluster(self.settings['METHOD'], self.settings['METRIC'], self.settings['CRITERION']) dumps = {c: [] for c in self.unique} for cluster, item, vector, counter in zip(self._clusters, self._items, self._vectors, self._counters): dumps[cluster].append((item, vector, counter)) self._dumps = list(dumps.values()) def similar(self, pos, neg=[], top=10): return self.model.most_similar(positive=pos, negative=neg, topn=top) @property def items(self) -> List[Item]: return self._items @property def vocab(self) -> List[str]: return self.model.vocab @property def vocab_count(self) -> List[Counter]: return self._counters @property def dumps(self) -> List[List[Union[Item, np.ndarray]]]: return self._dumps @property def vectors(self) -> np.ndarray: return self._vectors @property def unique(self) -> np.ndarray: return np.unique(self._clusters) @property def clusters(self) -> np.ndarray: return self._clusters @property def distribution(self) -> np.ndarray: return Counter(self._clusters) def __len__(self): return 
len(self._clusters)
# -*- coding: utf-8 -*-
"""
tests.cluster
---------------

Test cluster of KINCluster

:author: MaybeS(maytryark@gmail.com)
"""
import pytest

from KINCluster.core.extractor import Extractor, extractable
from KINCluster.core.cluster import Cluster
from KINCluster.core.pipeline import Pipeline
from KINCluster.core.item import Item
from KINCluster.lib.tokenizer import tokenize, stemize

import codecs

test_text = ['2016헌나1.txt', '2014헌나1.txt']
test_keyword = ['헌법판결문', '헌법판결문']


class Pipeline(Pipeline):
    def capture_item(self):
        for text, keyword in zip(test_text, test_keyword):
            with codecs.open('tests/data/' + text, 'r', 'utf-8') as f:
                content = f.read()
            yield Item(title=text, content=content, keyword=keyword, date='')


def test_extractor1():
    cluster = Cluster(epoch=32, tokenizer="tokenize")
    pipeline = Pipeline()
    for item in pipeline.capture_item():
        cluster.put_item(item)
    cluster.cluster()

    extractor = Extractor(cluster)
    for idx, dump in enumerate(cluster.dumps):
        items, vectors, counter = map(list, zip(*dump))
        assert set(['items', 'vectors', 'counter', 'center', 'keywords']) == set(extractable.s.keys())
        extracted = extractor.dump(idx)
        assert isinstance(extracted, Item)
        assert isinstance(extracted.keywords, list)
        assert 32 == len(extracted.keywords)
memento7/KINCluster
tests/test_extractor.py
KINCluster/core/cluster.py
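For context on this row, a rough sketch of the put_item / cluster / dumps workflow that the test above exercises; the imports and method names match the code shown, while the sample items are invented placeholders and real runs would use larger documents (and the project's tokenizer).

from KINCluster.core.cluster import Cluster
from KINCluster.core.item import Item

cluster = Cluster(epoch=32, tokenizer="tokenize")

# Placeholder items; the test above feeds full court-ruling documents instead.
for title, content in [("doc-1", "first document text"),
                       ("doc-2", "second document text")]:
    cluster.put_item(Item(title=title, content=content, keyword="example", date=""))

cluster.cluster()

# Each dump is a list of (item, vector, counter) tuples for one cluster.
for group in cluster.dumps:
    items, vectors, counters = map(list, zip(*group))
    print(len(items), cluster.distribution)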
# Copyright 2015 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from impala.error import Error as ImpylaError  # noqa
from impala.error import HiveServer2Error as HS2Error  # noqa
import impala.dbapi as impyla  # noqa
# Copyright 2015 Cloudera Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np import pandas as pd import pytest import ibis import ibis.expr.datatypes as dt import ibis.expr.types as ir from ibis.compat import unittest from ibis.common import IbisTypeError from ibis.impala.client import pandas_to_ibis_schema from ibis.impala.tests.common import ImpalaE2E functional_alltypes_with_nulls = pd.DataFrame({ 'bigint_col': np.int64([0, 10, 20, 30, 40, 50, 60, 70, 80, 90]), 'bool_col': np.bool_([True, False, True, False, True, None, True, False, True, False]), 'date_string_col': ['11/01/10', None, '11/01/10', '11/01/10', '11/01/10', '11/01/10', '11/01/10', '11/01/10', '11/01/10', '11/01/10'], 'double_col': np.float64([0.0, 10.1, None, 30.299999999999997, 40.399999999999999, 50.5, 60.599999999999994, 70.700000000000003, 80.799999999999997, 90.899999999999991]), 'float_col': np.float32([None, 1.1000000238418579, 2.2000000476837158, 3.2999999523162842, 4.4000000953674316, 5.5, 6.5999999046325684, 7.6999998092651367, 8.8000001907348633, 9.8999996185302734]), 'int_col': np.int32([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), 'month': [11, 11, 11, 11, 2, 11, 11, 11, 11, 11], 'smallint_col': np.int16([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), 'string_col': ['0', '1', None, '3', '4', '5', '6', '7', '8', '9'], 'timestamp_col': [pd.Timestamp('2010-11-01 00:00:00'), None, pd.Timestamp('2010-11-01 00:02:00.100000'), pd.Timestamp('2010-11-01 00:03:00.300000'), pd.Timestamp('2010-11-01 00:04:00.600000'), pd.Timestamp('2010-11-01 00:05:00.100000'), pd.Timestamp('2010-11-01 00:06:00.150000'), pd.Timestamp('2010-11-01 00:07:00.210000'), pd.Timestamp('2010-11-01 00:08:00.280000'), pd.Timestamp('2010-11-01 00:09:00.360000')], 'tinyint_col': np.int8([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), 'year': [2010, 2010, 2010, 2010, 2010, 2010, 2010, 2010, 2010, 2010]}) class TestPandasTypeInterop(unittest.TestCase): def test_series_to_ibis_literal(self): values = [1, 2, 3, 4] s = pd.Series(values) expr = ir.as_value_expr(s) expected = ir.sequence(list(s)) assert expr.equals(expected) class TestPandasSchemaInference(unittest.TestCase): def test_dtype_bool(self): df = pd.DataFrame({'col': [True, False, False]}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'boolean')]) assert inferred == expected def test_dtype_int8(self): df = pd.DataFrame({'col': np.int8([-3, 9, 17])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'int8')]) assert inferred == expected def test_dtype_int16(self): df = pd.DataFrame({'col': np.int16([-5, 0, 12])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'int16')]) assert inferred == expected def test_dtype_int32(self): df = pd.DataFrame({'col': np.int32([-12, 3, 25000])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'int32')]) assert inferred == expected def test_dtype_int64(self): df = pd.DataFrame({'col': np.int64([102, 67228734, -0])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'int64')]) assert inferred == expected 
def test_dtype_float32(self): df = pd.DataFrame({'col': np.float32([45e-3, -0.4, 99.])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'float')]) assert inferred == expected def test_dtype_float64(self): df = pd.DataFrame({'col': np.float64([-3e43, 43., 10000000.])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'double')]) assert inferred == expected def test_dtype_uint8(self): df = pd.DataFrame({'col': np.uint8([3, 0, 16])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'int16')]) assert inferred == expected def test_dtype_uint16(self): df = pd.DataFrame({'col': np.uint16([5569, 1, 33])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'int32')]) assert inferred == expected def test_dtype_uint32(self): df = pd.DataFrame({'col': np.uint32([100, 0, 6])}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'int64')]) assert inferred == expected def test_dtype_uint64(self): df = pd.DataFrame({'col': np.uint64([666, 2, 3])}) with self.assertRaises(IbisTypeError): inferred = pandas_to_ibis_schema(df) # noqa def test_dtype_datetime64(self): df = pd.DataFrame({ 'col': [pd.Timestamp('2010-11-01 00:01:00'), pd.Timestamp('2010-11-01 00:02:00.1000'), pd.Timestamp('2010-11-01 00:03:00.300000')]}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'timestamp')]) assert inferred == expected def test_dtype_timedelta64(self): df = pd.DataFrame({ 'col': [pd.Timedelta('1 days'), pd.Timedelta('-1 days 2 min 3us'), pd.Timedelta('-2 days +23:57:59.999997')]}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'int64')]) assert inferred == expected def test_dtype_string(self): df = pd.DataFrame({'col': ['foo', 'bar', 'hello']}) inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', 'string')]) assert inferred == expected def test_dtype_categorical(self): df = pd.DataFrame({'col': ['a', 'b', 'c', 'a']}, dtype='category') inferred = pandas_to_ibis_schema(df) expected = ibis.schema([('col', dt.Category(3))]) assert inferred == expected class TestPandasRoundTrip(ImpalaE2E, unittest.TestCase): def test_round_trip(self): pytest.skip('fails') df1 = self.alltypes.execute() df2 = self.con.pandas(df1, 'bamboo', database=self.tmp_db).execute() assert (df1.columns == df2.columns).all() assert (df1.dtypes == df2.dtypes).all() assert (df1 == df2).all().all() def test_round_trip_non_int_missing_data(self): pytest.skip('WM: hangs -- will investigate later') df1 = functional_alltypes_with_nulls table = self.con.pandas(df1, 'fawn', database=self.tmp_db) df2 = table.execute() assert (df1.columns == df2.columns).all() assert (df1.dtypes == df2.dtypes).all() # bool/int cols should be exact assert (df1.bool_col == df2.bool_col).all() assert (df1.tinyint_col == df2.tinyint_col).all() assert (df1.smallint_col == df2.smallint_col).all() assert (df1.int_col == df2.int_col).all() assert (df1.bigint_col == df2.bigint_col).all() assert (df1.month == df2.month).all() assert (df1.year == df2.year).all() # string cols should be equal everywhere except for the NULLs assert ((df1.string_col == df2.string_col) == [1, 1, 0, 1, 1, 1, 1, 1, 1, 1]).all() assert ((df1.date_string_col == df2.date_string_col) == [1, 0, 1, 1, 1, 1, 1, 1, 1, 1]).all() # float cols within tolerance, and NULLs should be False assert ((df1.double_col - df2.double_col < 1e-9) == [1, 1, 0, 1, 1, 1, 1, 1, 1, 1]).all() assert ((df1.float_col - df2.float_col < 1e-9) == [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]).all() def 
test_round_trip_missing_type_promotion(self): pytest.skip('unfinished') # prepare Impala table with missing ints # TODO: switch to self.con.raw_sql once #412 is fixed create_query = ('CREATE TABLE {0}.missing_ints ' ' (tinyint_col TINYINT, bigint_col BIGINT) ' 'STORED AS PARQUET'.format(self.tmp_db)) insert_query = ('INSERT INTO {0}.missing_ints ' 'VALUES (NULL, 3), (-5, NULL), (19, 444444)'.format( self.tmp_db)) self.con.con.cursor.execute(create_query) self.con.con.cursor.execute(insert_query) table = self.con.table('missing_ints', database=self.tmp_db) df = table.execute() # noqa # REMOVE LATER # WHAT NOW?
aslihandincer/ibis
ibis/impala/tests/test_pandas_interop.py
ibis/impala/compat.py
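For context, a small sketch of the schema-inference helper the tests above cover; the dtype-to-type mapping is taken directly from the test cases shown, and it assumes the 2015-era ibis.impala API in which pandas_to_ibis_schema lives in ibis.impala.client.

import numpy as np
import pandas as pd

import ibis
from ibis.impala.client import pandas_to_ibis_schema

df = pd.DataFrame({
    'flag': [True, False, True],           # bool    -> boolean
    'label': ['a', 'b', 'c'],              # object  -> string
    'small': np.int16([1, 2, 3]),          # int16   -> int16
    'value': np.float64([0.1, 0.2, 0.3]),  # float64 -> double
})

inferred = pandas_to_ibis_schema(df)
assert inferred == ibis.schema([
    ('flag', 'boolean'),
    ('label', 'string'),
    ('small', 'int16'),
    ('value', 'double'),
])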
"""Component to integrate the Home Assistant cloud.""" from hass_nabucasa import Cloud import voluptuous as vol from homeassistant.components.alexa import const as alexa_const from homeassistant.components.google_assistant import const as ga_c from homeassistant.const import ( CONF_DESCRIPTION, CONF_MODE, CONF_NAME, CONF_REGION, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entityfilter from homeassistant.loader import bind_hass from homeassistant.util.aiohttp import MockRequest from . import account_link, http_api from .client import CloudClient from .const import ( CONF_ACCOUNT_LINK_URL, CONF_ACME_DIRECTORY_SERVER, CONF_ALEXA, CONF_ALEXA_ACCESS_TOKEN_URL, CONF_ALIASES, CONF_CLOUDHOOK_CREATE_URL, CONF_COGNITO_CLIENT_ID, CONF_ENTITY_CONFIG, CONF_FILTER, CONF_GOOGLE_ACTIONS, CONF_GOOGLE_ACTIONS_REPORT_STATE_URL, CONF_RELAYER, CONF_REMOTE_API_URL, CONF_SUBSCRIPTION_INFO_URL, CONF_USER_POOL_ID, CONF_VOICE_API_URL, DOMAIN, MODE_DEV, MODE_PROD, ) from .prefs import CloudPreferences DEFAULT_MODE = MODE_PROD SERVICE_REMOTE_CONNECT = "remote_connect" SERVICE_REMOTE_DISCONNECT = "remote_disconnect" ALEXA_ENTITY_SCHEMA = vol.Schema( { vol.Optional(CONF_DESCRIPTION): cv.string, vol.Optional(alexa_const.CONF_DISPLAY_CATEGORIES): cv.string, vol.Optional(CONF_NAME): cv.string, } ) GOOGLE_ENTITY_SCHEMA = vol.Schema( { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_ALIASES): vol.All(cv.ensure_list, [cv.string]), vol.Optional(ga_c.CONF_ROOM_HINT): cv.string, } ) ASSISTANT_SCHEMA = vol.Schema( {vol.Optional(CONF_FILTER, default=dict): entityfilter.FILTER_SCHEMA} ) ALEXA_SCHEMA = ASSISTANT_SCHEMA.extend( {vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: ALEXA_ENTITY_SCHEMA}} ) GACTIONS_SCHEMA = ASSISTANT_SCHEMA.extend( {vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: GOOGLE_ENTITY_SCHEMA}} ) # pylint: disable=no-value-for-parameter CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.In( [MODE_DEV, MODE_PROD] ), vol.Optional(CONF_COGNITO_CLIENT_ID): str, vol.Optional(CONF_USER_POOL_ID): str, vol.Optional(CONF_REGION): str, vol.Optional(CONF_RELAYER): str, vol.Optional(CONF_SUBSCRIPTION_INFO_URL): vol.Url(), vol.Optional(CONF_CLOUDHOOK_CREATE_URL): vol.Url(), vol.Optional(CONF_REMOTE_API_URL): vol.Url(), vol.Optional(CONF_ACME_DIRECTORY_SERVER): vol.Url(), vol.Optional(CONF_ALEXA): ALEXA_SCHEMA, vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA, vol.Optional(CONF_ALEXA_ACCESS_TOKEN_URL): vol.Url(), vol.Optional(CONF_GOOGLE_ACTIONS_REPORT_STATE_URL): vol.Url(), vol.Optional(CONF_ACCOUNT_LINK_URL): vol.Url(), vol.Optional(CONF_VOICE_API_URL): vol.Url(), } ) }, extra=vol.ALLOW_EXTRA, ) class CloudNotAvailable(HomeAssistantError): """Raised when an action requires the cloud but it's not available.""" @bind_hass @callback def async_is_logged_in(hass) -> bool: """Test if user is logged in.""" return DOMAIN in hass.data and hass.data[DOMAIN].is_logged_in @bind_hass @callback def async_active_subscription(hass) -> bool: """Test if user has an active subscription.""" return async_is_logged_in(hass) and not hass.data[DOMAIN].subscription_expired @bind_hass async def async_create_cloudhook(hass, webhook_id: str) -> str: """Create a cloudhook.""" if not async_is_logged_in(hass): raise CloudNotAvailable hook = await hass.data[DOMAIN].cloudhooks.async_create(webhook_id, True) return hook["cloudhook_url"] @bind_hass async def 
async_delete_cloudhook(hass, webhook_id: str) -> None: """Delete a cloudhook.""" if DOMAIN not in hass.data: raise CloudNotAvailable await hass.data[DOMAIN].cloudhooks.async_delete(webhook_id) @bind_hass @callback def async_remote_ui_url(hass) -> str: """Get the remote UI URL.""" if not async_is_logged_in(hass): raise CloudNotAvailable if not hass.data[DOMAIN].client.prefs.remote_enabled: raise CloudNotAvailable if not hass.data[DOMAIN].remote.instance_domain: raise CloudNotAvailable return f"https://{hass.data[DOMAIN].remote.instance_domain}" def is_cloudhook_request(request): """Test if a request came from a cloudhook. Async friendly. """ return isinstance(request, MockRequest) async def async_setup(hass, config): """Initialize the Home Assistant cloud.""" # Process configs if DOMAIN in config: kwargs = dict(config[DOMAIN]) else: kwargs = {CONF_MODE: DEFAULT_MODE} # Alexa/Google custom config alexa_conf = kwargs.pop(CONF_ALEXA, None) or ALEXA_SCHEMA({}) google_conf = kwargs.pop(CONF_GOOGLE_ACTIONS, None) or GACTIONS_SCHEMA({}) # Cloud settings prefs = CloudPreferences(hass) await prefs.async_initialize() # Initialize Cloud websession = hass.helpers.aiohttp_client.async_get_clientsession() client = CloudClient(hass, prefs, websession, alexa_conf, google_conf) cloud = hass.data[DOMAIN] = Cloud(client, **kwargs) async def _shutdown(event): """Shutdown event.""" await cloud.stop() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown) async def _service_handler(service): """Handle service for cloud.""" if service.service == SERVICE_REMOTE_CONNECT: await cloud.remote.connect() await prefs.async_update(remote_enabled=True) elif service.service == SERVICE_REMOTE_DISCONNECT: await cloud.remote.disconnect() await prefs.async_update(remote_enabled=False) hass.helpers.service.async_register_admin_service( DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler ) hass.helpers.service.async_register_admin_service( DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler ) loaded = False async def _on_connect(): """Discover RemoteUI binary sensor.""" nonlocal loaded # Prevent multiple discovery if loaded: return loaded = True await hass.helpers.discovery.async_load_platform( "binary_sensor", DOMAIN, {}, config ) await hass.helpers.discovery.async_load_platform("stt", DOMAIN, {}, config) await hass.helpers.discovery.async_load_platform("tts", DOMAIN, {}, config) cloud.iot.register_on_connect(_on_connect) await cloud.start() await http_api.async_setup(hass) account_link.async_setup(hass) return True
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/cloud/__init__.py
"""Support for hydrological data from the Fed. Office for the Environment.""" from datetime import timedelta import logging from swisshydrodata import SwissHydroData import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ATTR_ATTRIBUTION, CONF_MONITORED_CONDITIONS import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by the Swiss Federal Office for the Environment FOEN" ATTR_DELTA_24H = "delta-24h" ATTR_MAX_1H = "max-1h" ATTR_MAX_24H = "max-24h" ATTR_MEAN_1H = "mean-1h" ATTR_MEAN_24H = "mean-24h" ATTR_MIN_1H = "min-1h" ATTR_MIN_24H = "min-24h" ATTR_PREVIOUS_24H = "previous-24h" ATTR_STATION = "station" ATTR_STATION_UPDATE = "station_update" ATTR_WATER_BODY = "water_body" ATTR_WATER_BODY_TYPE = "water_body_type" CONF_STATION = "station" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) SENSOR_DISCHARGE = "discharge" SENSOR_LEVEL = "level" SENSOR_TEMPERATURE = "temperature" CONDITIONS = { SENSOR_DISCHARGE: "mdi:waves", SENSOR_LEVEL: "mdi:zodiac-aquarius", SENSOR_TEMPERATURE: "mdi:oil-temperature", } CONDITION_DETAILS = [ ATTR_DELTA_24H, ATTR_MAX_1H, ATTR_MAX_24H, ATTR_MEAN_1H, ATTR_MEAN_24H, ATTR_MIN_1H, ATTR_MIN_24H, ATTR_PREVIOUS_24H, ] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_STATION): vol.Coerce(int), vol.Optional(CONF_MONITORED_CONDITIONS, default=[SENSOR_TEMPERATURE]): vol.All( cv.ensure_list, [vol.In(CONDITIONS)] ), } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Swiss hydrological sensor.""" station = config.get(CONF_STATION) monitored_conditions = config.get(CONF_MONITORED_CONDITIONS) hydro_data = HydrologicalData(station) hydro_data.update() if hydro_data.data is None: _LOGGER.error("The station doesn't exists: %s", station) return entities = [] for condition in monitored_conditions: entities.append(SwissHydrologicalDataSensor(hydro_data, station, condition)) add_entities(entities, True) class SwissHydrologicalDataSensor(SensorEntity): """Implementation of a Swiss hydrological sensor.""" def __init__(self, hydro_data, station, condition): """Initialize the Swiss hydrological sensor.""" self.hydro_data = hydro_data self._condition = condition self._data = self._state = self._unit_of_measurement = None self._icon = CONDITIONS[condition] self._station = station @property def name(self): """Return the name of the sensor.""" return "{} {}".format(self._data["water-body-name"], self._condition) @property def unique_id(self) -> str: """Return a unique, friendly identifier for this entity.""" return f"{self._station}_{self._condition}" @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" if self._state is not None: return self.hydro_data.data["parameters"][self._condition]["unit"] return None @property def state(self): """Return the state of the sensor.""" if isinstance(self._state, (int, float)): return round(self._state, 2) return None @property def extra_state_attributes(self): """Return the device state attributes.""" attrs = {} if not self._data: attrs[ATTR_ATTRIBUTION] = ATTRIBUTION return attrs attrs[ATTR_WATER_BODY_TYPE] = self._data["water-body-type"] attrs[ATTR_STATION] = self._data["name"] attrs[ATTR_STATION_UPDATE] = self._data["parameters"][self._condition][ "datetime" ] attrs[ATTR_ATTRIBUTION] = ATTRIBUTION for entry in CONDITION_DETAILS: attrs[entry.replace("-", "_")] = 
self._data["parameters"][self._condition][ entry ] return attrs @property def icon(self): """Icon to use in the frontend.""" return self._icon def update(self): """Get the latest data and update the state.""" self.hydro_data.update() self._data = self.hydro_data.data if self._data is None: self._state = None else: self._state = self._data["parameters"][self._condition]["value"] class HydrologicalData: """The Class for handling the data retrieval.""" def __init__(self, station): """Initialize the data object.""" self.station = station self.data = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data.""" shd = SwissHydroData() self.data = shd.get_station(self.station)
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/swiss_hydrological_data/sensor.py
"""Support for MQTT vacuums.""" import functools import voluptuous as vol from homeassistant.components.vacuum import DOMAIN from homeassistant.helpers.reload import async_setup_reload_service from .. import DOMAIN as MQTT_DOMAIN, PLATFORMS from ..mixins import async_setup_entry_helper from .schema import CONF_SCHEMA, LEGACY, MQTT_VACUUM_SCHEMA, STATE from .schema_legacy import PLATFORM_SCHEMA_LEGACY, async_setup_entity_legacy from .schema_state import PLATFORM_SCHEMA_STATE, async_setup_entity_state def validate_mqtt_vacuum(value): """Validate MQTT vacuum schema.""" schemas = {LEGACY: PLATFORM_SCHEMA_LEGACY, STATE: PLATFORM_SCHEMA_STATE} return schemas[value[CONF_SCHEMA]](value) PLATFORM_SCHEMA = vol.All( MQTT_VACUUM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), validate_mqtt_vacuum ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up MQTT vacuum through configuration.yaml.""" await async_setup_reload_service(hass, MQTT_DOMAIN, PLATFORMS) await _async_setup_entity(async_add_entities, config) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up MQTT vacuum dynamically through MQTT discovery.""" setup = functools.partial( _async_setup_entity, async_add_entities, config_entry=config_entry ) await async_setup_entry_helper(hass, DOMAIN, setup, PLATFORM_SCHEMA) async def _async_setup_entity( async_add_entities, config, config_entry=None, discovery_data=None ): """Set up the MQTT vacuum.""" setup_entity = {LEGACY: async_setup_entity_legacy, STATE: async_setup_entity_state} await setup_entity[config[CONF_SCHEMA]]( config, async_add_entities, config_entry, discovery_data )
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/mqtt/vacuum/__init__.py
"""Reproduce an Input select state.""" from __future__ import annotations import asyncio import logging from types import MappingProxyType from typing import Any, Iterable from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import Context, HomeAssistant, State from . import ( ATTR_OPTION, ATTR_OPTIONS, DOMAIN, SERVICE_SELECT_OPTION, SERVICE_SET_OPTIONS, ) ATTR_GROUP = [ATTR_OPTION, ATTR_OPTIONS] _LOGGER = logging.getLogger(__name__) async def _async_reproduce_state( hass: HomeAssistant, state: State, *, context: Context | None = None, reproduce_options: dict[str, Any] | None = None, ) -> None: """Reproduce a single state.""" cur_state = hass.states.get(state.entity_id) # Return if we can't find entity if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return # Return if we are already at the right state. if cur_state.state == state.state and all( check_attr_equal(cur_state.attributes, state.attributes, attr) for attr in ATTR_GROUP ): return # Set service data service_data = {ATTR_ENTITY_ID: state.entity_id} # If options are specified, call SERVICE_SET_OPTIONS if ATTR_OPTIONS in state.attributes: service = SERVICE_SET_OPTIONS service_data[ATTR_OPTIONS] = state.attributes[ATTR_OPTIONS] await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) # Remove ATTR_OPTIONS from service_data so we can reuse service_data in next call del service_data[ATTR_OPTIONS] # Call SERVICE_SELECT_OPTION service = SERVICE_SELECT_OPTION service_data[ATTR_OPTION] = state.state await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) async def async_reproduce_states( hass: HomeAssistant, states: Iterable[State], *, context: Context | None = None, reproduce_options: dict[str, Any] | None = None, ) -> None: """Reproduce Input select states.""" # Reproduce states in parallel. await asyncio.gather( *( _async_reproduce_state( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) ) def check_attr_equal( attr1: MappingProxyType, attr2: MappingProxyType, attr_str: str ) -> bool: """Return true if the given attributes are equal.""" return attr1.get(attr_str) == attr2.get(attr_str)
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/input_select/reproduce_state.py
"""Support for OpenTherm Gateway devices.""" import asyncio from datetime import date, datetime import logging import pyotgw import pyotgw.vars as gw_vars import voluptuous as vol from homeassistant.components.binary_sensor import DOMAIN as COMP_BINARY_SENSOR from homeassistant.components.climate import DOMAIN as COMP_CLIMATE from homeassistant.components.sensor import DOMAIN as COMP_SENSOR from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( ATTR_DATE, ATTR_ID, ATTR_MODE, ATTR_TEMPERATURE, ATTR_TIME, CONF_DEVICE, CONF_ID, CONF_NAME, EVENT_HOMEASSISTANT_STOP, PRECISION_HALVES, PRECISION_TENTHS, PRECISION_WHOLE, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import ( async_get_registry as async_get_dev_reg, ) from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import ( ATTR_CH_OVRD, ATTR_DHW_OVRD, ATTR_GW_ID, ATTR_LEVEL, CONF_CLIMATE, CONF_FLOOR_TEMP, CONF_PRECISION, CONF_READ_PRECISION, CONF_SET_PRECISION, DATA_GATEWAYS, DATA_OPENTHERM_GW, DOMAIN, SERVICE_RESET_GATEWAY, SERVICE_SET_CH_OVRD, SERVICE_SET_CLOCK, SERVICE_SET_CONTROL_SETPOINT, SERVICE_SET_GPIO_MODE, SERVICE_SET_HOT_WATER_OVRD, SERVICE_SET_HOT_WATER_SETPOINT, SERVICE_SET_LED_MODE, SERVICE_SET_MAX_MOD, SERVICE_SET_OAT, SERVICE_SET_SB_TEMP, ) _LOGGER = logging.getLogger(__name__) CLIMATE_SCHEMA = vol.Schema( { vol.Optional(CONF_PRECISION): vol.In( [PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE] ), vol.Optional(CONF_FLOOR_TEMP, default=False): cv.boolean, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: cv.schema_with_slug_keys( { vol.Required(CONF_DEVICE): cv.string, vol.Optional(CONF_CLIMATE, default={}): CLIMATE_SCHEMA, vol.Optional(CONF_NAME): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) async def options_updated(hass, entry): """Handle options update.""" gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] async_dispatcher_send(hass, gateway.options_update_signal, entry) async def async_setup_entry(hass, config_entry): """Set up the OpenTherm Gateway component.""" if DATA_OPENTHERM_GW not in hass.data: hass.data[DATA_OPENTHERM_GW] = {DATA_GATEWAYS: {}} gateway = OpenThermGatewayDevice(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway if config_entry.options.get(CONF_PRECISION): migrate_options = dict(config_entry.options) migrate_options.update( { CONF_READ_PRECISION: config_entry.options[CONF_PRECISION], CONF_SET_PRECISION: config_entry.options[CONF_PRECISION], } ) del migrate_options[CONF_PRECISION] hass.config_entries.async_update_entry(config_entry, options=migrate_options) config_entry.add_update_listener(options_updated) # Schedule directly on the loop to avoid blocking HA startup. 
hass.loop.create_task(gateway.connect_and_subscribe()) for comp in [COMP_BINARY_SENSOR, COMP_CLIMATE, COMP_SENSOR]: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, comp) ) register_services(hass) return True async def async_setup(hass, config): """Set up the OpenTherm Gateway component.""" if not hass.config_entries.async_entries(DOMAIN) and DOMAIN in config: conf = config[DOMAIN] for device_id, device_config in conf.items(): device_config[CONF_ID] = device_id hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=device_config ) ) return True def register_services(hass): """Register services for the component.""" service_reset_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ) } ) service_set_central_heating_ovrd_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_CH_OVRD): cv.boolean, } ) service_set_clock_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Optional(ATTR_DATE, default=date.today()): cv.date, vol.Optional(ATTR_TIME, default=datetime.now().time()): cv.time, } ) service_set_control_setpoint_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=0, max=90) ), } ) service_set_hot_water_setpoint_schema = service_set_control_setpoint_schema service_set_hot_water_ovrd_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_DHW_OVRD): vol.Any( vol.Equal("A"), vol.All(vol.Coerce(int), vol.Range(min=0, max=1)) ), } ) service_set_gpio_mode_schema = vol.Schema( vol.Any( vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.Equal("A"), vol.Required(ATTR_MODE): vol.All( vol.Coerce(int), vol.Range(min=0, max=6) ), } ), vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.Equal("B"), vol.Required(ATTR_MODE): vol.All( vol.Coerce(int), vol.Range(min=0, max=7) ), } ), ) ) service_set_led_mode_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.In("ABCDEF"), vol.Required(ATTR_MODE): vol.In("RXTBOFHWCEMP"), } ) service_set_max_mod_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_LEVEL): vol.All( vol.Coerce(int), vol.Range(min=-1, max=100) ), } ) service_set_oat_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=-40, max=99) ), } ) service_set_sb_temp_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=0, max=30) ), } ) async def reset_gateway(call): """Reset the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] mode_rst = gw_vars.OTGW_MODE_RESET 
status = await gw_dev.gateway.set_mode(mode_rst) gw_dev.status = status async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_RESET_GATEWAY, reset_gateway, service_reset_schema ) async def set_ch_ovrd(call): """Set the central heating override on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] await gw_dev.gateway.set_ch_enable_bit(1 if call.data[ATTR_CH_OVRD] else 0) hass.services.async_register( DOMAIN, SERVICE_SET_CH_OVRD, set_ch_ovrd, service_set_central_heating_ovrd_schema, ) async def set_control_setpoint(call): """Set the control setpoint on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_CONTROL_SETPOINT value = await gw_dev.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_CONTROL_SETPOINT, set_control_setpoint, service_set_control_setpoint_schema, ) async def set_dhw_ovrd(call): """Set the domestic hot water override on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.OTGW_DHW_OVRD value = await gw_dev.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_HOT_WATER_OVRD, set_dhw_ovrd, service_set_hot_water_ovrd_schema, ) async def set_dhw_setpoint(call): """Set the domestic hot water setpoint on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_DHW_SETPOINT value = await gw_dev.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_HOT_WATER_SETPOINT, set_dhw_setpoint, service_set_hot_water_setpoint_schema, ) async def set_device_clock(call): """Set the clock on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] attr_date = call.data[ATTR_DATE] attr_time = call.data[ATTR_TIME] await gw_dev.gateway.set_clock(datetime.combine(attr_date, attr_time)) hass.services.async_register( DOMAIN, SERVICE_SET_CLOCK, set_device_clock, service_set_clock_schema ) async def set_gpio_mode(call): """Set the OpenTherm Gateway GPIO modes.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gpio_id = call.data[ATTR_ID] gpio_mode = call.data[ATTR_MODE] mode = await gw_dev.gateway.set_gpio_mode(gpio_id, gpio_mode) gpio_var = getattr(gw_vars, f"OTGW_GPIO_{gpio_id}") gw_dev.status.update({gpio_var: mode}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_GPIO_MODE, set_gpio_mode, service_set_gpio_mode_schema ) async def set_led_mode(call): """Set the OpenTherm Gateway LED modes.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] led_id = call.data[ATTR_ID] led_mode = call.data[ATTR_MODE] mode = await gw_dev.gateway.set_led_mode(led_id, led_mode) led_var = getattr(gw_vars, f"OTGW_LED_{led_id}") gw_dev.status.update({led_var: mode}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_LED_MODE, 
set_led_mode, service_set_led_mode_schema ) async def set_max_mod(call): """Set the max modulation level.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD level = call.data[ATTR_LEVEL] if level == -1: # Backend only clears setting on non-numeric values. level = "-" value = await gw_dev.gateway.set_max_relative_mod(level) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_MAX_MOD, set_max_mod, service_set_max_mod_schema ) async def set_outside_temp(call): """Provide the outside temperature to the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_OUTSIDE_TEMP value = await gw_dev.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_OAT, set_outside_temp, service_set_oat_schema ) async def set_setback_temp(call): """Set the OpenTherm Gateway SetBack temperature.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.OTGW_SB_TEMP value = await gw_dev.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_SB_TEMP, set_setback_temp, service_set_sb_temp_schema ) async def async_unload_entry(hass, entry): """Cleanup and disconnect from gateway.""" await asyncio.gather( hass.config_entries.async_forward_entry_unload(entry, COMP_BINARY_SENSOR), hass.config_entries.async_forward_entry_unload(entry, COMP_CLIMATE), hass.config_entries.async_forward_entry_unload(entry, COMP_SENSOR), ) gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] await gateway.cleanup() return True class OpenThermGatewayDevice: """OpenTherm Gateway device class.""" def __init__(self, hass, config_entry): """Initialize the OpenTherm Gateway.""" self.hass = hass self.device_path = config_entry.data[CONF_DEVICE] self.gw_id = config_entry.data[CONF_ID] self.name = config_entry.data[CONF_NAME] self.climate_config = config_entry.options self.config_entry_id = config_entry.entry_id self.status = {} self.update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_update" self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_options_update" self.gateway = pyotgw.pyotgw() self.gw_version = None async def cleanup(self, event=None): """Reset overrides on the gateway.""" await self.gateway.set_control_setpoint(0) await self.gateway.set_max_relative_mod("-") await self.gateway.disconnect() async def connect_and_subscribe(self): """Connect to serial device and subscribe report handler.""" self.status = await self.gateway.connect(self.hass.loop, self.device_path) version_string = self.status[gw_vars.OTGW].get(gw_vars.OTGW_ABOUT) self.gw_version = version_string[18:] if version_string else None _LOGGER.debug( "Connected to OpenTherm Gateway %s at %s", self.gw_version, self.device_path ) dev_reg = await async_get_dev_reg(self.hass) gw_dev = dev_reg.async_get_or_create( config_entry_id=self.config_entry_id, identifiers={(DOMAIN, self.gw_id)}, name=self.name, manufacturer="Schelte Bron", model="OpenTherm Gateway", sw_version=self.gw_version, ) if gw_dev.sw_version != self.gw_version: dev_reg.async_update_device(gw_dev.id, 
sw_version=self.gw_version) self.hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, self.cleanup) async def handle_report(status): """Handle reports from the OpenTherm Gateway.""" _LOGGER.debug("Received report: %s", status) self.status = status async_dispatcher_send(self.hass, self.update_signal, status) self.gateway.subscribe(handle_report)
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/opentherm_gw/__init__.py
"""Constants for the AVM Fritz!Box integration.""" import logging ATTR_STATE_BATTERY_LOW = "battery_low" ATTR_STATE_DEVICE_LOCKED = "device_locked" ATTR_STATE_HOLIDAY_MODE = "holiday_mode" ATTR_STATE_LOCKED = "locked" ATTR_STATE_SUMMER_MODE = "summer_mode" ATTR_STATE_WINDOW_OPEN = "window_open" ATTR_TEMPERATURE_UNIT = "temperature_unit" ATTR_TOTAL_CONSUMPTION = "total_consumption" ATTR_TOTAL_CONSUMPTION_UNIT = "total_consumption_unit" CONF_CONNECTIONS = "connections" DEFAULT_HOST = "fritz.box" DEFAULT_USERNAME = "admin" DOMAIN = "fritzbox" LOGGER = logging.getLogger(__package__) PLATFORMS = ["binary_sensor", "climate", "switch", "sensor"]
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/fritzbox/const.py
"""Support for ESPHome lights.""" from __future__ import annotations from aioesphomeapi import LightInfo, LightState from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_WHITE_VALUE, FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.color as color_util from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry FLASH_LENGTHS = {FLASH_SHORT: 2, FLASH_LONG: 10} async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up ESPHome lights based on a config entry.""" await platform_async_setup_entry( hass, entry, async_add_entities, component_key="light", info_type=LightInfo, entity_type=EsphomeLight, state_type=LightState, ) class EsphomeLight(EsphomeEntity, LightEntity): """A switch implementation for ESPHome.""" @property def _static_info(self) -> LightInfo: return super()._static_info @property def _state(self) -> LightState | None: return super()._state # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property # pylint: disable=invalid-overridden-method @esphome_state_property def is_on(self) -> bool | None: """Return true if the switch is on.""" return self._state.state async def async_turn_on(self, **kwargs) -> None: """Turn the entity on.""" data = {"key": self._static_info.key, "state": True} if ATTR_HS_COLOR in kwargs: hue, sat = kwargs[ATTR_HS_COLOR] red, green, blue = color_util.color_hsv_to_RGB(hue, sat, 100) data["rgb"] = (red / 255, green / 255, blue / 255) if ATTR_FLASH in kwargs: data["flash_length"] = FLASH_LENGTHS[kwargs[ATTR_FLASH]] if ATTR_TRANSITION in kwargs: data["transition_length"] = kwargs[ATTR_TRANSITION] if ATTR_BRIGHTNESS in kwargs: data["brightness"] = kwargs[ATTR_BRIGHTNESS] / 255 if ATTR_COLOR_TEMP in kwargs: data["color_temperature"] = kwargs[ATTR_COLOR_TEMP] if ATTR_EFFECT in kwargs: data["effect"] = kwargs[ATTR_EFFECT] if ATTR_WHITE_VALUE in kwargs: data["white"] = kwargs[ATTR_WHITE_VALUE] / 255 await self._client.light_command(**data) async def async_turn_off(self, **kwargs) -> None: """Turn the entity off.""" data = {"key": self._static_info.key, "state": False} if ATTR_FLASH in kwargs: data["flash_length"] = FLASH_LENGTHS[kwargs[ATTR_FLASH]] if ATTR_TRANSITION in kwargs: data["transition_length"] = kwargs[ATTR_TRANSITION] await self._client.light_command(**data) @esphome_state_property def brightness(self) -> int | None: """Return the brightness of this light between 0..255.""" return round(self._state.brightness * 255) @esphome_state_property def hs_color(self) -> tuple[float, float] | None: """Return the hue and saturation color value [float, float].""" return color_util.color_RGB_to_hs( self._state.red * 255, self._state.green * 255, self._state.blue * 255 ) @esphome_state_property def color_temp(self) -> float | None: """Return the CT color value in mireds.""" return self._state.color_temperature @esphome_state_property def white_value(self) -> int | None: """Return the white value of this light between 0..255.""" return round(self._state.white * 255) @esphome_state_property def effect(self) -> str | None: """Return the current effect.""" return self._state.effect @property def supported_features(self) -> int: """Flag 
supported features.""" flags = SUPPORT_FLASH if self._static_info.supports_brightness: flags |= SUPPORT_BRIGHTNESS flags |= SUPPORT_TRANSITION if self._static_info.supports_rgb: flags |= SUPPORT_COLOR if self._static_info.supports_white_value: flags |= SUPPORT_WHITE_VALUE if self._static_info.supports_color_temperature: flags |= SUPPORT_COLOR_TEMP if self._static_info.effects: flags |= SUPPORT_EFFECT return flags @property def effect_list(self) -> list[str]: """Return the list of supported effects.""" return self._static_info.effects @property def min_mireds(self) -> float: """Return the coldest color_temp that this light supports.""" return self._static_info.min_mireds @property def max_mireds(self) -> float: """Return the warmest color_temp that this light supports.""" return self._static_info.max_mireds
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/esphome/light.py
"""This platform provides support for sensor data from RainMachine.""" from functools import partial from typing import Callable from regenmaschine.controller import Controller from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import TEMP_CELSIUS, VOLUME_CUBIC_METERS from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import RainMachineEntity from .const import ( DATA_CONTROLLER, DATA_COORDINATOR, DATA_PROVISION_SETTINGS, DATA_RESTRICTIONS_UNIVERSAL, DOMAIN, ) TYPE_FLOW_SENSOR_CLICK_M3 = "flow_sensor_clicks_cubic_meter" TYPE_FLOW_SENSOR_CONSUMED_LITERS = "flow_sensor_consumed_liters" TYPE_FLOW_SENSOR_START_INDEX = "flow_sensor_start_index" TYPE_FLOW_SENSOR_WATERING_CLICKS = "flow_sensor_watering_clicks" TYPE_FREEZE_TEMP = "freeze_protect_temp" SENSORS = { TYPE_FLOW_SENSOR_CLICK_M3: ( "Flow Sensor Clicks", "mdi:water-pump", f"clicks/{VOLUME_CUBIC_METERS}", None, False, DATA_PROVISION_SETTINGS, ), TYPE_FLOW_SENSOR_CONSUMED_LITERS: ( "Flow Sensor Consumed Liters", "mdi:water-pump", "liter", None, False, DATA_PROVISION_SETTINGS, ), TYPE_FLOW_SENSOR_START_INDEX: ( "Flow Sensor Start Index", "mdi:water-pump", "index", None, False, DATA_PROVISION_SETTINGS, ), TYPE_FLOW_SENSOR_WATERING_CLICKS: ( "Flow Sensor Clicks", "mdi:water-pump", "clicks", None, False, DATA_PROVISION_SETTINGS, ), TYPE_FREEZE_TEMP: ( "Freeze Protect Temperature", "mdi:thermometer", TEMP_CELSIUS, "temperature", True, DATA_RESTRICTIONS_UNIVERSAL, ), } async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable ) -> None: """Set up RainMachine sensors based on a config entry.""" controller = hass.data[DOMAIN][DATA_CONTROLLER][entry.entry_id] coordinators = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id] @callback def async_get_sensor(api_category: str) -> partial: """Generate the appropriate sensor object for an API category.""" if api_category == DATA_PROVISION_SETTINGS: return partial( ProvisionSettingsSensor, coordinators[DATA_PROVISION_SETTINGS], ) return partial( UniversalRestrictionsSensor, coordinators[DATA_RESTRICTIONS_UNIVERSAL], ) async_add_entities( [ async_get_sensor(api_category)( controller, sensor_type, name, icon, unit, device_class, enabled_by_default, ) for ( sensor_type, (name, icon, unit, device_class, enabled_by_default, api_category), ) in SENSORS.items() ] ) class RainMachineSensor(RainMachineEntity, SensorEntity): """Define a general RainMachine sensor.""" def __init__( self, coordinator: DataUpdateCoordinator, controller: Controller, sensor_type: str, name: str, icon: str, unit: str, device_class: str, enabled_by_default: bool, ) -> None: """Initialize.""" super().__init__(coordinator, controller) self._device_class = device_class self._enabled_by_default = enabled_by_default self._icon = icon self._name = name self._sensor_type = sensor_type self._state = None self._unit = unit @property def entity_registry_enabled_default(self) -> bool: """Determine whether an entity is enabled by default.""" return self._enabled_by_default @property def icon(self) -> str: """Return the icon.""" return self._icon @property def state(self) -> str: """Return the name of the entity.""" return self._state @property def unique_id(self) -> str: """Return a unique, Home Assistant friendly identifier for this entity.""" return f"{self._unique_id}_{self._sensor_type}" @property def unit_of_measurement(self) -> str: 
"""Return the unit the value is expressed in.""" return self._unit class ProvisionSettingsSensor(RainMachineSensor): """Define a sensor that handles provisioning data.""" @callback def update_from_latest_data(self) -> None: """Update the state.""" if self._sensor_type == TYPE_FLOW_SENSOR_CLICK_M3: self._state = self.coordinator.data["system"].get( "flowSensorClicksPerCubicMeter" ) elif self._sensor_type == TYPE_FLOW_SENSOR_CONSUMED_LITERS: clicks = self.coordinator.data["system"].get("flowSensorWateringClicks") clicks_per_m3 = self.coordinator.data["system"].get( "flowSensorClicksPerCubicMeter" ) if clicks and clicks_per_m3: self._state = (clicks * 1000) / clicks_per_m3 else: self._state = None elif self._sensor_type == TYPE_FLOW_SENSOR_START_INDEX: self._state = self.coordinator.data["system"].get("flowSensorStartIndex") elif self._sensor_type == TYPE_FLOW_SENSOR_WATERING_CLICKS: self._state = self.coordinator.data["system"].get( "flowSensorWateringClicks" ) class UniversalRestrictionsSensor(RainMachineSensor): """Define a sensor that handles universal restrictions data.""" @callback def update_from_latest_data(self) -> None: """Update the state.""" if self._sensor_type == TYPE_FREEZE_TEMP: self._state = self.coordinator.data["freezeProtectTemp"]
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/rainmachine/sensor.py
"""Define a config flow manager for AirVisual.""" import asyncio from pyairvisual import CloudAPI, NodeSamba from pyairvisual.errors import ( AirVisualError, InvalidKeyError, NodeProError, NotFoundError, ) import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_API_KEY, CONF_IP_ADDRESS, CONF_LATITUDE, CONF_LONGITUDE, CONF_PASSWORD, CONF_SHOW_ON_MAP, CONF_STATE, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from . import async_get_geography_id from .const import ( CONF_CITY, CONF_COUNTRY, CONF_INTEGRATION_TYPE, DOMAIN, INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, INTEGRATION_TYPE_NODE_PRO, LOGGER, ) API_KEY_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): cv.string}) GEOGRAPHY_NAME_SCHEMA = API_KEY_DATA_SCHEMA.extend( { vol.Required(CONF_CITY): cv.string, vol.Required(CONF_STATE): cv.string, vol.Required(CONF_COUNTRY): cv.string, } ) NODE_PRO_SCHEMA = vol.Schema( {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): cv.string} ) PICK_INTEGRATION_TYPE_SCHEMA = vol.Schema( { vol.Required("type"): vol.In( [ INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, INTEGRATION_TYPE_NODE_PRO, ] ) } ) class AirVisualFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle an AirVisual config flow.""" VERSION = 2 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL def __init__(self): """Initialize the config flow.""" self._entry_data_for_reauth = None self._geo_id = None @property def geography_coords_schema(self): """Return the data schema for the cloud API.""" return API_KEY_DATA_SCHEMA.extend( { vol.Required( CONF_LATITUDE, default=self.hass.config.latitude ): cv.latitude, vol.Required( CONF_LONGITUDE, default=self.hass.config.longitude ): cv.longitude, } ) async def _async_finish_geography(self, user_input, integration_type): """Validate a Cloud API key.""" websession = aiohttp_client.async_get_clientsession(self.hass) cloud_api = CloudAPI(user_input[CONF_API_KEY], session=websession) # If this is the first (and only the first) time we've seen this API key, check # that it's valid: valid_keys = self.hass.data.setdefault("airvisual_checked_api_keys", set()) valid_keys_lock = self.hass.data.setdefault( "airvisual_checked_api_keys_lock", asyncio.Lock() ) if integration_type == INTEGRATION_TYPE_GEOGRAPHY_COORDS: coro = cloud_api.air_quality.nearest_city() error_schema = self.geography_coords_schema error_step = "geography_by_coords" else: coro = cloud_api.air_quality.city( user_input[CONF_CITY], user_input[CONF_STATE], user_input[CONF_COUNTRY] ) error_schema = GEOGRAPHY_NAME_SCHEMA error_step = "geography_by_name" async with valid_keys_lock: if user_input[CONF_API_KEY] not in valid_keys: try: await coro except InvalidKeyError: return self.async_show_form( step_id=error_step, data_schema=error_schema, errors={CONF_API_KEY: "invalid_api_key"}, ) except NotFoundError: return self.async_show_form( step_id=error_step, data_schema=error_schema, errors={CONF_CITY: "location_not_found"}, ) except AirVisualError as err: LOGGER.error(err) return self.async_show_form( step_id=error_step, data_schema=error_schema, errors={"base": "unknown"}, ) valid_keys.add(user_input[CONF_API_KEY]) existing_entry = await self.async_set_unique_id(self._geo_id) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=user_input) return self.async_abort(reason="reauth_successful") return self.async_create_entry( 
title=f"Cloud API ({self._geo_id})", data={**user_input, CONF_INTEGRATION_TYPE: integration_type}, ) async def _async_init_geography(self, user_input, integration_type): """Handle the initialization of the integration via the cloud API.""" self._geo_id = async_get_geography_id(user_input) await self._async_set_unique_id(self._geo_id) self._abort_if_unique_id_configured() return await self._async_finish_geography(user_input, integration_type) async def _async_set_unique_id(self, unique_id): """Set the unique ID of the config flow and abort if it already exists.""" await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() @staticmethod @callback def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return AirVisualOptionsFlowHandler(config_entry) async def async_step_geography_by_coords(self, user_input=None): """Handle the initialization of the cloud API based on latitude/longitude.""" if not user_input: return self.async_show_form( step_id="geography_by_coords", data_schema=self.geography_coords_schema ) return await self._async_init_geography( user_input, INTEGRATION_TYPE_GEOGRAPHY_COORDS ) async def async_step_geography_by_name(self, user_input=None): """Handle the initialization of the cloud API based on city/state/country.""" if not user_input: return self.async_show_form( step_id="geography_by_name", data_schema=GEOGRAPHY_NAME_SCHEMA ) return await self._async_init_geography( user_input, INTEGRATION_TYPE_GEOGRAPHY_NAME ) async def async_step_node_pro(self, user_input=None): """Handle the initialization of the integration with a Node/Pro.""" if not user_input: return self.async_show_form(step_id="node_pro", data_schema=NODE_PRO_SCHEMA) await self._async_set_unique_id(user_input[CONF_IP_ADDRESS]) node = NodeSamba(user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD]) try: await node.async_connect() except NodeProError as err: LOGGER.error("Error connecting to Node/Pro unit: %s", err) return self.async_show_form( step_id="node_pro", data_schema=NODE_PRO_SCHEMA, errors={CONF_IP_ADDRESS: "cannot_connect"}, ) await node.async_disconnect() return self.async_create_entry( title=f"Node/Pro ({user_input[CONF_IP_ADDRESS]})", data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO}, ) async def async_step_reauth(self, data): """Handle configuration by re-auth.""" self._entry_data_for_reauth = data self._geo_id = async_get_geography_id(data) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm(self, user_input=None): """Handle re-auth completion.""" if not user_input: return self.async_show_form( step_id="reauth_confirm", data_schema=API_KEY_DATA_SCHEMA ) conf = {CONF_API_KEY: user_input[CONF_API_KEY], **self._entry_data_for_reauth} return await self._async_finish_geography( conf, self._entry_data_for_reauth[CONF_INTEGRATION_TYPE] ) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" if not user_input: return self.async_show_form( step_id="user", data_schema=PICK_INTEGRATION_TYPE_SCHEMA ) if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY_COORDS: return await self.async_step_geography_by_coords() if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY_NAME: return await self.async_step_geography_by_name() return await self.async_step_node_pro() class AirVisualOptionsFlowHandler(config_entries.OptionsFlow): """Handle an AirVisual options flow.""" def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry async def async_step_init(self, 
user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Required( CONF_SHOW_ON_MAP, default=self.config_entry.options.get(CONF_SHOW_ON_MAP), ): bool } ), )
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
w1ll1am23/home-assistant
tests/components/mochad/test_light.py
homeassistant/components/airvisual/config_flow.py
"""Brother helpers functions.""" import logging import pysnmp.hlapi.asyncio as hlapi from pysnmp.hlapi.asyncio.cmdgen import lcd from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import callback from homeassistant.helpers import singleton from .const import DOMAIN, SNMP _LOGGER = logging.getLogger(__name__) @singleton.singleton("snmp_engine") def get_snmp_engine(hass): """Get SNMP engine.""" _LOGGER.debug("Creating SNMP engine") snmp_engine = hlapi.SnmpEngine() @callback def shutdown_listener(ev): if hass.data.get(DOMAIN): _LOGGER.debug("Unconfiguring SNMP engine") lcd.unconfigure(hass.data[DOMAIN][SNMP], None) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_listener) return snmp_engine
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
repo_name: w1ll1am23/home-assistant
test_path: tests/components/mochad/test_light.py
code_path: homeassistant/components/brother/utils.py
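The Brother helper in this row uses homeassistant.helpers.singleton so that only one SnmpEngine is ever created per Home Assistant instance, no matter how many config entries call get_snmp_engine. Below is a framework-free sketch of the same caching idea (illustrative only; the decorator is a simplified stand-in for the real helper, and the dict plays the role of hass.data).

import functools


def singleton(key):
    """Cache the factory's result under `key` and reuse it on later calls."""

    def wrapper(factory):
        @functools.wraps(factory)
        def wrapped(data):
            if key not in data:
                data[key] = factory(data)
            return data[key]

        return wrapped

    return wrapper


@singleton("snmp_engine")
def get_engine(data):
    # Stand-in for hlapi.SnmpEngine(); only created on the first call.
    return object()


store = {}
assert get_engine(store) is get_engine(store)  # same cached instance both times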
"""The DirecTV integration.""" from __future__ import annotations import asyncio from datetime import timedelta from typing import Any from directv import DIRECTV, DIRECTVError from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_NAME, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity import Entity from .const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, ATTR_SOFTWARE_VERSION, ATTR_VIA_DEVICE, DOMAIN, ) CONFIG_SCHEMA = cv.deprecated(DOMAIN) PLATFORMS = ["media_player", "remote"] SCAN_INTERVAL = timedelta(seconds=30) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up DirecTV from a config entry.""" dtv = DIRECTV(entry.data[CONF_HOST], session=async_get_clientsession(hass)) try: await dtv.update() except DIRECTVError as err: raise ConfigEntryNotReady from err hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = dtv for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok class DIRECTVEntity(Entity): """Defines a base DirecTV entity.""" def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None: """Initialize the DirecTV entity.""" self._address = address self._device_id = address if address != "0" else dtv.device.info.receiver_id self._is_client = address != "0" self._name = name self.dtv = dtv @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def device_info(self) -> dict[str, Any]: """Return device information about this DirecTV receiver.""" return { ATTR_IDENTIFIERS: {(DOMAIN, self._device_id)}, ATTR_NAME: self.name, ATTR_MANUFACTURER: self.dtv.device.info.brand, ATTR_MODEL: None, ATTR_SOFTWARE_VERSION: self.dtv.device.info.version, ATTR_VIA_DEVICE: (DOMAIN, self.dtv.device.info.receiver_id), }
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
repo_name: w1ll1am23/home-assistant
test_path: tests/components/mochad/test_light.py
code_path: homeassistant/components/directv/__init__.py
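async_unload_entry in the row above fans the per-platform unloads out with asyncio.gather and only discards the stored client when every platform reports a clean unload. Here is a minimal standalone sketch of that pattern (illustrative only; the platform names are taken from PLATFORMS above, everything else is invented and has nothing to do with the real Home Assistant APIs).

import asyncio


async def unload_platform(name: str) -> bool:
    # Stand-in for the real per-platform unload call.
    await asyncio.sleep(0)
    return True


async def main() -> None:
    # Run the unloads concurrently; the entry counts as unloaded only if all succeed.
    results = await asyncio.gather(
        *(unload_platform(name) for name in ["media_player", "remote"])
    )
    print("unload_ok:", all(results))


asyncio.run(main())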
"""Support for interface with a Gree climate systems.""" from __future__ import annotations from homeassistant.components.switch import DEVICE_CLASS_SWITCH, SwitchEntity from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import COORDINATOR, DOMAIN async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Gree HVAC device from a config entry.""" async_add_entities( [ GreeSwitchEntity(coordinator) for coordinator in hass.data[DOMAIN][COORDINATOR] ] ) class GreeSwitchEntity(CoordinatorEntity, SwitchEntity): """Representation of a Gree HVAC device.""" def __init__(self, coordinator): """Initialize the Gree device.""" super().__init__(coordinator) self._name = coordinator.device.device_info.name + " Panel Light" self._mac = coordinator.device.device_info.mac @property def name(self) -> str: """Return the name of the device.""" return self._name @property def unique_id(self) -> str: """Return a unique id for the device.""" return f"{self._mac}-panel-light" @property def icon(self) -> str | None: """Return the icon for the device.""" return "mdi:lightbulb" @property def device_info(self): """Return device specific attributes.""" return { "name": self._name, "identifiers": {(DOMAIN, self._mac)}, "manufacturer": "Gree", "connections": {(CONNECTION_NETWORK_MAC, self._mac)}, } @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_SWITCH @property def is_on(self) -> bool: """Return if the light is turned on.""" return self.coordinator.device.light async def async_turn_on(self, **kwargs): """Turn the entity on.""" self.coordinator.device.light = True await self.coordinator.push_state_update() self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the entity off.""" self.coordinator.device.light = False await self.coordinator.push_state_update() self.async_write_ha_state()
"""The tests for the mochad light platform.""" import unittest.mock as mock import pytest from homeassistant.components import light from homeassistant.components.mochad import light as mochad from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) def pymochad_mock(): """Mock pymochad.""" with mock.patch("homeassistant.components.mochad.light.device") as device: yield device @pytest.fixture def light_mock(hass, brightness): """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} return mochad.MochadLight(hass, controller_mock, dev_dict) async def test_setup_adds_proper_devices(hass): """Test if setup adds devices.""" good_config = { "mochad": {}, "light": { "platform": "mochad", "devices": [{"name": "Light1", "address": "a1"}], }, } assert await async_setup_component(hass, light.DOMAIN, good_config) @pytest.mark.parametrize( "brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")] ) async def test_turn_on_with_no_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on() light_mock.light.send_cmd.assert_called_once_with(expected) @pytest.mark.parametrize( "brightness,expected", [ (32, [mock.call("on"), mock.call("dim 25")]), (256, [mock.call("xdim 45")]), (64, [mock.call("xdim 11")]), ], ) async def test_turn_on_with_brightness(light_mock, expected): """Test turn_on.""" light_mock.turn_on(brightness=45) light_mock.light.send_cmd.assert_has_calls(expected) @pytest.mark.parametrize("brightness", [32]) async def test_turn_off(light_mock): """Test turn_off.""" light_mock.turn_off() light_mock.light.send_cmd.assert_called_once_with("off")
repo_name: w1ll1am23/home-assistant
test_path: tests/components/mochad/test_light.py
code_path: homeassistant/components/gree/switch.py
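GreeSwitchEntity.async_turn_on above applies an optimistic update: it mutates the local device model, pushes the change to the device, then writes the new state immediately instead of waiting for the coordinator's next refresh. The sketch below replays that flow outside Home Assistant (illustrative only; every class and method name here is invented).

import asyncio


class FakeDevice:
    def __init__(self) -> None:
        self.light = False

    async def push_state_update(self) -> None:
        # Stands in for the network round-trip to the real device.
        await asyncio.sleep(0)


class PanelLightSwitch:
    def __init__(self, device: FakeDevice) -> None:
        self.device = device
        self.published_state = None

    def write_state(self) -> None:
        self.published_state = "on" if self.device.light else "off"

    async def turn_on(self) -> None:
        self.device.light = True                 # optimistic local update
        await self.device.push_state_update()    # send the change to the device
        self.write_state()                       # publish without waiting for a poll


async def main() -> None:
    switch = PanelLightSwitch(FakeDevice())
    await switch.turn_on()
    print(switch.published_state)  # -> "on"


asyncio.run(main())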