Dataset schema (column name, type, and observed value statistics):

- identifier: string, lengths 1 to 155
- parameters: string, lengths 2 to 6.09k
- docstring: string, lengths 11 to 63.4k
- docstring_summary: string, lengths 0 to 63.4k
- function: string, lengths 29 to 99.8k
- function_tokens: sequence
- start_point: sequence
- end_point: sequence
- language: categorical, 1 value
- docstring_language: string, lengths 2 to 7
- docstring_language_predictions: string, lengths 18 to 23
- is_langid_reliable: categorical, 2 values

One record per function follows.
identifier: async_unload_entry
parameters: (hass: HomeAssistant, entry: ConfigEntry)
docstring: Unload a config entry.
function:

    async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
        """Unload a config entry."""
        unload_ok = all(
            await asyncio.gather(
                *[
                    hass.config_entries.async_forward_entry_unload(entry, component)
                    for component in PLATFORMS
                ]
            )
        )
        if unload_ok:
            hass.data[DOMAIN].pop(entry.entry_id)
        return unload_ok
start_point: [40, 0] | end_point: [53, 20] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'es', 'en'] | is_langid_reliable: True

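The unload above fans out one task per platform with asyncio.gather and only clears the stored entry data when every platform unloaded cleanly. A minimal self-contained sketch of that gather-and-all pattern, with a hypothetical unload_platform coroutine standing in for the Home Assistant call:

    import asyncio

    async def unload_platform(name: str) -> bool:
        # Stand-in for hass.config_entries.async_forward_entry_unload(entry, name)
        await asyncio.sleep(0)
        return True

    async def main() -> None:
        platforms = ["switch", "binary_sensor"]  # hypothetical PLATFORMS list
        results = await asyncio.gather(*[unload_platform(p) for p in platforms])
        print(all(results))  # True only if every platform unloaded

    asyncio.run(main())
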
identifier: setup_input
parameters: (api: ProgettiHWSWAPI, input_number: int)
docstring: Initialize the input pin.
function:

    def setup_input(api: ProgettiHWSWAPI, input_number: int) -> Input:
        """Initialize the input pin."""
        return api.get_input(input_number)
start_point: [56, 0] | end_point: [58, 38] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: setup_switch
parameters: (api: ProgettiHWSWAPI, switch_number: int, mode: str)
docstring: Initialize the output pin.
function:

    def setup_switch(api: ProgettiHWSWAPI, switch_number: int, mode: str) -> Relay:
        """Initialize the output pin."""
        return api.get_relay(switch_number, mode)
start_point: [61, 0] | end_point: [63, 45] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: validate_input
parameters: (data)
docstring: Validate the user input allows us to connect.
function:

    async def validate_input(data):
        """Validate the user input allows us to connect.

        Data has the keys from DATA_SCHEMA with values provided by the user.
        """
        userid = data.get(CONF_USERNAME)
        password = data.get(CONF_PASSWORD)
        prefix = data[CONF_PREFIX]
        url = _make_url_from_data(data)
        requires_password = url.startswith("elks://")

        if requires_password and (not userid or not password):
            raise InvalidAuth

        elk = elkm1.Elk(
            {"url": url, "userid": userid, "password": password, "element_list": ["panel"]}
        )
        elk.connect()

        if not await async_wait_for_elk_to_sync(elk, VALIDATE_TIMEOUT, url):
            raise InvalidAuth

        device_name = data[CONF_PREFIX] if data[CONF_PREFIX] else "ElkM1"
        # Return info that you want to store in the config entry.
        return {"title": device_name, CONF_HOST: url, CONF_PREFIX: slugify(prefix)}
start_point: [47, 0] | end_point: [73, 79] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: ConfigFlow.__init__
parameters: (self)
docstring: Initialize the elkm1 config flow.
function:

    def __init__(self):
        """Initialize the elkm1 config flow."""
        self.importing = False
start_point: [92, 4] | end_point: [94, 30] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: ConfigFlow.async_step_user
parameters: (self, user_input=None)
docstring: Handle the initial step.
function:

    async def async_step_user(self, user_input=None):
        """Handle the initial step."""
        errors = {}
        if user_input is not None:
            if self._url_already_configured(_make_url_from_data(user_input)):
                return self.async_abort(reason="address_already_configured")

            try:
                info = await validate_input(user_input)
            except asyncio.TimeoutError:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"

            if "base" not in errors:
                await self.async_set_unique_id(user_input[CONF_PREFIX])
                self._abort_if_unique_id_configured()

                if self.importing:
                    return self.async_create_entry(title=info["title"], data=user_input)

                return self.async_create_entry(
                    title=info["title"],
                    data={
                        CONF_HOST: info[CONF_HOST],
                        CONF_USERNAME: user_input[CONF_USERNAME],
                        CONF_PASSWORD: user_input[CONF_PASSWORD],
                        CONF_AUTO_CONFIGURE: True,
                        CONF_TEMPERATURE_UNIT: user_input[CONF_TEMPERATURE_UNIT],
                        CONF_PREFIX: info[CONF_PREFIX],
                    },
                )

        return self.async_show_form(
            step_id="user", data_schema=DATA_SCHEMA, errors=errors
        )
start_point: [96, 4] | end_point: [135, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: ConfigFlow.async_step_import
parameters: (self, user_input)
docstring: Handle import.
function:

    async def async_step_import(self, user_input):
        """Handle import."""
        self.importing = True
        return await self.async_step_user(user_input)
start_point: [137, 4] | end_point: [140, 53] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'ja', 'en'] | is_langid_reliable: False

identifier: ConfigFlow._url_already_configured
parameters: (self, url)
docstring: See if we already have an elkm1 matching user input configured.
function:

    def _url_already_configured(self, url):
        """See if we already have an elkm1 matching user input configured."""
        existing_hosts = {
            urlparse(entry.data[CONF_HOST]).hostname
            for entry in self._async_current_entries()
        }
        return urlparse(url).hostname in existing_hosts
start_point: [142, 4] | end_point: [148, 55] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

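The duplicate check reduces every configured URL to its hostname, so the same controller is recognized regardless of scheme or port. A small sketch of that comparison with made-up URLs:

    from urllib.parse import urlparse

    existing_hosts = {urlparse(u).hostname for u in ("elks://192.168.1.5:2601",)}
    print(urlparse("elk://192.168.1.5").hostname in existing_hosts)  # True: same host
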
identifier: calls
parameters: (hass)
docstring: Track calls to a mock service.
function:

    def calls(hass):
        """Track calls to a mock service."""
        return async_mock_service(hass, "test", "automation")
start_point: [32, 0] | end_point: [34, 57] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_triggers
parameters: (hass, tag_setup, calls)
docstring: Test tag triggers.
function:

    async def test_triggers(hass, tag_setup, calls):
        """Test tag triggers."""
        assert await tag_setup()
        assert await async_setup_component(
            hass,
            automation.DOMAIN,
            {
                automation.DOMAIN: [
                    {
                        "trigger": {"platform": DOMAIN, TAG_ID: "abc123"},
                        "action": {
                            "service": "test.automation",
                            "data": {"message": "service called"},
                        },
                    }
                ]
            },
        )
        await hass.async_block_till_done()

        await async_scan_tag(hass, "abc123", None)
        await hass.async_block_till_done()

        assert len(calls) == 1
        assert calls[0].data["message"] == "service called"
start_point: [37, 0] | end_point: [62, 55] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'mt', 'en'] | is_langid_reliable: True

identifier: test_exception_bad_trigger
parameters: (hass, calls, caplog)
docstring: Test for exception on event triggers firing.
function:

    async def test_exception_bad_trigger(hass, calls, caplog):
        """Test for exception on event triggers firing."""
        await async_setup_component(
            hass,
            automation.DOMAIN,
            {
                automation.DOMAIN: [
                    {
                        "trigger": {"trigger": {"platform": DOMAIN, "oops": "abc123"}},
                        "action": {
                            "service": "test.automation",
                            "data": {"message": "service called"},
                        },
                    }
                ]
            },
        )
        await hass.async_block_till_done()
        assert "Invalid config for [automation]" in caplog.text
start_point: [65, 0] | end_point: [84, 59] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: cal_performance
parameters: (pred, gold, trg_pad_idx, smoothing=False)
docstring: Apply label smoothing if needed
function:

    def cal_performance(pred, gold, trg_pad_idx, smoothing=False):
        ''' Apply label smoothing if needed '''

        loss = cal_loss(pred, gold, trg_pad_idx, smoothing=smoothing)

        pred = pred.max(1)[1]
        gold = gold.contiguous().view(-1)
        non_pad_mask = gold.ne(trg_pad_idx)
        n_correct = pred.eq(gold).masked_select(non_pad_mask).sum().item()
        n_word = non_pad_mask.sum().item()

        return loss, n_correct, n_word
start_point: [25, 0] | end_point: [36, 34] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: cal_loss
parameters: (pred, gold, trg_pad_idx, smoothing=False)
docstring: Calculate cross entropy loss, apply label smoothing if needed.
function:

    def cal_loss(pred, gold, trg_pad_idx, smoothing=False):
        ''' Calculate cross entropy loss, apply label smoothing if needed. '''

        gold = gold.contiguous().view(-1)

        if smoothing:
            eps = 0.1
            n_class = pred.size(1)

            one_hot = torch.zeros_like(pred).scatter(1, gold.view(-1, 1), 1)
            one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1)
            log_prb = F.log_softmax(pred, dim=1)

            non_pad_mask = gold.ne(trg_pad_idx)
            loss = -(one_hot * log_prb).sum(dim=1)
            loss = loss.masked_select(non_pad_mask).sum()  # average later
        else:
            loss = F.cross_entropy(pred, gold, ignore_index=trg_pad_idx, reduction='sum')
        return loss
start_point: [39, 0] | end_point: [57, 15] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

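Assuming cal_performance and cal_loss above are in scope (with torch and torch.nn.functional as F imported, as their module does), a minimal sketch of how they are driven; the shapes and the padding index 0 are assumptions:

    import torch

    pred = torch.randn(6, 10)                # (batch * seq_len, n_class) logits
    gold = torch.tensor([1, 4, 2, 0, 0, 7])  # flattened targets; 0 taken as trg_pad_idx
    loss, n_correct, n_word = cal_performance(pred, gold, trg_pad_idx=0, smoothing=True)
    print(loss.item(), n_correct, n_word)    # n_word == 4: padding positions are excluded
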
identifier: train_epoch
parameters: (model, training_data, optimizer, opt, device, smoothing)
docstring: Epoch operation in training phase
function:

    def train_epoch(model, training_data, optimizer, opt, device, smoothing):
        ''' Epoch operation in training phase '''

        model.train()
        total_loss, n_word_total, n_word_correct = 0, 0, 0

        desc = ' - (Training) '
        for batch in tqdm(training_data, mininterval=2, desc=desc, leave=False):

            # prepare data
            src_seq = patch_src(batch.src, opt.src_pad_idx).to(device)
            trg_seq, gold = map(lambda x: x.to(device), patch_trg(batch.trg, opt.trg_pad_idx))

            # forward
            optimizer.zero_grad()
            pred = model(src_seq, trg_seq)

            # backward and update parameters
            loss, n_correct, n_word = cal_performance(
                pred, gold, opt.trg_pad_idx, smoothing=smoothing)
            loss.backward()
            optimizer.step_and_update_lr()

            # note keeping
            n_word_total += n_word
            n_word_correct += n_correct
            total_loss += loss.item()

        loss_per_word = total_loss / n_word_total
        accuracy = n_word_correct / n_word_total
        return loss_per_word, accuracy
start_point: [74, 0] | end_point: [104, 34] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: eval_epoch
parameters: (model, validation_data, device, opt)
docstring: Epoch operation in evaluation phase
function:

    def eval_epoch(model, validation_data, device, opt):
        ''' Epoch operation in evaluation phase '''

        model.eval()
        total_loss, n_word_total, n_word_correct = 0, 0, 0

        desc = ' - (Validation) '
        with torch.no_grad():
            for batch in tqdm(validation_data, mininterval=2, desc=desc, leave=False):

                # prepare data
                src_seq = patch_src(batch.src, opt.src_pad_idx).to(device)
                trg_seq, gold = map(lambda x: x.to(device), patch_trg(batch.trg, opt.trg_pad_idx))

                # forward
                pred = model(src_seq, trg_seq)
                loss, n_correct, n_word = cal_performance(
                    pred, gold, opt.trg_pad_idx, smoothing=False)

                # note keeping
                n_word_total += n_word
                n_word_correct += n_correct
                total_loss += loss.item()

        loss_per_word = total_loss / n_word_total
        accuracy = n_word_correct / n_word_total
        return loss_per_word, accuracy
start_point: [107, 0] | end_point: [133, 34] | language: python | docstring_language: en | docstring_language_predictions: ['de', 'en', 'en'] | is_langid_reliable: True

identifier: train
parameters: (model, training_data, validation_data, optimizer, device, opt)
docstring: Start training
function:

    def train(model, training_data, validation_data, optimizer, device, opt):
        ''' Start training '''

        # Use tensorboard to plot curves, e.g. perplexity, accuracy, learning rate
        if opt.use_tb:
            from torch.utils.tensorboard import SummaryWriter
            tb_writer = SummaryWriter(log_dir=os.path.join(opt.output_dir, 'tensorboard'))

        log_train_file = os.path.join(opt.output_dir, 'train.log')
        log_valid_file = os.path.join(opt.output_dir, 'valid.log')

        print('[Info] Training performance will be written to file: {} and {}'.format(
            log_train_file, log_valid_file))

        with open(log_train_file, 'w') as log_tf, open(log_valid_file, 'w') as log_vf:
            log_tf.write('epoch,loss,ppl,accuracy\n')
            log_vf.write('epoch,loss,ppl,accuracy\n')

        def print_performances(header, ppl, accu, start_time, lr, Num_parameters):
            print(' - {header:12} ppl: {ppl: 8.5f}, accuracy: {accu:3.3f} %, lr: {lr:8.5f}, '
                  'elapse: {elapse:3.3f} min, ParameterNumber: {Num_parameters: 8.2f}'.format(
                      header=f"({header})", ppl=ppl, accu=100 * accu,
                      elapse=(time.time() - start_time) / 60, lr=lr,
                      Num_parameters=Num_parameters))

        # valid_accus = []
        valid_losses = []
        for epoch_i in range(opt.epoch):
            print('[ Epoch', epoch_i, ']')

            start = time.time()
            train_loss, train_accu = train_epoch(
                model, training_data, optimizer, opt, device, smoothing=opt.label_smoothing)
            train_ppl = math.exp(min(train_loss, 100))
            # Current learning rate
            lr = optimizer._optimizer.param_groups[0]['lr']
            # Calculate the number of parameters of the model
            Num_parameters = count_parameters(model)
            print_performances('Training', train_ppl, train_accu, start, lr, Num_parameters)

            start = time.time()
            valid_loss, valid_accu = eval_epoch(model, validation_data, device, opt)
            valid_ppl = math.exp(min(valid_loss, 100))
            # Calculate the number of parameters of the model
            Num_parameters = count_parameters(model)
            print_performances('Validation', valid_ppl, valid_accu, start, lr, Num_parameters)

            valid_losses += [valid_loss]

            checkpoint = {'epoch': epoch_i, 'settings': opt, 'model': model.state_dict()}

            if opt.save_mode == 'all':
                model_name = 'model_accu_{accu:3.3f}.chkpt'.format(accu=100 * valid_accu)
                torch.save(checkpoint, model_name)
            elif opt.save_mode == 'best':
                model_name = 'model.chkpt'
                if valid_loss <= min(valid_losses):
                    torch.save(checkpoint, os.path.join(opt.output_dir, model_name))
                    print(' - [Info] The checkpoint file has been updated.')

            with open(log_train_file, 'a') as log_tf, open(log_valid_file, 'a') as log_vf:
                log_tf.write('{epoch},{loss: 8.5f},{ppl: 8.5f},{accu:3.3f}\n'.format(
                    epoch=epoch_i, loss=train_loss, ppl=train_ppl, accu=100 * train_accu))
                log_vf.write('{epoch},{loss: 8.5f},{ppl: 8.5f},{accu:3.3f}\n'.format(
                    epoch=epoch_i, loss=valid_loss, ppl=valid_ppl, accu=100 * valid_accu))

            if opt.use_tb:
                tb_writer.add_scalars('ppl', {'train': train_ppl, 'val': valid_ppl}, epoch_i)
                tb_writer.add_scalars('accuracy',
                                      {'train': train_accu * 100, 'val': valid_accu * 100}, epoch_i)
                tb_writer.add_scalar('learning_rate', lr, epoch_i)
start_point: [136, 0] | end_point: [210, 62] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'sn', 'en'] | is_langid_reliable: False

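The reported perplexity is just the exponential of the per-word cross-entropy, clamped at 100 to avoid overflow; a one-line check of what train() computes:

    import math

    train_loss = 3.2                       # hypothetical loss per word from train_epoch
    print(math.exp(min(train_loss, 100)))  # ~24.53
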
identifier: main
parameters: ()
docstring: Usage: python train.py -data_pkl m30k_deen_shr.pkl -log m30k_deen_shr -embs_share_weight -proj_share_weight -label_smoothing -output_dir output -b 256 -warmup 128000
function:

    def main():
        '''
        Usage:
        python train.py -data_pkl m30k_deen_shr.pkl -log m30k_deen_shr -embs_share_weight -proj_share_weight -label_smoothing -output_dir output -b 256 -warmup 128000
        '''

        parser = argparse.ArgumentParser()

        parser.add_argument('-data_pkl', default=None)    # all-in-1 data pickle or bpe field
        parser.add_argument('-train_path', default=None)  # bpe encoded data
        parser.add_argument('-val_path', default=None)    # bpe encoded data

        parser.add_argument('-epoch', type=int, default=10)
        parser.add_argument('-b', '--batch_size', type=int, default=2048)

        parser.add_argument('-d_model', type=int, default=512)
        parser.add_argument('-d_inner_hid', type=int, default=2048)
        parser.add_argument('-d_k', type=int, default=64)
        parser.add_argument('-d_v', type=int, default=64)

        parser.add_argument('-n_head', type=int, default=8)
        parser.add_argument('-n_layers', type=int, default=6)
        parser.add_argument('-warmup', '--n_warmup_steps', type=int, default=4000)
        parser.add_argument('-lr_mul', type=float, default=2.0)
        parser.add_argument('-seed', type=int, default=None)

        parser.add_argument('-dropout', type=float, default=0.1)
        parser.add_argument('-embs_share_weight', action='store_true')
        parser.add_argument('-proj_share_weight', action='store_true')
        parser.add_argument('-scale_emb_or_prj', type=str, default='prj')

        parser.add_argument('-output_dir', type=str, default=None)
        parser.add_argument('-use_tb', action='store_true')
        parser.add_argument('-save_mode', type=str, choices=['all', 'best'], default='best')

        parser.add_argument('-no_cuda', action='store_true')
        parser.add_argument('-label_smoothing', action='store_true')

        opt = parser.parse_args()
        opt.cuda = not opt.no_cuda
        opt.d_word_vec = opt.d_model

        # https://pytorch.org/docs/stable/notes/randomness.html
        # For reproducibility
        if opt.seed is not None:
            torch.manual_seed(opt.seed)
            torch.backends.cudnn.benchmark = False
            torch.set_deterministic(True)
            np.random.seed(opt.seed)
            random.seed(opt.seed)

        if not opt.output_dir:
            print('No experiment result will be saved.')
            raise

        if not os.path.exists(opt.output_dir):
            os.makedirs(opt.output_dir)

        if opt.batch_size < 2048 and opt.n_warmup_steps <= 4000:
            print('[Warning] The warmup steps may be not enough.\n'
                  '(sz_b, warmup) = (2048, 4000) is the official setting.\n'
                  'Using smaller batch w/o longer warmup may cause '
                  'the warmup stage ends with only little data trained.')

        device = torch.device('cuda' if opt.cuda else 'cpu')

        # ========= Loading Dataset =========#
        if all((opt.train_path, opt.val_path)):
            training_data, validation_data = prepare_dataloaders_from_bpe_files(opt, device)
        elif opt.data_pkl:
            training_data, validation_data = prepare_dataloaders(opt, device)
        else:
            raise

        print(opt)

        transformer = Transformer(
            opt.src_vocab_size,
            opt.trg_vocab_size,
            src_pad_idx=opt.src_pad_idx,
            trg_pad_idx=opt.trg_pad_idx,
            trg_emb_prj_weight_sharing=opt.proj_share_weight,
            emb_src_trg_weight_sharing=opt.embs_share_weight,
            d_k=opt.d_k,
            d_v=opt.d_v,
            d_model=opt.d_model,
            d_word_vec=opt.d_word_vec,
            d_inner=opt.d_inner_hid,
            n_layers=opt.n_layers,
            n_head=opt.n_head,
            dropout=opt.dropout,
            scale_emb_or_prj=opt.scale_emb_or_prj).to(device)

        optimizer = ScheduledOptim(
            optim.Adam(transformer.parameters(), betas=(0.9, 0.98), eps=1e-09),
            opt.lr_mul, opt.d_model, opt.n_warmup_steps)

        train(transformer, training_data, validation_data, optimizer, device, opt)
start_point: [216, 0] | end_point: [315, 78] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'ja', 'th'] | is_langid_reliable: False

identifier: split_index
parameters: (params)
docstring: Delete index information from params
function:

    def split_index(params):
        """
        Delete index information from params
        """
        if isinstance(params, dict):
            if NodeType.INDEX in params.keys():
                return split_index(params[NodeType.VALUE])
            result = {}
            for key in params:
                result[key] = split_index(params[key])
            return result
        else:
            return params
start_point: [46, 0] | end_point: [58, 21] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

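Assuming NodeType uses the '_index'/'_value' string constants the related code suggests (an assumption here), split_index strips the choice-index bookkeeping and keeps only the chosen values:

    params = {'optimizer': {'_index': 1, '_value': 'adam'}, 'lr': 0.01}
    print(split_index(params))  # {'optimizer': 'adam', 'lr': 0.01}
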
identifier: extract_scalar_reward
parameters: (value, scalar_key='default')
docstring: Extract scalar reward from trial result.
function:

    def extract_scalar_reward(value, scalar_key='default'):
        """
        Extract scalar reward from trial result.

        Parameters
        ----------
        value : int, float, dict
            the reported final metric data
        scalar_key : str
            the key name that indicates the numeric number

        Raises
        ------
        RuntimeError
            Incorrect final result: the final result should be float/int,
            or a dict which has a key named "default" whose value is float/int.
        """
        if isinstance(value, (float, int)):
            reward = value
        elif isinstance(value, dict) and scalar_key in value \
                and isinstance(value[scalar_key], (float, int)):
            reward = value[scalar_key]
        else:
            raise RuntimeError('Incorrect final result: the final result should be float/int, '
                               'or a dict which has a key named "default" whose value is float/int.')
        return reward
start_point: [61, 0] | end_point: [85, 17] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

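A short usage sketch for extract_scalar_reward as defined above:

    print(extract_scalar_reward(0.93))                            # 0.93
    print(extract_scalar_reward({'default': 0.93, 'loss': 0.2}))  # 0.93, picked via scalar_key
    # extract_scalar_reward({'accuracy': 0.93}) raises RuntimeError: no 'default' key
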
identifier: extract_scalar_history
parameters: (trial_history, scalar_key='default')
docstring: Extract scalar value from a list of intermediate results.
function:

    def extract_scalar_history(trial_history, scalar_key='default'):
        """
        Extract scalar value from a list of intermediate results.

        Parameters
        ----------
        trial_history : list
            accumulated intermediate results of a trial
        scalar_key : str
            the key name that indicates the numeric number

        Raises
        ------
        RuntimeError
            Incorrect final result: the final result should be float/int,
            or a dict which has a key named "default" whose value is float/int.
        """
        return [extract_scalar_reward(ele, scalar_key) for ele in trial_history]
start_point: [88, 0] | end_point: [105, 76] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

identifier: convert_dict2tuple
parameters: (value)
docstring: convert dict type to tuple to solve unhashable problem. NOTE: this function will change original data.
function:

    def convert_dict2tuple(value):
        """
        convert dict type to tuple to solve unhashable problem.
        NOTE: this function will change original data.
        """
        if isinstance(value, dict):
            for _keys in value:
                value[_keys] = convert_dict2tuple(value[_keys])
            return tuple(sorted(value.items()))
        return value
start_point: [108, 0] | end_point: [117, 16] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

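A usage sketch; note the function mutates its argument in place before returning the hashable tuple form:

    config = {'optimizer': {'name': 'adam', 'lr': 0.01}}
    key = convert_dict2tuple(config)
    print(key)             # (('optimizer', (('lr', 0.01), ('name', 'adam'))),)
    cache = {key: 'seen'}  # now hashable, so usable as a dict key or set member
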
identifier: json2space
parameters: (x, oldy=None, name=NodeType.ROOT)
docstring: Change search space from json format to hyperopt format
function:

    def json2space(x, oldy=None, name=NodeType.ROOT):
        """
        Change search space from json format to hyperopt format
        """
        y = list()
        if isinstance(x, dict):
            if NodeType.TYPE in x.keys():
                _type = x[NodeType.TYPE]
                name = name + '-' + _type
                if _type == 'choice':
                    if oldy is not None:
                        _index = oldy[NodeType.INDEX]
                        y += json2space(x[NodeType.VALUE][_index],
                                        oldy[NodeType.VALUE], name=name + '[%d]' % _index)
                    else:
                        y += json2space(x[NodeType.VALUE], None, name=name)
                y.append(name)
            else:
                for key in x.keys():
                    y += json2space(x[key], oldy[key] if oldy else None, name + "[%s]" % str(key))
        elif isinstance(x, list):
            for i, x_i in enumerate(x):
                if isinstance(x_i, dict):
                    if NodeType.NAME not in x_i.keys():
                        raise RuntimeError('\'_name\' key is not found in this nested search space.')
                y += json2space(x_i, oldy[i] if oldy else None, name + "[%d]" % i)
        return y
start_point: [120, 0] | end_point: [147, 12] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

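Assuming the usual NNI constants (NodeType.ROOT == 'root', NodeType.TYPE == '_type', and so on; an assumption here), json2space flattens a JSON search space into the list of variable names:

    space = {
        'lr': {'_type': 'choice', '_value': [0.01, 0.1]},
        'momentum': {'_type': 'uniform', '_value': [0, 1]},
    }
    print(json2space(space))  # ['root[lr]-choice', 'root[momentum]-uniform']
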
identifier: json2parameter
parameters: (x, is_rand, random_state, oldy=None, Rand=False, name=NodeType.ROOT)
docstring: Json to parameters.
function:

    def json2parameter(x, is_rand, random_state, oldy=None, Rand=False, name=NodeType.ROOT):
        """
        Json to parameters.
        """
        if isinstance(x, dict):
            if NodeType.TYPE in x.keys():
                _type = x[NodeType.TYPE]
                _value = x[NodeType.VALUE]
                name = name + '-' + _type
                Rand |= is_rand[name]
                if Rand is True:
                    if _type == 'choice':
                        _index = random_state.randint(len(_value))
                        y = {
                            NodeType.INDEX: _index,
                            NodeType.VALUE: json2parameter(
                                x[NodeType.VALUE][_index],
                                is_rand,
                                random_state,
                                None,
                                Rand,
                                name=name + "[%d]" % _index
                            )
                        }
                    else:
                        y = getattr(parameter_expressions, _type)(*(_value + [random_state]))
                else:
                    y = copy.deepcopy(oldy)
            else:
                y = dict()
                for key in x.keys():
                    y[key] = json2parameter(
                        x[key],
                        is_rand,
                        random_state,
                        oldy[key] if oldy else None,
                        Rand,
                        name + "[%s]" % str(key)
                    )
        elif isinstance(x, list):
            y = list()
            for i, x_i in enumerate(x):
                if isinstance(x_i, dict):
                    if NodeType.NAME not in x_i.keys():
                        raise RuntimeError('\'_name\' key is not found in this nested search space.')
                y.append(json2parameter(
                    x_i,
                    is_rand,
                    random_state,
                    oldy[i] if oldy else None,
                    Rand,
                    name + "[%d]" % i
                ))
        else:
            y = copy.deepcopy(x)
        return y
start_point: [150, 0] | end_point: [206, 12] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

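Building on the json2space sketch above, drawing one random sample could look like this (still assuming the NNI NodeType constants, and that parameter_expressions exposes the sampling functions taking a trailing random_state):

    import numpy as np

    space = {'lr': {'_type': 'choice', '_value': [0.01, 0.1]}}
    is_rand = {name: True for name in json2space(space)}
    sample = json2parameter(space, is_rand, np.random.RandomState(0))
    print(sample)  # e.g. {'lr': {'_index': 0, '_value': 0.01}}
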
identifier: merge_parameter
parameters: (base_params, override_params)
docstring: Update the parameters in ``base_params`` with ``override_params``. Can be useful to override parsed command line arguments.
function:

    def merge_parameter(base_params, override_params):
        """
        Update the parameters in ``base_params`` with ``override_params``.
        Can be useful to override parsed command line arguments.

        Parameters
        ----------
        base_params : namespace or dict
            Base parameters. A key-value mapping.
        override_params : dict or None
            Parameters to override. Usually the parameters got from ``get_next_parameters()``.
            When it is none, nothing will happen.

        Returns
        -------
        namespace or dict
            The updated ``base_params``. Note that ``base_params`` will be updated inplace.
            The return value is only for convenience.
        """
        if override_params is None:
            return base_params
        is_dict = isinstance(base_params, dict)
        for k, v in override_params.items():
            if is_dict:
                if k not in base_params:
                    raise ValueError('Key \'%s\' not found in base parameters.' % k)
                if type(base_params[k]) != type(v) and base_params[k] is not None:
                    raise TypeError('Expected \'%s\' in override parameters to have type \'%s\', '
                                    'but found \'%s\'.' % (k, type(base_params[k]), type(v)))
                base_params[k] = v
            else:
                if not hasattr(base_params, k):
                    raise ValueError('Key \'%s\' not found in base parameters.' % k)
                if type(getattr(base_params, k)) != type(v) and getattr(base_params, k) is not None:
                    raise TypeError('Expected \'%s\' in override parameters to have type \'%s\', '
                                    'but found \'%s\'.' % (k, type(getattr(base_params, k)), type(v)))
                setattr(base_params, k, v)
        return base_params
start_point: [208, 0] | end_point: [245, 22] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

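Typical use is overriding parsed defaults with a tuner's suggestions; a sketch with made-up values:

    base = {'lr': 0.1, 'batch_size': 32}
    merge_parameter(base, {'lr': 0.05})
    print(base)  # {'lr': 0.05, 'batch_size': 32} (updated in place)
    # merge_parameter(base, {'lr': 'fast'}) raises TypeError (type mismatch)
    # merge_parameter(base, {'epochs': 10}) raises ValueError (unknown key)
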
identifier: ClassArgsValidator.validate_class_args
parameters: (self, **kwargs)
docstring: Validate the classArgs configuration in experiment configuration file.
function:

    def validate_class_args(self, **kwargs):
        """
        Validate the classArgs configuration in experiment configuration file.

        Parameters
        ----------
        kwargs: dict
            kwargs passed to tuner/assessor/advisor constructor

        Raises
        ------
        Raise an exception if the kwargs are invalid.
        """
        pass
start_point: [253, 4] | end_point: [265, 12] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

identifier: ClassArgsValidator.choices
parameters: (self, key, *args)
docstring: Utility method to create a schema to check whether the `key` is one of the `args`.
function:

    def choices(self, key, *args):
        """
        Utility method to create a schema to check whether the `key` is one of the `args`.

        Parameters
        ----------
        key: str
            key name of the data to be validated
        args: list of str
            list of the choices

        Returns
        -------
        Schema
            A schema to check whether the `key` is one of the `args`.
        """
        return And(lambda n: n in args, error='%s should be in [%s]!' % (key, str(args)))
start_point: [267, 4] | end_point: [282, 89] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

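Assuming And and Schema come from the schema package the surrounding code appears to use, and that ClassArgsValidator can be instantiated directly, the returned validator plugs into a Schema; optimize_mode is an illustrative key name:

    from schema import Schema  # assumed dependency

    validator = ClassArgsValidator()
    s = Schema({'optimize_mode': validator.choices('optimize_mode', 'maximize', 'minimize')})
    print(s.validate({'optimize_mode': 'maximize'}))  # passes
    # s.validate({'optimize_mode': 'max'}) raises SchemaError with the custom message
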
identifier: ClassArgsValidator.range
parameters: (self, key, keyType, start, end)
docstring: Utility method to create a schema to check whether the `key` is in the range of [start, end].
function:

    def range(self, key, keyType, start, end):
        """
        Utility method to create a schema to check whether the `key` is in the range of [start, end].

        Parameters
        ----------
        key: str
            key name of the data to be validated
        keyType: type
            python data type, such as int, float
        start: type is specified by keyType
            start of the range
        end: type is specified by keyType
            end of the range

        Returns
        -------
        Schema
            A schema to check whether the `key` is in the range of [start, end].
        """
        return And(
            And(keyType, error='%s should be %s type!' % (key, keyType.__name__)),
            And(lambda n: start <= n <= end,
                error='%s should be in range of (%s, %s)!' % (key, start, end))
        )
start_point: [284, 4] | end_point: [306, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

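The range helper composes the same way (same assumed schema package):

    from schema import Schema  # assumed dependency

    validator = ClassArgsValidator()
    s = Schema({'eta': validator.range('eta', int, 2, 10)})
    print(s.validate({'eta': 3}))  # passes: an int within [2, 10]
    # s.validate({'eta': 99}) raises SchemaError ('eta should be in range of (2, 10)!')
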
identifier: setup_platform
parameters: (hass, config, add_entities, discovery_info=None)
docstring: Set up the Unifi LED platform.
function:

    def setup_platform(hass, config, add_entities, discovery_info=None):
        """Set up the Unifi LED platform."""

        # Assign configuration variables.
        # The configuration check takes care they are present.
        host = config[CONF_HOST]
        port = config[CONF_PORT]
        username = config[CONF_USERNAME]
        password = config[CONF_PASSWORD]

        api = unifiled(host, port, username=username, password=password)

        # Verify that passed in configuration works
        if not api.getloginstate():
            _LOGGER.error("Could not connect to unifiled controller")
            return

        add_entities(UnifiLedLight(light, api) for light in api.getlights())
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "# Assign configuration variables.", "# The configuration check takes care they are present.", "host", "=", "config", "[", "CONF_HOST", "]", "port", "=", "config", "[", "CONF_PORT", "]", "username", "=", "config", "[", "CONF_USERNAME", "]", "password", "=", "config", "[", "CONF_PASSWORD", "]", "api", "=", "unifiled", "(", "host", ",", "port", ",", "username", "=", "username", ",", "password", "=", "password", ")", "# Verify that passed in configuration works", "if", "not", "api", ".", "getloginstate", "(", ")", ":", "_LOGGER", ".", "error", "(", "\"Could not connect to unifiled controller\"", ")", "return", "add_entities", "(", "UnifiLedLight", "(", "light", ",", "api", ")", "for", "light", "in", "api", ".", "getlights", "(", ")", ")" ]
[ 28, 0 ]
[ 45, 72 ]
python
en
['en', 'zu', 'en']
True
UnifiLedLight.__init__
(self, light, api)
Init Unifi LED Light.
Init Unifi LED Light.
def __init__(self, light, api): """Init Unifi LED Light.""" self._api = api self._light = light self._name = light["name"] self._unique_id = light["id"] self._state = light["status"]["output"] self._available = light["isOnline"] self._brightness = self._api.convertfrom100to255(light["status"]["led"]) self._features = SUPPORT_BRIGHTNESS
[ "def", "__init__", "(", "self", ",", "light", ",", "api", ")", ":", "self", ".", "_api", "=", "api", "self", ".", "_light", "=", "light", "self", ".", "_name", "=", "light", "[", "\"name\"", "]", "self", ".", "_unique_id", "=", "light", "[", "\"id\"", "]", "self", ".", "_state", "=", "light", "[", "\"status\"", "]", "[", "\"output\"", "]", "self", ".", "_available", "=", "light", "[", "\"isOnline\"", "]", "self", ".", "_brightness", "=", "self", ".", "_api", ".", "convertfrom100to255", "(", "light", "[", "\"status\"", "]", "[", "\"led\"", "]", ")", "self", ".", "_features", "=", "SUPPORT_BRIGHTNESS" ]
[ 51, 4 ]
[ 61, 43 ]
python
it
['ro', 'sq', 'it']
False
UnifiLedLight.name
(self)
Return the display name of this light.
Return the display name of this light.
def name(self): """Return the display name of this light.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 64, 4 ]
[ 66, 25 ]
python
en
['en', 'en', 'en']
True
UnifiLedLight.available
(self)
Return the available state of this light.
Return the available state of this light.
def available(self): """Return the available state of this light.""" return self._available
[ "def", "available", "(", "self", ")", ":", "return", "self", ".", "_available" ]
[ 69, 4 ]
[ 71, 30 ]
python
en
['en', 'en', 'en']
True
UnifiLedLight.brightness
(self)
Return the brightness of this light.
Return the brightness of this light.
def brightness(self): """Return the brightness of this light.""" return self._brightness
[ "def", "brightness", "(", "self", ")", ":", "return", "self", ".", "_brightness" ]
[ 74, 4 ]
[ 76, 31 ]
python
en
['en', 'en', 'en']
True
UnifiLedLight.unique_id
(self)
Return the unique id of this light.
Return the unique id of this light.
def unique_id(self): """Return the unique id of this light.""" return self._unique_id
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "_unique_id" ]
[ 79, 4 ]
[ 81, 30 ]
python
en
['en', 'la', 'en']
True
UnifiLedLight.is_on
(self)
Return true if light is on.
Return true if light is on.
def is_on(self): """Return true if light is on.""" return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 84, 4 ]
[ 86, 26 ]
python
en
['en', 'et', 'en']
True
UnifiLedLight.supported_features
(self)
Return the supported features of this light.
Return the supported features of this light.
def supported_features(self): """Return the supported features of this light.""" return self._features
[ "def", "supported_features", "(", "self", ")", ":", "return", "self", ".", "_features" ]
[ 89, 4 ]
[ 91, 29 ]
python
en
['en', 'en', 'en']
True
UnifiLedLight.turn_on
(self, **kwargs)
Instruct the light to turn on.
Instruct the light to turn on.
def turn_on(self, **kwargs): """Instruct the light to turn on.""" self._api.setdevicebrightness( self._unique_id, str(self._api.convertfrom255to100(kwargs.get(ATTR_BRIGHTNESS, 255))), ) self._api.setdeviceoutput(self._unique_id, 1)
[ "def", "turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_api", ".", "setdevicebrightness", "(", "self", ".", "_unique_id", ",", "str", "(", "self", ".", "_api", ".", "convertfrom255to100", "(", "kwargs", ".", "get", "(", "ATTR_BRIGHTNESS", ",", "255", ")", ")", ")", ",", ")", "self", ".", "_api", ".", "setdeviceoutput", "(", "self", ".", "_unique_id", ",", "1", ")" ]
[ 93, 4 ]
[ 99, 53 ]
python
en
['en', 'en', 'en']
True
UnifiLedLight.turn_off
(self, **kwargs)
Instruct the light to turn off.
Instruct the light to turn off.
def turn_off(self, **kwargs): """Instruct the light to turn off.""" self._api.setdeviceoutput(self._unique_id, 0)
[ "def", "turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_api", ".", "setdeviceoutput", "(", "self", ".", "_unique_id", ",", "0", ")" ]
[ 101, 4 ]
[ 103, 53 ]
python
en
['en', 'en', 'en']
True
UnifiLedLight.update
(self)
Update the light states.
Update the light states.
def update(self): """Update the light states.""" self._state = self._api.getlightstate(self._unique_id) self._brightness = self._api.convertfrom100to255( self._api.getlightbrightness(self._unique_id) ) self._available = self._api.getlightavailable(self._unique_id)
[ "def", "update", "(", "self", ")", ":", "self", ".", "_state", "=", "self", ".", "_api", ".", "getlightstate", "(", "self", ".", "_unique_id", ")", "self", ".", "_brightness", "=", "self", ".", "_api", ".", "convertfrom100to255", "(", "self", ".", "_api", ".", "getlightbrightness", "(", "self", ".", "_unique_id", ")", ")", "self", ".", "_available", "=", "self", ".", "_api", ".", "getlightavailable", "(", "self", ".", "_unique_id", ")" ]
[ 105, 4 ]
[ 111, 70 ]
python
en
['en', 'en', 'en']
True
setup_scanner
(hass, config, see, discovery_info=None)
Set up the demo tracker.
Set up the demo tracker.
def setup_scanner(hass, config, see, discovery_info=None): """Set up the demo tracker.""" def offset(): """Return random offset.""" return (random.randrange(500, 2000)) / 2e5 * random.choice((-1, 1)) def random_see(dev_id, name): """Randomize a sighting.""" see( dev_id=dev_id, host_name=name, gps=(hass.config.latitude + offset(), hass.config.longitude + offset()), gps_accuracy=random.randrange(50, 150), battery=random.randrange(10, 90), ) def observe(call=None): """Observe three entities.""" random_see("demo_paulus", "Paulus") random_see("demo_anne_therese", "Anne Therese") observe() see( dev_id="demo_home_boy", host_name="Home Boy", gps=[hass.config.latitude - 0.00002, hass.config.longitude + 0.00002], gps_accuracy=20, battery=53, ) hass.services.register(DOMAIN, SERVICE_RANDOMIZE_DEVICE_TRACKER_DATA, observe) return True
[ "def", "setup_scanner", "(", "hass", ",", "config", ",", "see", ",", "discovery_info", "=", "None", ")", ":", "def", "offset", "(", ")", ":", "\"\"\"Return random offset.\"\"\"", "return", "(", "random", ".", "randrange", "(", "500", ",", "2000", ")", ")", "/", "2e5", "*", "random", ".", "choice", "(", "(", "-", "1", ",", "1", ")", ")", "def", "random_see", "(", "dev_id", ",", "name", ")", ":", "\"\"\"Randomize a sighting.\"\"\"", "see", "(", "dev_id", "=", "dev_id", ",", "host_name", "=", "name", ",", "gps", "=", "(", "hass", ".", "config", ".", "latitude", "+", "offset", "(", ")", ",", "hass", ".", "config", ".", "longitude", "+", "offset", "(", ")", ")", ",", "gps_accuracy", "=", "random", ".", "randrange", "(", "50", ",", "150", ")", ",", "battery", "=", "random", ".", "randrange", "(", "10", ",", "90", ")", ",", ")", "def", "observe", "(", "call", "=", "None", ")", ":", "\"\"\"Observe three entities.\"\"\"", "random_see", "(", "\"demo_paulus\"", ",", "\"Paulus\"", ")", "random_see", "(", "\"demo_anne_therese\"", ",", "\"Anne Therese\"", ")", "observe", "(", ")", "see", "(", "dev_id", "=", "\"demo_home_boy\"", ",", "host_name", "=", "\"Home Boy\"", ",", "gps", "=", "[", "hass", ".", "config", ".", "latitude", "-", "0.00002", ",", "hass", ".", "config", ".", "longitude", "+", "0.00002", "]", ",", "gps_accuracy", "=", "20", ",", "battery", "=", "53", ",", ")", "hass", ".", "services", ".", "register", "(", "DOMAIN", ",", "SERVICE_RANDOMIZE_DEVICE_TRACKER_DATA", ",", "observe", ")", "return", "True" ]
[ 6, 0 ]
[ 40, 15 ]
python
en
['en', 'en', 'en']
True
generate_url
(host, port)
Create a URL from the host and port.
Create a URL from the host and port.
def generate_url(host, port) -> str: """Create a URL from the host and port.""" server_origin = host if "://" not in host: server_origin = f"http://{host}" if server_origin[-1] == "/": server_origin = server_origin[:-1] return f"{server_origin}:{port}/"
[ "def", "generate_url", "(", "host", ",", "port", ")", "->", "str", ":", "server_origin", "=", "host", "if", "\"://\"", "not", "in", "host", ":", "server_origin", "=", "f\"http://{host}\"", "if", "server_origin", "[", "-", "1", "]", "==", "\"/\"", ":", "server_origin", "=", "server_origin", "[", ":", "-", "1", "]", "return", "f\"{server_origin}:{port}/\"" ]
[ 3, 0 ]
[ 12, 37 ]
python
en
['en', 'en', 'en']
True
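A few illustrative calls for generate_url, showing how the scheme is defaulted and a trailing slash on the host is stripped before the port is appended:

assert generate_url("localhost", 8123) == "http://localhost:8123/"
assert generate_url("https://example.com", 443) == "https://example.com:443/"
assert generate_url("http://example.com/", 80) == "http://example.com:80/"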
_create_processor_from_config
(hass, camera_entity, config)
Create an OpenCV processor from configuration.
Create an OpenCV processor from configuration.
def _create_processor_from_config(hass, camera_entity, config): """Create an OpenCV processor from configuration.""" classifier_config = config.get(CONF_CLASSIFIER) name = f"{config[CONF_NAME]} {split_entity_id(camera_entity)[1].replace('_', ' ')}" processor = OpenCVImageProcessor(hass, camera_entity, name, classifier_config) return processor
[ "def", "_create_processor_from_config", "(", "hass", ",", "camera_entity", ",", "config", ")", ":", "classifier_config", "=", "config", ".", "get", "(", "CONF_CLASSIFIER", ")", "name", "=", "f\"{config[CONF_NAME]} {split_entity_id(camera_entity)[1].replace('_', ' ')}\"", "processor", "=", "OpenCVImageProcessor", "(", "hass", ",", "camera_entity", ",", "name", ",", "classifier_config", ")", "return", "processor" ]
[ 74, 0 ]
[ 81, 20 ]
python
en
['en', 'en', 'en']
True
_get_default_classifier
(dest_path)
Download the default OpenCV classifier.
Download the default OpenCV classifier.
def _get_default_classifier(dest_path): """Download the default OpenCV classifier.""" _LOGGER.info("Downloading default classifier") req = requests.get(CASCADE_URL, stream=True) with open(dest_path, "wb") as fil: for chunk in req.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks fil.write(chunk)
[ "def", "_get_default_classifier", "(", "dest_path", ")", ":", "_LOGGER", ".", "info", "(", "\"Downloading default classifier\"", ")", "req", "=", "requests", ".", "get", "(", "CASCADE_URL", ",", "stream", "=", "True", ")", "with", "open", "(", "dest_path", ",", "\"wb\"", ")", "as", "fil", ":", "for", "chunk", "in", "req", ".", "iter_content", "(", "chunk_size", "=", "1024", ")", ":", "if", "chunk", ":", "# filter out keep-alive new chunks", "fil", ".", "write", "(", "chunk", ")" ]
[ 84, 0 ]
[ 91, 32 ]
python
en
['en', 'nl', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the OpenCV image processing platform.
Set up the OpenCV image processing platform.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the OpenCV image processing platform.""" if not CV2_IMPORTED: _LOGGER.error( "No OpenCV library found! Install or compile for your system " "following instructions here: http://opencv.org/releases.html" ) return entities = [] if CONF_CLASSIFIER not in config: dest_path = hass.config.path(DEFAULT_CLASSIFIER_PATH) _get_default_classifier(dest_path) config[CONF_CLASSIFIER] = {"Face": dest_path} for camera in config[CONF_SOURCE]: entities.append( OpenCVImageProcessor( hass, camera[CONF_ENTITY_ID], camera.get(CONF_NAME), config[CONF_CLASSIFIER], ) ) add_entities(entities)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "if", "not", "CV2_IMPORTED", ":", "_LOGGER", ".", "error", "(", "\"No OpenCV library found! Install or compile for your system \"", "\"following instructions here: http://opencv.org/releases.html\"", ")", "return", "entities", "=", "[", "]", "if", "CONF_CLASSIFIER", "not", "in", "config", ":", "dest_path", "=", "hass", ".", "config", ".", "path", "(", "DEFAULT_CLASSIFIER_PATH", ")", "_get_default_classifier", "(", "dest_path", ")", "config", "[", "CONF_CLASSIFIER", "]", "=", "{", "\"Face\"", ":", "dest_path", "}", "for", "camera", "in", "config", "[", "CONF_SOURCE", "]", ":", "entities", ".", "append", "(", "OpenCVImageProcessor", "(", "hass", ",", "camera", "[", "CONF_ENTITY_ID", "]", ",", "camera", ".", "get", "(", "CONF_NAME", ")", ",", "config", "[", "CONF_CLASSIFIER", "]", ",", ")", ")", "add_entities", "(", "entities", ")" ]
[ 94, 0 ]
[ 119, 26 ]
python
en
['en', 'da', 'en']
True
OpenCVImageProcessor.__init__
(self, hass, camera_entity, name, classifiers)
Initialize the OpenCV entity.
Initialize the OpenCV entity.
def __init__(self, hass, camera_entity, name, classifiers): """Initialize the OpenCV entity.""" self.hass = hass self._camera_entity = camera_entity if name: self._name = name else: self._name = f"OpenCV {split_entity_id(camera_entity)[1]}" self._classifiers = classifiers self._matches = {} self._total_matches = 0 self._last_image = None
[ "def", "__init__", "(", "self", ",", "hass", ",", "camera_entity", ",", "name", ",", "classifiers", ")", ":", "self", ".", "hass", "=", "hass", "self", ".", "_camera_entity", "=", "camera_entity", "if", "name", ":", "self", ".", "_name", "=", "name", "else", ":", "self", ".", "_name", "=", "f\"OpenCV {split_entity_id(camera_entity)[1]}\"", "self", ".", "_classifiers", "=", "classifiers", "self", ".", "_matches", "=", "{", "}", "self", ".", "_total_matches", "=", "0", "self", ".", "_last_image", "=", "None" ]
[ 125, 4 ]
[ 136, 31 ]
python
en
['en', 'en', 'en']
True
OpenCVImageProcessor.camera_entity
(self)
Return the camera entity id used to process pictures.
Return the camera entity id used to process pictures.
def camera_entity(self): """Return the camera entity id used to process pictures.""" return self._camera_entity
[ "def", "camera_entity", "(", "self", ")", ":", "return", "self", ".", "_camera_entity" ]
[ 139, 4 ]
[ 141, 34 ]
python
en
['en', 'en', 'en']
True
OpenCVImageProcessor.name
(self)
Return the name of the image processor.
Return the name of the image processor.
def name(self): """Return the name of the image processor.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 144, 4 ]
[ 146, 25 ]
python
en
['en', 'en', 'en']
True
OpenCVImageProcessor.state
(self)
Return the state of the entity.
Return the state of the entity.
def state(self): """Return the state of the entity.""" return self._total_matches
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_total_matches" ]
[ 149, 4 ]
[ 151, 34 ]
python
en
['en', 'en', 'en']
True
OpenCVImageProcessor.state_attributes
(self)
Return device specific state attributes.
Return device specific state attributes.
def state_attributes(self): """Return device specific state attributes.""" return {ATTR_MATCHES: self._matches, ATTR_TOTAL_MATCHES: self._total_matches}
[ "def", "state_attributes", "(", "self", ")", ":", "return", "{", "ATTR_MATCHES", ":", "self", ".", "_matches", ",", "ATTR_TOTAL_MATCHES", ":", "self", ".", "_total_matches", "}" ]
[ 154, 4 ]
[ 156, 85 ]
python
en
['fr', 'en', 'en']
True
OpenCVImageProcessor.process_image
(self, image)
Process the image.
Process the image.
def process_image(self, image): """Process the image.""" cv_image = cv2.imdecode(numpy.asarray(bytearray(image)), cv2.IMREAD_UNCHANGED) matches = {} total_matches = 0 for name, classifier in self._classifiers.items(): scale = DEFAULT_SCALE neighbors = DEFAULT_NEIGHBORS min_size = DEFAULT_MIN_SIZE if isinstance(classifier, dict): path = classifier[CONF_FILE] scale = classifier.get(CONF_SCALE, scale) neighbors = classifier.get(CONF_NEIGHBORS, neighbors) min_size = classifier.get(CONF_MIN_SIZE, min_size) else: path = classifier cascade = cv2.CascadeClassifier(path) detections = cascade.detectMultiScale( cv_image, scaleFactor=scale, minNeighbors=neighbors, minSize=min_size ) regions = [] # pylint: disable=invalid-name for (x, y, w, h) in detections: regions.append((int(x), int(y), int(w), int(h))) total_matches += 1 matches[name] = regions self._matches = matches self._total_matches = total_matches
[ "def", "process_image", "(", "self", ",", "image", ")", ":", "cv_image", "=", "cv2", ".", "imdecode", "(", "numpy", ".", "asarray", "(", "bytearray", "(", "image", ")", ")", ",", "cv2", ".", "IMREAD_UNCHANGED", ")", "matches", "=", "{", "}", "total_matches", "=", "0", "for", "name", ",", "classifier", "in", "self", ".", "_classifiers", ".", "items", "(", ")", ":", "scale", "=", "DEFAULT_SCALE", "neighbors", "=", "DEFAULT_NEIGHBORS", "min_size", "=", "DEFAULT_MIN_SIZE", "if", "isinstance", "(", "classifier", ",", "dict", ")", ":", "path", "=", "classifier", "[", "CONF_FILE", "]", "scale", "=", "classifier", ".", "get", "(", "CONF_SCALE", ",", "scale", ")", "neighbors", "=", "classifier", ".", "get", "(", "CONF_NEIGHBORS", ",", "neighbors", ")", "min_size", "=", "classifier", ".", "get", "(", "CONF_MIN_SIZE", ",", "min_size", ")", "else", ":", "path", "=", "classifier", "cascade", "=", "cv2", ".", "CascadeClassifier", "(", "path", ")", "detections", "=", "cascade", ".", "detectMultiScale", "(", "cv_image", ",", "scaleFactor", "=", "scale", ",", "minNeighbors", "=", "neighbors", ",", "minSize", "=", "min_size", ")", "regions", "=", "[", "]", "# pylint: disable=invalid-name", "for", "(", "x", ",", "y", ",", "w", ",", "h", ")", "in", "detections", ":", "regions", ".", "append", "(", "(", "int", "(", "x", ")", ",", "int", "(", "y", ")", ",", "int", "(", "w", ")", ",", "int", "(", "h", ")", ")", ")", "total_matches", "+=", "1", "matches", "[", "name", "]", "=", "regions", "self", ".", "_matches", "=", "matches", "self", ".", "_total_matches", "=", "total_matches" ]
[ 158, 4 ]
[ 191, 43 ]
python
en
['en', 'en', 'en']
True
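The per-classifier loop in process_image reduces to OpenCV's standard Haar-cascade API. A minimal standalone sketch of that pattern follows; the file paths are placeholders, and the parameter values are only illustrative stand-ins for what the component reads from its classifier configuration:

import cv2

cascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")  # placeholder path
image = cv2.imread("snapshot.jpg")                                      # placeholder image
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)                          # cascades operate on grayscale

detections = cascade.detectMultiScale(
    gray,
    scaleFactor=1.1,   # step between scanned image scales
    minNeighbors=4,    # neighboring detections required to confirm a match
    minSize=(30, 30),  # ignore regions smaller than this
)
regions = [(int(x), int(y), int(w), int(h)) for (x, y, w, h) in detections]
print(len(regions), "matches:", regions)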
XboxBaseSensorEntity.__init__
(self, coordinator: XboxUpdateCoordinator, xuid: str, attribute: str)
Initialize Xbox binary sensor.
Initialize Xbox binary sensor.
def __init__(self, coordinator: XboxUpdateCoordinator, xuid: str, attribute: str): """Initialize Xbox binary sensor.""" super().__init__(coordinator) self.xuid = xuid self.attribute = attribute
[ "def", "__init__", "(", "self", ",", "coordinator", ":", "XboxUpdateCoordinator", ",", "xuid", ":", "str", ",", "attribute", ":", "str", ")", ":", "super", "(", ")", ".", "__init__", "(", "coordinator", ")", "self", ".", "xuid", "=", "xuid", "self", ".", "attribute", "=", "attribute" ]
[ 12, 4 ]
[ 16, 34 ]
python
en
['en', 'pl', 'en']
True
XboxBaseSensorEntity.unique_id
(self)
Return a unique, Home Assistant friendly identifier for this entity.
Return a unique, Home Assistant friendly identifier for this entity.
def unique_id(self) -> str: """Return a unique, Home Assistant friendly identifier for this entity.""" return f"{self.xuid}_{self.attribute}"
[ "def", "unique_id", "(", "self", ")", "->", "str", ":", "return", "f\"{self.xuid}_{self.attribute}\"" ]
[ 19, 4 ]
[ 21, 46 ]
python
en
['en', 'en', 'en']
True
XboxBaseSensorEntity.data
(self)
Return coordinator data for this console.
Return coordinator data for this console.
def data(self) -> Optional[PresenceData]: """Return coordinator data for this console.""" return self.coordinator.data.presence.get(self.xuid)
[ "def", "data", "(", "self", ")", "->", "Optional", "[", "PresenceData", "]", ":", "return", "self", ".", "coordinator", ".", "data", ".", "presence", ".", "get", "(", "self", ".", "xuid", ")" ]
[ 24, 4 ]
[ 26, 60 ]
python
en
['en', 'en', 'en']
True
XboxBaseSensorEntity.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self) -> str: """Return the name of the sensor.""" if not self.data: return None if self.attribute == "online": return self.data.gamertag attr_name = " ".join([part.title() for part in self.attribute.split("_")]) return f"{self.data.gamertag} {attr_name}"
[ "def", "name", "(", "self", ")", "->", "str", ":", "if", "not", "self", ".", "data", ":", "return", "None", "if", "self", ".", "attribute", "==", "\"online\"", ":", "return", "self", ".", "data", ".", "gamertag", "attr_name", "=", "\" \"", ".", "join", "(", "[", "part", ".", "title", "(", ")", "for", "part", "in", "self", ".", "attribute", ".", "split", "(", "\"_\"", ")", "]", ")", "return", "f\"{self.data.gamertag} {attr_name}\"" ]
[ 29, 4 ]
[ 38, 50 ]
python
en
['en', 'mi', 'en']
True
XboxBaseSensorEntity.entity_picture
(self)
Return the gamer pic.
Return the gamer pic.
def entity_picture(self) -> str: """Return the gamer pic.""" if not self.data: return None return self.data.display_pic.replace("&mode=Padding", "")
[ "def", "entity_picture", "(", "self", ")", "->", "str", ":", "if", "not", "self", ".", "data", ":", "return", "None", "return", "self", ".", "data", ".", "display_pic", ".", "replace", "(", "\"&mode=Padding\"", ",", "\"\"", ")" ]
[ 41, 4 ]
[ 46, 65 ]
python
en
['en', 'no', 'en']
True
XboxBaseSensorEntity.entity_registry_enabled_default
(self)
Return if the entity should be enabled when first added to the entity registry.
Return if the entity should be enabled when first added to the entity registry.
def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" return self.attribute == "online"
[ "def", "entity_registry_enabled_default", "(", "self", ")", "->", "bool", ":", "return", "self", ".", "attribute", "==", "\"online\"" ]
[ 49, 4 ]
[ 51, 41 ]
python
en
['en', 'en', 'en']
True
XboxBaseSensorEntity.device_info
(self)
Return a device description for device registry.
Return a device description for device registry.
def device_info(self): """Return a device description for device registry.""" return { "identifiers": {(DOMAIN, "xbox_live")}, "name": "Xbox Live", "manufacturer": "Microsoft", "model": "Xbox Live", "entry_type": "service", }
[ "def", "device_info", "(", "self", ")", ":", "return", "{", "\"identifiers\"", ":", "{", "(", "DOMAIN", ",", "\"xbox_live\"", ")", "}", ",", "\"name\"", ":", "\"Xbox Live\"", ",", "\"manufacturer\"", ":", "\"Microsoft\"", ",", "\"model\"", ":", "\"Xbox Live\"", ",", "\"entry_type\"", ":", "\"service\"", ",", "}" ]
[ 54, 4 ]
[ 62, 9 ]
python
en
['ro', 'fr', 'en']
False
get_service
(hass, config, discovery_info=None)
Get the Dovado Router SMS notification service.
Get the Dovado Router SMS notification service.
def get_service(hass, config, discovery_info=None): """Get the Dovado Router SMS notification service.""" return DovadoSMSNotificationService(hass.data[DOVADO_DOMAIN].client)
[ "def", "get_service", "(", "hass", ",", "config", ",", "discovery_info", "=", "None", ")", ":", "return", "DovadoSMSNotificationService", "(", "hass", ".", "data", "[", "DOVADO_DOMAIN", "]", ".", "client", ")" ]
[ 10, 0 ]
[ 12, 72 ]
python
en
['en', 'en', 'en']
True
DovadoSMSNotificationService.__init__
(self, client)
Initialize the service.
Initialize the service.
def __init__(self, client): """Initialize the service.""" self._client = client
[ "def", "__init__", "(", "self", ",", "client", ")", ":", "self", ".", "_client", "=", "client" ]
[ 18, 4 ]
[ 20, 29 ]
python
en
['en', 'en', 'en']
True
DovadoSMSNotificationService.send_message
(self, message, **kwargs)
Send SMS to the specified target phone number.
Send SMS to the specified target phone number.
def send_message(self, message, **kwargs): """Send SMS to the specified target phone number.""" target = kwargs.get(ATTR_TARGET) if not target: _LOGGER.error("One target is required") return self._client.send_sms(target, message)
[ "def", "send_message", "(", "self", ",", "message", ",", "*", "*", "kwargs", ")", ":", "target", "=", "kwargs", ".", "get", "(", "ATTR_TARGET", ")", "if", "not", "target", ":", "_LOGGER", ".", "error", "(", "\"One target is required\"", ")", "return", "self", ".", "_client", ".", "send_sms", "(", "target", ",", "message", ")" ]
[ 22, 4 ]
[ 30, 46 ]
python
en
['en', 'en', 'en']
True
setup
(hass, config)
Set up the Pilight component.
Set up the Pilight component.
def setup(hass, config): """Set up the Pilight component.""" host = config[DOMAIN][CONF_HOST] port = config[DOMAIN][CONF_PORT] send_throttler = CallRateDelayThrottle(hass, config[DOMAIN][CONF_SEND_DELAY]) try: pilight_client = pilight.Client(host=host, port=port) except (OSError, socket.timeout) as err: _LOGGER.error("Unable to connect to %s on port %s: %s", host, port, err) return False def start_pilight_client(_): """Run when Home Assistant starts.""" pilight_client.start() hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_pilight_client) def stop_pilight_client(_): """Run once when Home Assistant stops.""" pilight_client.stop() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_pilight_client) @send_throttler.limited def send_code(call): """Send RF code to the pilight-daemon.""" # Change type to dict from mappingproxy since data has to be JSON # serializable message_data = dict(call.data) try: pilight_client.send_code(message_data) except OSError: _LOGGER.error("Pilight send failed for %s", str(message_data)) hass.services.register(DOMAIN, SERVICE_NAME, send_code, schema=RF_CODE_SCHEMA) # Publish received codes on the HA event bus # A whitelist of codes to be published in the event bus whitelist = config[DOMAIN].get(CONF_WHITELIST) def handle_received_code(data): """Run when RF codes are received.""" # Unravel dict of dicts to make event_data cut in automation rule # possible data = dict( {"protocol": data["protocol"], "uuid": data["uuid"]}, **data["message"] ) # No whitelist defined, put data on event bus if not whitelist: hass.bus.fire(EVENT, data) # Check if data matches the defined whitelist elif all(str(data[key]) in whitelist[key] for key in whitelist): hass.bus.fire(EVENT, data) pilight_client.set_callback(handle_received_code) return True
[ "def", "setup", "(", "hass", ",", "config", ")", ":", "host", "=", "config", "[", "DOMAIN", "]", "[", "CONF_HOST", "]", "port", "=", "config", "[", "DOMAIN", "]", "[", "CONF_PORT", "]", "send_throttler", "=", "CallRateDelayThrottle", "(", "hass", ",", "config", "[", "DOMAIN", "]", "[", "CONF_SEND_DELAY", "]", ")", "try", ":", "pilight_client", "=", "pilight", ".", "Client", "(", "host", "=", "host", ",", "port", "=", "port", ")", "except", "(", "OSError", ",", "socket", ".", "timeout", ")", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Unable to connect to %s on port %s: %s\"", ",", "host", ",", "port", ",", "err", ")", "return", "False", "def", "start_pilight_client", "(", "_", ")", ":", "\"\"\"Run when Home Assistant starts.\"\"\"", "pilight_client", ".", "start", "(", ")", "hass", ".", "bus", ".", "listen_once", "(", "EVENT_HOMEASSISTANT_START", ",", "start_pilight_client", ")", "def", "stop_pilight_client", "(", "_", ")", ":", "\"\"\"Run once when Home Assistant stops.\"\"\"", "pilight_client", ".", "stop", "(", ")", "hass", ".", "bus", ".", "listen_once", "(", "EVENT_HOMEASSISTANT_STOP", ",", "stop_pilight_client", ")", "@", "send_throttler", ".", "limited", "def", "send_code", "(", "call", ")", ":", "\"\"\"Send RF code to the pilight-daemon.\"\"\"", "# Change type to dict from mappingproxy since data has to be JSON", "# serializable", "message_data", "=", "dict", "(", "call", ".", "data", ")", "try", ":", "pilight_client", ".", "send_code", "(", "message_data", ")", "except", "OSError", ":", "_LOGGER", ".", "error", "(", "\"Pilight send failed for %s\"", ",", "str", "(", "message_data", ")", ")", "hass", ".", "services", ".", "register", "(", "DOMAIN", ",", "SERVICE_NAME", ",", "send_code", ",", "schema", "=", "RF_CODE_SCHEMA", ")", "# Publish received codes on the HA event bus", "# A whitelist of codes to be published in the event bus", "whitelist", "=", "config", "[", "DOMAIN", "]", ".", "get", "(", "CONF_WHITELIST", ")", "def", "handle_received_code", "(", "data", ")", ":", "\"\"\"Run when RF codes are received.\"\"\"", "# Unravel dict of dicts to make event_data cut in automation rule", "# possible", "data", "=", "dict", "(", "{", "\"protocol\"", ":", "data", "[", "\"protocol\"", "]", ",", "\"uuid\"", ":", "data", "[", "\"uuid\"", "]", "}", ",", "*", "*", "data", "[", "\"message\"", "]", ")", "# No whitelist defined, put data on event bus", "if", "not", "whitelist", ":", "hass", ".", "bus", ".", "fire", "(", "EVENT", ",", "data", ")", "# Check if data matches the defined whitelist", "elif", "all", "(", "str", "(", "data", "[", "key", "]", ")", "in", "whitelist", "[", "key", "]", "for", "key", "in", "whitelist", ")", ":", "hass", ".", "bus", ".", "fire", "(", "EVENT", ",", "data", ")", "pilight_client", ".", "set_callback", "(", "handle_received_code", ")", "return", "True" ]
[ 60, 0 ]
[ 120, 15 ]
python
en
['en', 'en', 'en']
True
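The whitelist check in handle_received_code compares stringified values, so integer codes in the received data still match string entries in the configuration. A small illustration (all values are made up):

whitelist = {"protocol": ["kaku_switch"], "id": ["12345678"]}
data = {"protocol": "kaku_switch", "uuid": "0000-b8-27-eb-123456", "id": 12345678, "state": "on"}

matches = all(str(data[key]) in whitelist[key] for key in whitelist)
print(matches)  # True: every whitelisted key's stringified value is listed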
CallRateDelayThrottle.__init__
(self, hass, delay_seconds: float)
Initialize the delay handler.
Initialize the delay handler.
def __init__(self, hass, delay_seconds: float) -> None: """Initialize the delay handler.""" self._delay = timedelta(seconds=max(0.0, delay_seconds)) self._queue = [] self._active = False self._lock = threading.Lock() self._next_ts = dt_util.utcnow() self._schedule = functools.partial(track_point_in_utc_time, hass)
[ "def", "__init__", "(", "self", ",", "hass", ",", "delay_seconds", ":", "float", ")", "->", "None", ":", "self", ".", "_delay", "=", "timedelta", "(", "seconds", "=", "max", "(", "0.0", ",", "delay_seconds", ")", ")", "self", ".", "_queue", "=", "[", "]", "self", ".", "_active", "=", "False", "self", ".", "_lock", "=", "threading", ".", "Lock", "(", ")", "self", ".", "_next_ts", "=", "dt_util", ".", "utcnow", "(", ")", "self", ".", "_schedule", "=", "functools", ".", "partial", "(", "track_point_in_utc_time", ",", "hass", ")" ]
[ 135, 4 ]
[ 142, 73 ]
python
en
['en', 'en', 'en']
True
CallRateDelayThrottle.limited
(self, method)
Decorate a method to delay successive calls to it.
Decorate a method to delay successive calls to it.
def limited(self, method): """Decorate a method to delay successive calls to it.""" @functools.wraps(method) def decorated(*args, **kwargs): """Delay a call.""" if self._delay.total_seconds() == 0.0: method(*args, **kwargs) return def action(event): """Wrap an action that gets scheduled.""" method(*args, **kwargs) with self._lock: self._next_ts = dt_util.utcnow() + self._delay if not self._queue: self._active = False else: next_action = self._queue.pop(0) self._schedule(next_action, self._next_ts) with self._lock: if self._active: self._queue.append(action) else: self._active = True schedule_ts = max(dt_util.utcnow(), self._next_ts) self._schedule(action, schedule_ts) return decorated
[ "def", "limited", "(", "self", ",", "method", ")", ":", "@", "functools", ".", "wraps", "(", "method", ")", "def", "decorated", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Delay a call.\"\"\"", "if", "self", ".", "_delay", ".", "total_seconds", "(", ")", "==", "0.0", ":", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "def", "action", "(", "event", ")", ":", "\"\"\"Wrap an action that gets scheduled.\"\"\"", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "with", "self", ".", "_lock", ":", "self", ".", "_next_ts", "=", "dt_util", ".", "utcnow", "(", ")", "+", "self", ".", "_delay", "if", "not", "self", ".", "_queue", ":", "self", ".", "_active", "=", "False", "else", ":", "next_action", "=", "self", ".", "_queue", ".", "pop", "(", "0", ")", "self", ".", "_schedule", "(", "next_action", ",", "self", ".", "_next_ts", ")", "with", "self", ".", "_lock", ":", "if", "self", ".", "_active", ":", "self", ".", "_queue", ".", "append", "(", "action", ")", "else", ":", "self", ".", "_active", "=", "True", "schedule_ts", "=", "max", "(", "dt_util", ".", "utcnow", "(", ")", ",", "self", ".", "_next_ts", ")", "self", ".", "_schedule", "(", "action", ",", "schedule_ts", ")", "return", "decorated" ]
[ 144, 4 ]
[ 175, 24 ]
python
en
['en', 'en', 'en']
True
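The decorator above leans on Home Assistant's track_point_in_utc_time scheduler. A standalone sketch of the same delay-throttle pattern, substituting threading.Timer for that scheduler (purely an assumption for illustration), looks like this:

import functools
import threading
import time

class DelayThrottle:
    """Minimal stand-in for CallRateDelayThrottle using threading.Timer."""

    def __init__(self, delay_seconds):
        self._delay = max(0.0, delay_seconds)
        self._queue = []
        self._active = False
        self._lock = threading.Lock()
        self._next_ts = time.monotonic()

    def limited(self, method):
        @functools.wraps(method)
        def decorated(*args, **kwargs):
            if self._delay == 0.0:
                method(*args, **kwargs)
                return

            def action():
                method(*args, **kwargs)
                with self._lock:
                    self._next_ts = time.monotonic() + self._delay
                    if self._queue:
                        # chain the next queued call one delay later
                        threading.Timer(self._delay, self._queue.pop(0)).start()
                    else:
                        self._active = False

            with self._lock:
                if self._active:
                    self._queue.append(action)  # a call is in flight; wait our turn
                    return
                self._active = True
                wait = max(0.0, self._next_ts - time.monotonic())
            threading.Timer(wait, action).start()

        return decorated

throttle = DelayThrottle(0.5)

@throttle.limited
def send(code):
    print(f"{time.monotonic():.2f} sent {code}")

for i in range(3):
    send(i)    # executions end up spaced ~0.5 s apart
time.sleep(2)  # keep the main thread alive so the timers can fire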
run_upload_docker
()
Run the Docker image to upload the translations.
Run the Docker image to upload the translations.
def run_upload_docker(): """Run the Docker image to upload the translations.""" print("Running Docker to upload latest translations.") run = subprocess.run( [ "docker", "run", "-v", f"{LOCAL_FILE}:{CONTAINER_FILE}", "--rm", f"lokalise/lokalise-cli-2:{CLI_2_DOCKER_IMAGE}", # Lokalise command "lokalise2", "--token", get_lokalise_token(), "--project-id", CORE_PROJECT_ID, "file", "upload", "--file", CONTAINER_FILE, "--lang-iso", LANG_ISO, "--convert-placeholders=false", "--replace-modified", ], ) print() if run.returncode != 0: raise ExitApp("Failed to upload translations")
[ "def", "run_upload_docker", "(", ")", ":", "print", "(", "\"Running Docker to upload latest translations.\"", ")", "run", "=", "subprocess", ".", "run", "(", "[", "\"docker\"", ",", "\"run\"", ",", "\"-v\"", ",", "f\"{LOCAL_FILE}:{CONTAINER_FILE}\"", ",", "\"--rm\"", ",", "f\"lokalise/lokalise-cli-2:{CLI_2_DOCKER_IMAGE}\"", ",", "# Lokalise command", "\"lokalise2\"", ",", "\"--token\"", ",", "get_lokalise_token", "(", ")", ",", "\"--project-id\"", ",", "CORE_PROJECT_ID", ",", "\"file\"", ",", "\"upload\"", ",", "\"--file\"", ",", "CONTAINER_FILE", ",", "\"--lang-iso\"", ",", "LANG_ISO", ",", "\"--convert-placeholders=false\"", ",", "\"--replace-modified\"", ",", "]", ",", ")", "print", "(", ")", "if", "run", ".", "returncode", "!=", "0", ":", "raise", "ExitApp", "(", "\"Failed to upload translations\"", ")" ]
[ 18, 0 ]
[ 48, 56 ]
python
en
['en', 'en', 'en']
True
generate_upload_data
()
Generate the data for uploading.
Generate the data for uploading.
def generate_upload_data(): """Generate the data for uploading.""" translations = json.loads((INTEGRATIONS_DIR.parent / "strings.json").read_text()) translations["component"] = {} for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"): component = path.parent.name match = FILENAME_FORMAT.search(path.name) platform = match.group("suffix") if match else None parent = translations["component"].setdefault(component, {}) if platform: platforms = parent.setdefault("platform", {}) parent = platforms.setdefault(platform, {}) parent.update(json.loads(path.read_text())) return translations
[ "def", "generate_upload_data", "(", ")", ":", "translations", "=", "json", ".", "loads", "(", "(", "INTEGRATIONS_DIR", ".", "parent", "/", "\"strings.json\"", ")", ".", "read_text", "(", ")", ")", "translations", "[", "\"component\"", "]", "=", "{", "}", "for", "path", "in", "INTEGRATIONS_DIR", ".", "glob", "(", "f\"*{os.sep}strings*.json\"", ")", ":", "component", "=", "path", ".", "parent", ".", "name", "match", "=", "FILENAME_FORMAT", ".", "search", "(", "path", ".", "name", ")", "platform", "=", "match", ".", "group", "(", "\"suffix\"", ")", "if", "match", "else", "None", "parent", "=", "translations", "[", "\"component\"", "]", ".", "setdefault", "(", "component", ",", "{", "}", ")", "if", "platform", ":", "platforms", "=", "parent", ".", "setdefault", "(", "\"platform\"", ",", "{", "}", ")", "parent", "=", "platforms", ".", "setdefault", "(", "platform", ",", "{", "}", ")", "parent", ".", "update", "(", "json", ".", "loads", "(", "path", ".", "read_text", "(", ")", ")", ")", "return", "translations" ]
[ 51, 0 ]
[ 69, 23 ]
python
en
['en', 'en', 'en']
True
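The merge above nests platform-specific strings files under a "platform" key of their component. Roughly, the produced structure looks like this (component and platform names are made up; FILENAME_FORMAT is assumed to extract the `sensor` suffix from a file named `strings.sensor.json`):

translations = {
    # ...top-level keys from homeassistant/strings.json...
    "component": {
        "hue": {
            # contents of homeassistant/components/hue/strings.json
        },
        "mqtt": {
            "platform": {
                "sensor": {
                    # contents of homeassistant/components/mqtt/strings.sensor.json
                }
            }
        },
    },
}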
run
()
Run the script.
Run the script.
def run(): """Run the script.""" if get_current_branch() != "dev" and os.environ.get("AZURE_BRANCH") != "dev": raise ExitApp( "Please only run the translations upload script from a clean checkout of dev." ) translations = generate_upload_data() LOCAL_FILE.parent.mkdir(parents=True, exist_ok=True) LOCAL_FILE.write_text(json.dumps(translations, indent=4, sort_keys=True)) run_upload_docker() return 0
[ "def", "run", "(", ")", ":", "if", "get_current_branch", "(", ")", "!=", "\"dev\"", "and", "os", ".", "environ", ".", "get", "(", "\"AZURE_BRANCH\"", ")", "!=", "\"dev\"", ":", "raise", "ExitApp", "(", "\"Please only run the translations upload script from a clean checkout of dev.\"", ")", "translations", "=", "generate_upload_data", "(", ")", "LOCAL_FILE", ".", "parent", ".", "mkdir", "(", "parents", "=", "True", ",", "exist_ok", "=", "True", ")", "LOCAL_FILE", ".", "write_text", "(", "json", ".", "dumps", "(", "translations", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")", ")", "run_upload_docker", "(", ")", "return", "0" ]
[ 72, 0 ]
[ 86, 12 ]
python
en
['en', 'co', 'en']
True
mock_user_data
()
Mock writing user info.
Mock writing user info.
def mock_user_data(): """Mock writing user info.""" with patch("hass_nabucasa.Cloud.write_user_info") as writer: yield writer
[ "def", "mock_user_data", "(", ")", ":", "with", "patch", "(", "\"hass_nabucasa.Cloud.write_user_info\"", ")", "as", "writer", ":", "yield", "writer" ]
[ 12, 0 ]
[ 15, 20 ]
python
es
['es', 'tr', 'pt']
False
mock_cloud_fixture
(hass)
Fixture for cloud component.
Fixture for cloud component.
def mock_cloud_fixture(hass): """Fixture for cloud component.""" hass.loop.run_until_complete(mock_cloud(hass)) return mock_cloud_prefs(hass)
[ "def", "mock_cloud_fixture", "(", "hass", ")", ":", "hass", ".", "loop", ".", "run_until_complete", "(", "mock_cloud", "(", "hass", ")", ")", "return", "mock_cloud_prefs", "(", "hass", ")" ]
[ 19, 0 ]
[ 22, 33 ]
python
en
['en', 'en', 'en']
True
cloud_prefs
(hass)
Fixture for cloud preferences.
Fixture for cloud preferences.
async def cloud_prefs(hass): """Fixture for cloud preferences.""" cloud_prefs = prefs.CloudPreferences(hass) await cloud_prefs.async_initialize() return cloud_prefs
[ "async", "def", "cloud_prefs", "(", "hass", ")", ":", "cloud_prefs", "=", "prefs", ".", "CloudPreferences", "(", "hass", ")", "await", "cloud_prefs", ".", "async_initialize", "(", ")", "return", "cloud_prefs" ]
[ 26, 0 ]
[ 30, 22 ]
python
en
['en', 'en', 'en']
True
mock_cloud_setup
(hass)
Set up the cloud.
Set up the cloud.
async def mock_cloud_setup(hass): """Set up the cloud.""" await mock_cloud(hass)
[ "async", "def", "mock_cloud_setup", "(", "hass", ")", ":", "await", "mock_cloud", "(", "hass", ")" ]
[ 34, 0 ]
[ 36, 26 ]
python
en
['en', 'fr', 'en']
True
mock_cloud_login
(hass, mock_cloud_setup)
Mock cloud is logged in.
Mock cloud is logged in.
def mock_cloud_login(hass, mock_cloud_setup): """Mock cloud is logged in.""" hass.data[const.DOMAIN].id_token = jwt.encode( { "email": "hello@home-assistant.io", "custom:sub-exp": "2018-01-03", "cognito:username": "abcdefghjkl", }, "test", )
[ "def", "mock_cloud_login", "(", "hass", ",", "mock_cloud_setup", ")", ":", "hass", ".", "data", "[", "const", ".", "DOMAIN", "]", ".", "id_token", "=", "jwt", ".", "encode", "(", "{", "\"email\"", ":", "\"hello@home-assistant.io\"", ",", "\"custom:sub-exp\"", ":", "\"2018-01-03\"", ",", "\"cognito:username\"", ":", "\"abcdefghjkl\"", ",", "}", ",", "\"test\"", ",", ")" ]
[ 40, 0 ]
[ 49, 5 ]
python
en
['en', 'fy', 'en']
True
test_setup_configuration
(hass)
Test for successfully setting up the IPMA platform.
Test for successfully setting up the IPMA platform.
async def test_setup_configuration(hass): """Test for successfully setting up the IPMA platform.""" with patch( "homeassistant.components.ipma.weather.async_get_location", return_value=MockLocation(), ): assert await async_setup_component( hass, weather.DOMAIN, {"weather": {"name": "HomeTown", "platform": "ipma", "mode": "hourly"}}, ) await hass.async_block_till_done() state = hass.states.get("weather.hometown") assert state.state == "rainy" data = state.attributes assert data.get(ATTR_WEATHER_TEMPERATURE) == 18.0 assert data.get(ATTR_WEATHER_HUMIDITY) == 71 assert data.get(ATTR_WEATHER_PRESSURE) == 1000.0 assert data.get(ATTR_WEATHER_WIND_SPEED) == 3.94 assert data.get(ATTR_WEATHER_WIND_BEARING) == "NW" assert state.attributes.get("friendly_name") == "HomeTown"
[ "async", "def", "test_setup_configuration", "(", "hass", ")", ":", "with", "patch", "(", "\"homeassistant.components.ipma.weather.async_get_location\"", ",", "return_value", "=", "MockLocation", "(", ")", ",", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "weather", ".", "DOMAIN", ",", "{", "\"weather\"", ":", "{", "\"name\"", ":", "\"HomeTown\"", ",", "\"platform\"", ":", "\"ipma\"", ",", "\"mode\"", ":", "\"hourly\"", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"weather.hometown\"", ")", "assert", "state", ".", "state", "==", "\"rainy\"", "data", "=", "state", ".", "attributes", "assert", "data", ".", "get", "(", "ATTR_WEATHER_TEMPERATURE", ")", "==", "18.0", "assert", "data", ".", "get", "(", "ATTR_WEATHER_HUMIDITY", ")", "==", "71", "assert", "data", ".", "get", "(", "ATTR_WEATHER_PRESSURE", ")", "==", "1000.0", "assert", "data", ".", "get", "(", "ATTR_WEATHER_WIND_SPEED", ")", "==", "3.94", "assert", "data", ".", "get", "(", "ATTR_WEATHER_WIND_BEARING", ")", "==", "\"NW\"", "assert", "state", ".", "attributes", ".", "get", "(", "\"friendly_name\"", ")", "==", "\"HomeTown\"" ]
[ 131, 0 ]
[ 153, 62 ]
python
en
['en', 'en', 'en']
True
test_setup_config_flow
(hass)
Test for successfully setting up the IPMA platform.
Test for successfully setting up the IPMA platform.
async def test_setup_config_flow(hass): """Test for successfully setting up the IPMA platform.""" with patch( "homeassistant.components.ipma.weather.async_get_location", return_value=MockLocation(), ): entry = MockConfigEntry(domain="ipma", data=TEST_CONFIG) await hass.config_entries.async_forward_entry_setup(entry, WEATHER_DOMAIN) await hass.async_block_till_done() state = hass.states.get("weather.hometown") assert state.state == "rainy" data = state.attributes assert data.get(ATTR_WEATHER_TEMPERATURE) == 18.0 assert data.get(ATTR_WEATHER_HUMIDITY) == 71 assert data.get(ATTR_WEATHER_PRESSURE) == 1000.0 assert data.get(ATTR_WEATHER_WIND_SPEED) == 3.94 assert data.get(ATTR_WEATHER_WIND_BEARING) == "NW" assert state.attributes.get("friendly_name") == "HomeTown"
[ "async", "def", "test_setup_config_flow", "(", "hass", ")", ":", "with", "patch", "(", "\"homeassistant.components.ipma.weather.async_get_location\"", ",", "return_value", "=", "MockLocation", "(", ")", ",", ")", ":", "entry", "=", "MockConfigEntry", "(", "domain", "=", "\"ipma\"", ",", "data", "=", "TEST_CONFIG", ")", "await", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "entry", ",", "WEATHER_DOMAIN", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"weather.hometown\"", ")", "assert", "state", ".", "state", "==", "\"rainy\"", "data", "=", "state", ".", "attributes", "assert", "data", ".", "get", "(", "ATTR_WEATHER_TEMPERATURE", ")", "==", "18.0", "assert", "data", ".", "get", "(", "ATTR_WEATHER_HUMIDITY", ")", "==", "71", "assert", "data", ".", "get", "(", "ATTR_WEATHER_PRESSURE", ")", "==", "1000.0", "assert", "data", ".", "get", "(", "ATTR_WEATHER_WIND_SPEED", ")", "==", "3.94", "assert", "data", ".", "get", "(", "ATTR_WEATHER_WIND_BEARING", ")", "==", "\"NW\"", "assert", "state", ".", "attributes", ".", "get", "(", "\"friendly_name\"", ")", "==", "\"HomeTown\"" ]
[ 156, 0 ]
[ 175, 62 ]
python
en
['en', 'en', 'en']
True
test_daily_forecast
(hass)
Test for successfully getting daily forecast.
Test for successfully getting daily forecast.
async def test_daily_forecast(hass): """Test for successfully getting daily forecast.""" with patch( "homeassistant.components.ipma.weather.async_get_location", return_value=MockLocation(), ): assert await async_setup_component( hass, weather.DOMAIN, {"weather": {"name": "HomeTown", "platform": "ipma", "mode": "daily"}}, ) await hass.async_block_till_done() state = hass.states.get("weather.hometown") assert state.state == "rainy" forecast = state.attributes.get(ATTR_FORECAST)[0] assert forecast.get(ATTR_FORECAST_TIME) == "2020-01-15T00:00:00" assert forecast.get(ATTR_FORECAST_CONDITION) == "rainy" assert forecast.get(ATTR_FORECAST_TEMP) == 16.2 assert forecast.get(ATTR_FORECAST_TEMP_LOW) == 10.6 assert forecast.get(ATTR_FORECAST_PRECIPITATION_PROBABILITY) == "100.0" assert forecast.get(ATTR_FORECAST_WIND_SPEED) == "10" assert forecast.get(ATTR_FORECAST_WIND_BEARING) == "S"
[ "async", "def", "test_daily_forecast", "(", "hass", ")", ":", "with", "patch", "(", "\"homeassistant.components.ipma.weather.async_get_location\"", ",", "return_value", "=", "MockLocation", "(", ")", ",", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "weather", ".", "DOMAIN", ",", "{", "\"weather\"", ":", "{", "\"name\"", ":", "\"HomeTown\"", ",", "\"platform\"", ":", "\"ipma\"", ",", "\"mode\"", ":", "\"daily\"", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"weather.hometown\"", ")", "assert", "state", ".", "state", "==", "\"rainy\"", "forecast", "=", "state", ".", "attributes", ".", "get", "(", "ATTR_FORECAST", ")", "[", "0", "]", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_TIME", ")", "==", "\"2020-01-15T00:00:00\"", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_CONDITION", ")", "==", "\"rainy\"", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_TEMP", ")", "==", "16.2", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_TEMP_LOW", ")", "==", "10.6", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_PRECIPITATION_PROBABILITY", ")", "==", "\"100.0\"", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_WIND_SPEED", ")", "==", "\"10\"", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_WIND_BEARING", ")", "==", "\"S\"" ]
[ 178, 0 ]
[ 201, 58 ]
python
en
['en', 'en', 'en']
True
test_hourly_forecast
(hass)
Test for successfully getting hourly forecast.
Test for successfully getting hourly forecast.
async def test_hourly_forecast(hass): """Test for successfully getting hourly forecast.""" with patch( "homeassistant.components.ipma.weather.async_get_location", return_value=MockLocation(), ): assert await async_setup_component( hass, weather.DOMAIN, {"weather": {"name": "HomeTown", "platform": "ipma", "mode": "hourly"}}, ) await hass.async_block_till_done() state = hass.states.get("weather.hometown") assert state.state == "rainy" forecast = state.attributes.get(ATTR_FORECAST)[0] assert forecast.get(ATTR_FORECAST_CONDITION) == "rainy" assert forecast.get(ATTR_FORECAST_TEMP) == 7.7 assert forecast.get(ATTR_FORECAST_PRECIPITATION_PROBABILITY) == 80.0 assert forecast.get(ATTR_FORECAST_WIND_SPEED) == "32.7" assert forecast.get(ATTR_FORECAST_WIND_BEARING) == "S"
[ "async", "def", "test_hourly_forecast", "(", "hass", ")", ":", "with", "patch", "(", "\"homeassistant.components.ipma.weather.async_get_location\"", ",", "return_value", "=", "MockLocation", "(", ")", ",", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "weather", ".", "DOMAIN", ",", "{", "\"weather\"", ":", "{", "\"name\"", ":", "\"HomeTown\"", ",", "\"platform\"", ":", "\"ipma\"", ",", "\"mode\"", ":", "\"hourly\"", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"weather.hometown\"", ")", "assert", "state", ".", "state", "==", "\"rainy\"", "forecast", "=", "state", ".", "attributes", ".", "get", "(", "ATTR_FORECAST", ")", "[", "0", "]", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_CONDITION", ")", "==", "\"rainy\"", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_TEMP", ")", "==", "7.7", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_PRECIPITATION_PROBABILITY", ")", "==", "80.0", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_WIND_SPEED", ")", "==", "\"32.7\"", "assert", "forecast", ".", "get", "(", "ATTR_FORECAST_WIND_BEARING", ")", "==", "\"S\"" ]
[ 204, 0 ]
[ 225, 58 ]
python
en
['en', 'en', 'en']
True
MockLocation.observation
(self, api)
Mock Observation.
Mock Observation.
async def observation(self, api): """Mock Observation.""" Observation = namedtuple( "Observation", [ "accumulated_precipitation", "humidity", "pressure", "radiation", "temperature", "wind_direction", "wind_intensity_km", ], ) return Observation(0.0, 71.0, 1000.0, 0.0, 18.0, "NW", 3.94)
[ "async", "def", "observation", "(", "self", ",", "api", ")", ":", "Observation", "=", "namedtuple", "(", "\"Observation\"", ",", "[", "\"accumulated_precipitation\"", ",", "\"humidity\"", ",", "\"pressure\"", ",", "\"radiation\"", ",", "\"temperature\"", ",", "\"wind_direction\"", ",", "\"wind_intensity_km\"", ",", "]", ",", ")", "return", "Observation", "(", "0.0", ",", "71.0", ",", "1000.0", ",", "0.0", ",", "18.0", ",", "\"NW\"", ",", "3.94", ")" ]
[ 37, 4 ]
[ 52, 68 ]
python
en
['en', 'da', 'en']
False
MockLocation.forecast
(self, api)
Mock Forecast.
Mock Forecast.
async def forecast(self, api): """Mock Forecast.""" Forecast = namedtuple( "Forecast", [ "feels_like_temperature", "forecast_date", "forecasted_hours", "humidity", "max_temperature", "min_temperature", "precipitation_probability", "temperature", "update_date", "weather_type", "wind_direction", "wind_strength", ], ) return [ Forecast( None, "2020-01-15T00:00:00", 24, None, 16.2, 10.6, "100.0", 13.4, "2020-01-15T07:51:00", 9, "S", "10", ), Forecast( "7.7", now().utcnow().strftime("%Y-%m-%dT%H:%M:%S"), 1, "86.9", None, None, "80.0", 10.6, "2020-01-15T07:51:00", 10, "S", "32.7", ), ]
[ "async", "def", "forecast", "(", "self", ",", "api", ")", ":", "Forecast", "=", "namedtuple", "(", "\"Forecast\"", ",", "[", "\"feels_like_temperature\"", ",", "\"forecast_date\"", ",", "\"forecasted_hours\"", ",", "\"humidity\"", ",", "\"max_temperature\"", ",", "\"min_temperature\"", ",", "\"precipitation_probability\"", ",", "\"temperature\"", ",", "\"update_date\"", ",", "\"weather_type\"", ",", "\"wind_direction\"", ",", "\"wind_strength\"", ",", "]", ",", ")", "return", "[", "Forecast", "(", "None", ",", "\"2020-01-15T00:00:00\"", ",", "24", ",", "None", ",", "16.2", ",", "10.6", ",", "\"100.0\"", ",", "13.4", ",", "\"2020-01-15T07:51:00\"", ",", "9", ",", "\"S\"", ",", "\"10\"", ",", ")", ",", "Forecast", "(", "\"7.7\"", ",", "now", "(", ")", ".", "utcnow", "(", ")", ".", "strftime", "(", "\"%Y-%m-%dT%H:%M:%S\"", ")", ",", "1", ",", "\"86.9\"", ",", "None", ",", "None", ",", "\"80.0\"", ",", "10.6", ",", "\"2020-01-15T07:51:00\"", ",", "10", ",", "\"S\"", ",", "\"32.7\"", ",", ")", ",", "]" ]
[ 54, 4 ]
[ 103, 9 ]
python
en
['en', 'zu', 'en']
False
MockLocation.name
(self)
Mock location.
Mock location.
def name(self): """Mock location.""" return "HomeTown"
[ "def", "name", "(", "self", ")", ":", "return", "\"HomeTown\"" ]
[ 106, 4 ]
[ 108, 25 ]
python
en
['en', 'ja', 'en']
False
MockLocation.station_latitude
(self)
Mock latitude.
Mock latitude.
def station_latitude(self): """Mock latitude.""" return 0
[ "def", "station_latitude", "(", "self", ")", ":", "return", "0" ]
[ 111, 4 ]
[ 113, 16 ]
python
en
['et', 'xh', 'en']
False
MockLocation.global_id_local
(self)
Mock global identifier of the location.
Mock global identifier of the location.
def global_id_local(self): """Mock global identifier of the location.""" return 1130600
[ "def", "global_id_local", "(", "self", ")", ":", "return", "1130600" ]
[ 116, 4 ]
[ 118, 22 ]
python
en
['en', 'fy', 'en']
True
MockLocation.id_station
(self)
Mock identifier of the station.
Mock identifier of the station.
def id_station(self): """Mock identifier of the station.""" return 1200545
[ "def", "id_station", "(", "self", ")", ":", "return", "1200545" ]
[ 121, 4 ]
[ 123, 22 ]
python
en
['en', 'fy', 'en']
True
MockLocation.station_longitude
(self)
Mock longitude.
Mock longitude.
def station_longitude(self): """Mock longitude.""" return 0
[ "def", "station_longitude", "(", "self", ")", ":", "return", "0" ]
[ 126, 4 ]
[ 128, 16 ]
python
en
['et', 'xh', 'en']
False
count_flops_params
(model, x, custom_ops=None, verbose=True, mode='default')
Count FLOPs and Params of the given model. This function would identify the mask on the module and take the pruned shape into consideration. Note that, for structured pruning, we only identify the remaining filters according to their masks, and do not take the pruned input channels into consideration, so the calculated FLOPs will be larger than the real number. Parameters --------- model : nn.Module Target model. x : tuple or tensor The input shape of data (a tuple), a tensor or a tuple of tensors as input data. custom_ops : dict A mapping of (module -> torch.nn.Module : custom operation) the custom operation is a callback function to calculate the module flops and parameters; it will overwrite the default operation. for reference, please see ``ops`` in ``ModelProfiler``. verbose : bool If False, mute detailed information about modules. Default is True. mode : str the mode of how to collect information. If the mode is set to ``default``, only the information of convolution and linear will be collected. If the mode is set to ``full``, other operations will also be collected. Returns ------- tuple of int, int and dict Representing total FLOPs, total parameters, and a detailed list of results respectively. The list of results is a list of dicts, each of which contains (name, module_type, weight_shape, flops, params, input_size, output_size) as its keys.
Count FLOPs and Params of the given model. This function would identify the mask on the module and take the pruned shape into consideration. Note that, for structured pruning, we only identify the remaining filters according to their masks, and do not take the pruned input channels into consideration, so the calculated FLOPs will be larger than the real number.
def count_flops_params(model, x, custom_ops=None, verbose=True, mode='default'): """ Count FLOPs and Params of the given model. This function would identify the mask on the module and take the pruned shape into consideration. Note that, for structured pruning, we only identify the remaining filters according to their masks, and do not take the pruned input channels into consideration, so the calculated FLOPs will be larger than the real number. Parameters --------- model : nn.Module Target model. x : tuple or tensor The input shape of data (a tuple), a tensor or a tuple of tensors as input data. custom_ops : dict A mapping of (module -> torch.nn.Module : custom operation) the custom operation is a callback function to calculate the module flops and parameters; it will overwrite the default operation. for reference, please see ``ops`` in ``ModelProfiler``. verbose : bool If False, mute detailed information about modules. Default is True. mode : str the mode of how to collect information. If the mode is set to ``default``, only the information of convolution and linear will be collected. If the mode is set to ``full``, other operations will also be collected. Returns ------- tuple of int, int and dict Representing total FLOPs, total parameters, and a detailed list of results respectively. The list of results is a list of dicts, each of which contains (name, module_type, weight_shape, flops, params, input_size, output_size) as its keys. """ assert isinstance(x, tuple) or isinstance(x, torch.Tensor) assert mode in ['default', 'full'] original_device = next(model.parameters()).device training = model.training if isinstance(x, tuple) and all(isinstance(t, int) for t in x): x = (torch.zeros(x).to(original_device), ) elif torch.is_tensor(x): x = (x.to(original_device), ) else: x = (t.to(original_device) for t in x) handler_collection = [] profiler = ModelProfiler(custom_ops, mode) prev_m = None for name, m in model.named_modules(): # dealing with weight mask here if isinstance(prev_m, PrunerModuleWrapper): # weight mask is set to weight mask of its parent (wrapper) weight_mask = prev_m.weight_mask m.weight_mask = weight_mask prev_m = m if type(m) in profiler.ops: # if a leaf node _handler = m.register_forward_hook(functools.partial(profiler.count_module, name=name)) handler_collection.append(_handler) model.eval() with torch.no_grad(): model(*x) # restore origin status model.train(training).to(original_device) for handler in handler_collection: handler.remove() if verbose: # get detail information print(profiler.format_results()) print(f'FLOPs total: {profiler.sum_flops()}') print(f'#Params total: {profiler.sum_params()}') return profiler.sum_flops(), profiler.sum_params(), profiler.results
[ "def", "count_flops_params", "(", "model", ",", "x", ",", "custom_ops", "=", "None", ",", "verbose", "=", "True", ",", "mode", "=", "'default'", ")", ":", "assert", "isinstance", "(", "x", ",", "tuple", ")", "or", "isinstance", "(", "x", ",", "torch", ".", "Tensor", ")", "assert", "mode", "in", "[", "'default'", ",", "'full'", "]", "original_device", "=", "next", "(", "model", ".", "parameters", "(", ")", ")", ".", "device", "training", "=", "model", ".", "training", "if", "isinstance", "(", "x", ",", "tuple", ")", "and", "all", "(", "isinstance", "(", "t", ",", "int", ")", "for", "t", "in", "x", ")", ":", "x", "=", "(", "torch", ".", "zeros", "(", "x", ")", ".", "to", "(", "original_device", ")", ",", ")", "elif", "torch", ".", "is_tensor", "(", "x", ")", ":", "x", "=", "(", "x", ".", "to", "(", "original_device", ")", ",", ")", "else", ":", "x", "=", "(", "t", ".", "to", "(", "original_device", ")", "for", "t", "in", "x", ")", "handler_collection", "=", "[", "]", "profiler", "=", "ModelProfiler", "(", "custom_ops", ",", "mode", ")", "prev_m", "=", "None", "for", "name", ",", "m", "in", "model", ".", "named_modules", "(", ")", ":", "# dealing with weight mask here", "if", "isinstance", "(", "prev_m", ",", "PrunerModuleWrapper", ")", ":", "# weight mask is set to weight mask of its parent (wrapper)", "weight_mask", "=", "prev_m", ".", "weight_mask", "m", ".", "weight_mask", "=", "weight_mask", "prev_m", "=", "m", "if", "type", "(", "m", ")", "in", "profiler", ".", "ops", ":", "# if a leaf node", "_handler", "=", "m", ".", "register_forward_hook", "(", "functools", ".", "partial", "(", "profiler", ".", "count_module", ",", "name", "=", "name", ")", ")", "handler_collection", ".", "append", "(", "_handler", ")", "model", ".", "eval", "(", ")", "with", "torch", ".", "no_grad", "(", ")", ":", "model", "(", "*", "x", ")", "# restore origin status", "model", ".", "train", "(", "training", ")", ".", "to", "(", "original_device", ")", "for", "handler", "in", "handler_collection", ":", "handler", ".", "remove", "(", ")", "if", "verbose", ":", "# get detail information", "print", "(", "profiler", ".", "format_results", "(", ")", ")", "print", "(", "f'FLOPs total: {profiler.sum_flops()}'", ")", "print", "(", "f'#Params total: {profiler.sum_params()}'", ")", "return", "profiler", ".", "sum_flops", "(", ")", ",", "profiler", ".", "sum_params", "(", ")", ",", "profiler", ".", "results" ]
[ 330, 0 ]
[ 410, 72 ]
python
en
['en', 'error', 'th']
False
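A minimal usage sketch for count_flops_params, assuming the NNI import path below (recent NNI releases keep this counter under nni.compression.pytorch.utils.counter); the toy model and input shape are illustrative only.

import torch.nn as nn
from nni.compression.pytorch.utils.counter import count_flops_params  # assumed import path

model = nn.Sequential(
    nn.Conv2d(3, 16, kernel_size=3, padding=1),  # counted in 'default' mode
    nn.ReLU(),                                   # not in the profiler's ops table, so skipped
    nn.Flatten(),
    nn.Linear(16 * 32 * 32, 10),                 # counted in 'default' mode
)
# A tuple of ints is turned into a zero tensor of that shape internally.
flops, params, results = count_flops_params(model, (1, 3, 32, 32), verbose=False)
print(f"FLOPs: {flops}, params: {params}, modules profiled: {len(results)}")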
ModelProfiler.__init__
(self, custom_ops=None, mode='default')
ModelProfiler is used to share state with hooks.

Parameters
----------
custom_ops: dict
    a mapping of (module -> torch.nn.Module : custom operation)
    the custom operation is a callback function to calculate
    the module flops, parameters and the weight shape, it will overwrite the default operation.
    for reference, please see ``self.ops``.
mode: str
    the mode of how to collect information. If the mode is set to `default`,
    only the information of convolution, linear and rnn modules will be collected.
    If the mode is set to `full`, other operations will also be collected.
ModelProfiler is used to share state with hooks.
def __init__(self, custom_ops=None, mode='default'):
    """
    ModelProfiler is used to share state with hooks.

    Parameters
    ----------
    custom_ops: dict
        a mapping of (module -> torch.nn.Module : custom operation)
        the custom operation is a callback function to calculate
        the module flops, parameters and the weight shape, it will overwrite the default operation.
        for reference, please see ``self.ops``.
    mode: str
        the mode of how to collect information. If the mode is set to `default`,
        only the information of convolution, linear and rnn modules will be collected.
        If the mode is set to `full`, other operations will also be collected.
    """
    self.ops = {
        nn.Conv1d: self._count_convNd,
        nn.Conv2d: self._count_convNd,
        nn.Conv3d: self._count_convNd,
        nn.ConvTranspose1d: self._count_convNd,
        nn.ConvTranspose2d: self._count_convNd,
        nn.ConvTranspose3d: self._count_convNd,
        nn.Linear: self._count_linear,
        nn.RNNCell: self._count_rnn_cell,
        nn.GRUCell: self._count_gru_cell,
        nn.LSTMCell: self._count_lstm_cell,
        nn.RNN: self._count_rnn,
        nn.GRU: self._count_gru,
        nn.LSTM: self._count_lstm
    }
    self._count_bias = False
    if mode == 'full':
        self.ops.update({
            nn.BatchNorm1d: self._count_bn,
            nn.BatchNorm2d: self._count_bn,
            nn.BatchNorm3d: self._count_bn,
            nn.LeakyReLU: self._count_relu,
            nn.AvgPool1d: self._count_avgpool,
            nn.AvgPool2d: self._count_avgpool,
            nn.AvgPool3d: self._count_avgpool,
            nn.AdaptiveAvgPool1d: self._count_adap_avgpool,
            nn.AdaptiveAvgPool2d: self._count_adap_avgpool,
            nn.AdaptiveAvgPool3d: self._count_adap_avgpool,
            nn.Upsample: self._count_upsample,
            nn.UpsamplingBilinear2d: self._count_upsample,
            nn.UpsamplingNearest2d: self._count_upsample
        })
        self._count_bias = True

    if custom_ops is not None:
        self.ops.update(custom_ops)

    self.mode = mode
    self.results = []
[ "def", "__init__", "(", "self", ",", "custom_ops", "=", "None", ",", "mode", "=", "'default'", ")", ":", "self", ".", "ops", "=", "{", "nn", ".", "Conv1d", ":", "self", ".", "_count_convNd", ",", "nn", ".", "Conv2d", ":", "self", ".", "_count_convNd", ",", "nn", ".", "Conv3d", ":", "self", ".", "_count_convNd", ",", "nn", ".", "ConvTranspose1d", ":", "self", ".", "_count_convNd", ",", "nn", ".", "ConvTranspose2d", ":", "self", ".", "_count_convNd", ",", "nn", ".", "ConvTranspose3d", ":", "self", ".", "_count_convNd", ",", "nn", ".", "Linear", ":", "self", ".", "_count_linear", ",", "nn", ".", "RNNCell", ":", "self", ".", "_count_rnn_cell", ",", "nn", ".", "GRUCell", ":", "self", ".", "_count_gru_cell", ",", "nn", ".", "LSTMCell", ":", "self", ".", "_count_lstm_cell", ",", "nn", ".", "RNN", ":", "self", ".", "_count_rnn", ",", "nn", ".", "GRU", ":", "self", ".", "_count_gru", ",", "nn", ".", "LSTM", ":", "self", ".", "_count_lstm", "}", "self", ".", "_count_bias", "=", "False", "if", "mode", "==", "'full'", ":", "self", ".", "ops", ".", "update", "(", "{", "nn", ".", "BatchNorm1d", ":", "self", ".", "_count_bn", ",", "nn", ".", "BatchNorm2d", ":", "self", ".", "_count_bn", ",", "nn", ".", "BatchNorm3d", ":", "self", ".", "_count_bn", ",", "nn", ".", "LeakyReLU", ":", "self", ".", "_count_relu", ",", "nn", ".", "AvgPool1d", ":", "self", ".", "_count_avgpool", ",", "nn", ".", "AvgPool2d", ":", "self", ".", "_count_avgpool", ",", "nn", ".", "AvgPool3d", ":", "self", ".", "_count_avgpool", ",", "nn", ".", "AdaptiveAvgPool1d", ":", "self", ".", "_count_adap_avgpool", ",", "nn", ".", "AdaptiveAvgPool2d", ":", "self", ".", "_count_adap_avgpool", ",", "nn", ".", "AdaptiveAvgPool3d", ":", "self", ".", "_count_adap_avgpool", ",", "nn", ".", "Upsample", ":", "self", ".", "_count_upsample", ",", "nn", ".", "UpsamplingBilinear2d", ":", "self", ".", "_count_upsample", ",", "nn", ".", "UpsamplingNearest2d", ":", "self", ".", "_count_upsample", "}", ")", "self", ".", "_count_bias", "=", "True", "if", "custom_ops", "is", "not", "None", ":", "self", ".", "ops", ".", "update", "(", "custom_ops", ")", "self", ".", "mode", "=", "mode", "self", ".", "results", "=", "[", "]" ]
[ 22, 4 ]
[ 76, 25 ]
python
en
['en', 'error', 'th']
False
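Because __init__ applies self.ops.update(custom_ops) after filling in the defaults, a custom entry replaces the built-in counter for that module type. The callback below is a deliberately empty placeholder; its required signature is not visible in this excerpt, so only the registration mechanics are shown.

import torch.nn as nn

def my_linear_counter(*args, **kwargs):
    # Placeholder only: the real signature must match the built-in _count_* callbacks,
    # which this excerpt does not show.
    raise NotImplementedError

profiler = ModelProfiler(custom_ops={nn.Linear: my_linear_counter}, mode='default')
assert profiler.ops[nn.Linear] is my_linear_counter  # the custom entry overrides the default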
MBart50TokenizerFast.get_special_tokens_mask
( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False )
Retrieves sequence ids from a token list that has no special tokens added. This method is called when adding
special tokens using the tokenizer ``prepare_for_model`` method.

Args:
    token_ids_0 (:obj:`List[int]`):
        List of ids.
    token_ids_1 (:obj:`List[int]`, `optional`):
        Optional second list of IDs for sequence pairs.
    already_has_special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`):
        Whether or not the token list is already formatted with special tokens for the model.

Returns:
    :obj:`List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
Retrieves sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer ``prepare_for_model`` method.
def get_special_tokens_mask(
    self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
) -> List[int]:
    """
    Retrieves sequence ids from a token list that has no special tokens added. This method is called when adding
    special tokens using the tokenizer ``prepare_for_model`` method.

    Args:
        token_ids_0 (:obj:`List[int]`):
            List of ids.
        token_ids_1 (:obj:`List[int]`, `optional`):
            Optional second list of IDs for sequence pairs.
        already_has_special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not the token list is already formatted with special tokens for the model.

    Returns:
        :obj:`List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
    """

    if already_has_special_tokens:
        if token_ids_1 is not None:
            raise ValueError(
                "You should not supply a second sequence if the provided sequence of "
                "ids is already formatted with special tokens for the model."
            )
        return list(map(lambda x: 1 if x in [self.sep_token_id, self.cls_token_id] else 0, token_ids_0))

    prefix_ones = [1] * len(self.prefix_tokens)
    suffix_ones = [1] * len(self.suffix_tokens)
    if token_ids_1 is None:
        return prefix_ones + ([0] * len(token_ids_0)) + suffix_ones
    return prefix_ones + ([0] * len(token_ids_0)) + ([0] * len(token_ids_1)) + suffix_ones
[ "def", "get_special_tokens_mask", "(", "self", ",", "token_ids_0", ":", "List", "[", "int", "]", ",", "token_ids_1", ":", "Optional", "[", "List", "[", "int", "]", "]", "=", "None", ",", "already_has_special_tokens", ":", "bool", "=", "False", ")", "->", "List", "[", "int", "]", ":", "if", "already_has_special_tokens", ":", "if", "token_ids_1", "is", "not", "None", ":", "raise", "ValueError", "(", "\"You should not supply a second sequence if the provided sequence of \"", "\"ids is already formatted with special tokens for the model.\"", ")", "return", "list", "(", "map", "(", "lambda", "x", ":", "1", "if", "x", "in", "[", "self", ".", "sep_token_id", ",", "self", ".", "cls_token_id", "]", "else", "0", ",", "token_ids_0", ")", ")", "prefix_ones", "=", "[", "1", "]", "*", "len", "(", "self", ".", "prefix_tokens", ")", "suffix_ones", "=", "[", "1", "]", "*", "len", "(", "self", ".", "suffix_tokens", ")", "if", "token_ids_1", "is", "None", ":", "return", "prefix_ones", "+", "(", "[", "0", "]", "*", "len", "(", "token_ids_0", ")", ")", "+", "suffix_ones", "return", "prefix_ones", "+", "(", "[", "0", "]", "*", "len", "(", "token_ids_0", ")", ")", "+", "(", "[", "0", "]", "*", "len", "(", "token_ids_1", ")", ")", "+", "suffix_ones" ]
[ 162, 4 ]
[ 192, 94 ]
python
en
['en', 'error', 'th']
False
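A quick sketch of the branch for ids without special tokens; the checkpoint name is illustrative. The expected mask follows directly from prefix_ones/suffix_ones in the code above, since MBart-50 uses a one-token language-code prefix and an eos suffix.

from transformers import MBart50TokenizerFast

tok = MBart50TokenizerFast.from_pretrained("facebook/mbart-large-50")  # illustrative checkpoint
ids = tok.convert_tokens_to_ids(tok.tokenize("Hello world"))  # no special tokens yet
mask = tok.get_special_tokens_mask(ids)
# 1 marks the language-code prefix and the </s> suffix; 0 marks ordinary tokens
assert mask == [1] + [0] * len(ids) + [1]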
MBart50TokenizerFast.build_inputs_with_special_tokens
( self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None )
Build model inputs from a sequence or a pair of sequences for sequence classification tasks by concatenating
and adding special tokens. The special tokens depend on calling set_lang.

An MBART-50 sequence has the following format, where ``X`` represents the sequence:

- ``input_ids`` (for encoder) ``[src_lang_code] X [eos]``
- ``labels``: (for decoder) ``[tgt_lang_code] X [eos]``

BOS is never used. Pairs of sequences are not the expected use case, but they will be handled without a
separator.

Args:
    token_ids_0 (:obj:`List[int]`):
        List of IDs to which the special tokens will be added.
    token_ids_1 (:obj:`List[int]`, `optional`):
        Optional second list of IDs for sequence pairs.

Returns:
    :obj:`List[int]`: list of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens.
Build model inputs from a sequence or a pair of sequences for sequence classification tasks by concatenating and adding special tokens. The special tokens depend on calling set_lang.
def build_inputs_with_special_tokens(
    self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) -> List[int]:
    """
    Build model inputs from a sequence or a pair of sequences for sequence classification tasks by concatenating
    and adding special tokens. The special tokens depend on calling set_lang.

    An MBART-50 sequence has the following format, where ``X`` represents the sequence:

    - ``input_ids`` (for encoder) ``[src_lang_code] X [eos]``
    - ``labels``: (for decoder) ``[tgt_lang_code] X [eos]``

    BOS is never used. Pairs of sequences are not the expected use case, but they will be handled without a
    separator.

    Args:
        token_ids_0 (:obj:`List[int]`):
            List of IDs to which the special tokens will be added.
        token_ids_1 (:obj:`List[int]`, `optional`):
            Optional second list of IDs for sequence pairs.

    Returns:
        :obj:`List[int]`: list of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens.
    """

    if token_ids_1 is None:
        return self.prefix_tokens + token_ids_0 + self.suffix_tokens
    # We don't expect to process pairs, but leave the pair logic for API consistency
    return self.prefix_tokens + token_ids_0 + token_ids_1 + self.suffix_tokens
[ "def", "build_inputs_with_special_tokens", "(", "self", ",", "token_ids_0", ":", "List", "[", "int", "]", ",", "token_ids_1", ":", "Optional", "[", "List", "[", "int", "]", "]", "=", "None", ")", "->", "List", "[", "int", "]", ":", "if", "token_ids_1", "is", "None", ":", "return", "self", ".", "prefix_tokens", "+", "token_ids_0", "+", "self", ".", "suffix_tokens", "# We don't expect to process pairs, but leave the pair logic for API consistency", "return", "self", ".", "prefix_tokens", "+", "token_ids_0", "+", "token_ids_1", "+", "self", ".", "suffix_tokens" ]
[ 194, 4 ]
[ 221, 82 ]
python
en
['en', 'error', 'th']
False
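Continuing with the tok instance from the previous sketch, the single-sequence branch simply wraps the ids in the current prefix and suffix (for a freshly loaded tokenizer the prefix is the default source language code, reportedly en_XX, though that default is an assumption here):

ids = tok.convert_tokens_to_ids(tok.tokenize("Hello"))
with_special = tok.build_inputs_with_special_tokens(ids)
# [src_lang_code] X [eos], exactly as the docstring describes
assert with_special == tok.prefix_tokens + ids + tok.suffix_tokens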
MBart50TokenizerFast.as_target_tokenizer
(self)
Temporarily sets the tokenizer for encoding the targets. Useful for tokenizers associated with sequence-to-sequence models that need a slightly different processing for the labels.
Temporarily sets the tokenizer for encoding the targets. Useful for tokenizers associated with sequence-to-sequence models that need a slightly different processing for the labels.
def as_target_tokenizer(self):
    """
    Temporarily sets the tokenizer for encoding the targets. Useful for tokenizers associated with
    sequence-to-sequence models that need a slightly different processing for the labels.
    """
    self.set_tgt_lang_special_tokens(self.tgt_lang)
    yield
    self.set_src_lang_special_tokens(self.src_lang)
[ "def", "as_target_tokenizer", "(", "self", ")", ":", "self", ".", "set_tgt_lang_special_tokens", "(", "self", ".", "tgt_lang", ")", "yield", "self", ".", "set_src_lang_special_tokens", "(", "self", ".", "src_lang", ")" ]
[ 236, 4 ]
[ 243, 55 ]
python
en
['en', 'error', 'th']
False
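Typical seq2seq preparation with this context manager (the full class presumably decorates the method with contextlib.contextmanager, which the excerpt omits); the language codes are illustrative:

tok = MBart50TokenizerFast.from_pretrained(
    "facebook/mbart-large-50", src_lang="en_XX", tgt_lang="ro_RO"
)
inputs = tok("Hello", return_tensors="pt")
with tok.as_target_tokenizer():
    # inside the block, labels are built with the ro_RO prefix
    inputs["labels"] = tok("Salut", return_tensors="pt")["input_ids"]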
MBart50TokenizerFast.set_src_lang_special_tokens
(self, src_lang: str)
Reset the special tokens to the source lang setting. prefix=[src_lang_code] and suffix=[eos].
Reset the special tokens to the source lang setting. prefix=[src_lang_code] and suffix=[eos].
def set_src_lang_special_tokens(self, src_lang: str) -> None:
    """Reset the special tokens to the source lang setting. prefix=[src_lang_code] and suffix=[eos]."""
    self.cur_lang_code_id = self.convert_tokens_to_ids(src_lang)
    self.prefix_tokens = [self.cur_lang_code_id]
    self.suffix_tokens = [self.eos_token_id]

    prefix_tokens_str = self.convert_ids_to_tokens(self.prefix_tokens)
    suffix_tokens_str = self.convert_ids_to_tokens(self.suffix_tokens)

    self._tokenizer.post_processor = processors.TemplateProcessing(
        single=prefix_tokens_str + ["$A"] + suffix_tokens_str,
        pair=prefix_tokens_str + ["$A", "$B"] + suffix_tokens_str,
        special_tokens=list(zip(prefix_tokens_str + suffix_tokens_str, self.prefix_tokens + self.suffix_tokens)),
    )
[ "def", "set_src_lang_special_tokens", "(", "self", ",", "src_lang", ":", "str", ")", "->", "None", ":", "self", ".", "cur_lang_code_id", "=", "self", ".", "convert_tokens_to_ids", "(", "src_lang", ")", "self", ".", "prefix_tokens", "=", "[", "self", ".", "cur_lang_code_id", "]", "self", ".", "suffix_tokens", "=", "[", "self", ".", "eos_token_id", "]", "prefix_tokens_str", "=", "self", ".", "convert_ids_to_tokens", "(", "self", ".", "prefix_tokens", ")", "suffix_tokens_str", "=", "self", ".", "convert_ids_to_tokens", "(", "self", ".", "suffix_tokens", ")", "self", ".", "_tokenizer", ".", "post_processor", "=", "processors", ".", "TemplateProcessing", "(", "single", "=", "prefix_tokens_str", "+", "[", "\"$A\"", "]", "+", "suffix_tokens_str", ",", "pair", "=", "prefix_tokens_str", "+", "[", "\"$A\"", ",", "\"$B\"", "]", "+", "suffix_tokens_str", ",", "special_tokens", "=", "list", "(", "zip", "(", "prefix_tokens_str", "+", "suffix_tokens_str", ",", "self", ".", "prefix_tokens", "+", "self", ".", "suffix_tokens", ")", ")", ",", ")" ]
[ 245, 4 ]
[ 258, 9 ]
python
en
['en', 'en', 'en']
True
MBart50TokenizerFast.set_tgt_lang_special_tokens
(self, tgt_lang: str)
Reset the special tokens to the target language setting. prefix=[tgt_lang_code] and suffix=[eos].
Reset the special tokens to the target language setting. prefix=[tgt_lang_code] and suffix=[eos].
def set_tgt_lang_special_tokens(self, tgt_lang: str) -> None:
    """Reset the special tokens to the target language setting. prefix=[tgt_lang_code] and suffix=[eos]."""
    self.cur_lang_code_id = self.convert_tokens_to_ids(tgt_lang)
    self.prefix_tokens = [self.cur_lang_code_id]
    self.suffix_tokens = [self.eos_token_id]

    prefix_tokens_str = self.convert_ids_to_tokens(self.prefix_tokens)
    suffix_tokens_str = self.convert_ids_to_tokens(self.suffix_tokens)

    self._tokenizer.post_processor = processors.TemplateProcessing(
        single=prefix_tokens_str + ["$A"] + suffix_tokens_str,
        pair=prefix_tokens_str + ["$A", "$B"] + suffix_tokens_str,
        special_tokens=list(zip(prefix_tokens_str + suffix_tokens_str, self.prefix_tokens + self.suffix_tokens)),
    )
[ "def", "set_tgt_lang_special_tokens", "(", "self", ",", "tgt_lang", ":", "str", ")", "->", "None", ":", "self", ".", "cur_lang_code_id", "=", "self", ".", "convert_tokens_to_ids", "(", "tgt_lang", ")", "self", ".", "prefix_tokens", "=", "[", "self", ".", "cur_lang_code_id", "]", "self", ".", "suffix_tokens", "=", "[", "self", ".", "eos_token_id", "]", "prefix_tokens_str", "=", "self", ".", "convert_ids_to_tokens", "(", "self", ".", "prefix_tokens", ")", "suffix_tokens_str", "=", "self", ".", "convert_ids_to_tokens", "(", "self", ".", "suffix_tokens", ")", "self", ".", "_tokenizer", ".", "post_processor", "=", "processors", ".", "TemplateProcessing", "(", "single", "=", "prefix_tokens_str", "+", "[", "\"$A\"", "]", "+", "suffix_tokens_str", ",", "pair", "=", "prefix_tokens_str", "+", "[", "\"$A\"", ",", "\"$B\"", "]", "+", "suffix_tokens_str", ",", "special_tokens", "=", "list", "(", "zip", "(", "prefix_tokens_str", "+", "suffix_tokens_str", ",", "self", ".", "prefix_tokens", "+", "self", ".", "suffix_tokens", ")", ")", ",", ")" ]
[ 260, 4 ]
[ 273, 9 ]
python
en
['en', 'en', 'en']
True
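The two setters differ only in which language code becomes the one-token prefix, which a quick check against the tok instance above confirms:

tok.set_tgt_lang_special_tokens("ro_RO")
assert tok.prefix_tokens == [tok.convert_tokens_to_ids("ro_RO")]
assert tok.suffix_tokens == [tok.eos_token_id]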
Control4Validator.__init__
(self, host, username, password, hass)
Initialize.
Initialize.
def __init__(self, host, username, password, hass):
    """Initialize."""
    self.host = host
    self.username = username
    self.password = password
    self.controller_unique_id = None
    self.director_bearer_token = None
    self.hass = hass
[ "def", "__init__", "(", "self", ",", "host", ",", "username", ",", "password", ",", "hass", ")", ":", "self", ".", "host", "=", "host", "self", ".", "username", "=", "username", "self", ".", "password", "=", "password", "self", ".", "controller_unique_id", "=", "None", "self", ".", "director_bearer_token", "=", "None", "self", ".", "hass", "=", "hass" ]
[ 38, 4 ]
[ 45, 24 ]
python
en
['en', 'en', 'it']
False
Control4Validator.authenticate
(self)
Test if we can authenticate with the Control4 account API.
Test if we can authenticate with the Control4 account API.
async def authenticate(self) -> bool:
    """Test if we can authenticate with the Control4 account API."""
    try:
        account_session = aiohttp_client.async_get_clientsession(self.hass)
        account = C4Account(self.username, self.password, account_session)

        # Authenticate with Control4 account
        await account.getAccountBearerToken()

        # Get controller name
        account_controllers = await account.getAccountControllers()
        self.controller_unique_id = account_controllers["controllerCommonName"]

        # Get bearer token to communicate with controller locally
        self.director_bearer_token = (
            await account.getDirectorBearerToken(self.controller_unique_id)
        )["token"]
        return True
    except (Unauthorized, NotFound):
        return False
[ "async", "def", "authenticate", "(", "self", ")", "->", "bool", ":", "try", ":", "account_session", "=", "aiohttp_client", ".", "async_get_clientsession", "(", "self", ".", "hass", ")", "account", "=", "C4Account", "(", "self", ".", "username", ",", "self", ".", "password", ",", "account_session", ")", "# Authenticate with Control4 account", "await", "account", ".", "getAccountBearerToken", "(", ")", "# Get controller name", "account_controllers", "=", "await", "account", ".", "getAccountControllers", "(", ")", "self", ".", "controller_unique_id", "=", "account_controllers", "[", "\"controllerCommonName\"", "]", "# Get bearer token to communicate with controller locally", "self", ".", "director_bearer_token", "=", "(", "await", "account", ".", "getDirectorBearerToken", "(", "self", ".", "controller_unique_id", ")", ")", "[", "\"token\"", "]", "return", "True", "except", "(", "Unauthorized", ",", "NotFound", ")", ":", "return", "False" ]
[ 47, 4 ]
[ 65, 24 ]
python
en
['en', 'en', 'en']
True
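The same account flow can be sketched outside Home Assistant with the pyControl4 library directly. The import path and credentials below are assumptions for illustration; the method names match those used in authenticate() above.

import asyncio
import aiohttp
from pyControl4.account import C4Account  # assumed module path

async def main():
    async with aiohttp.ClientSession() as session:
        account = C4Account("user@example.com", "app-password", session)  # placeholder credentials
        await account.getAccountBearerToken()  # authenticate with the cloud API
        controllers = await account.getAccountControllers()
        print(controllers["controllerCommonName"])  # the controller's unique name

asyncio.run(main())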
Control4Validator.connect_to_director
(self)
Test if we can connect to the local Control4 Director.
Test if we can connect to the local Control4 Director.
async def connect_to_director(self) -> bool:
    """Test if we can connect to the local Control4 Director."""
    try:
        director_session = aiohttp_client.async_get_clientsession(
            self.hass, verify_ssl=False
        )
        director = C4Director(
            self.host, self.director_bearer_token, director_session
        )
        await director.getAllItemInfo()
        return True
    except (Unauthorized, ClientError, asyncioTimeoutError):
        _LOGGER.error("Failed to connect to the Control4 controller")
        return False
[ "async", "def", "connect_to_director", "(", "self", ")", "->", "bool", ":", "try", ":", "director_session", "=", "aiohttp_client", ".", "async_get_clientsession", "(", "self", ".", "hass", ",", "verify_ssl", "=", "False", ")", "director", "=", "C4Director", "(", "self", ".", "host", ",", "self", ".", "director_bearer_token", ",", "director_session", ")", "await", "director", ".", "getAllItemInfo", "(", ")", "return", "True", "except", "(", "Unauthorized", ",", "ClientError", ",", "asyncioTimeoutError", ")", ":", "_LOGGER", ".", "error", "(", "\"Failed to connect to the Control4 controller\"", ")", "return", "False" ]
[ 67, 4 ]
[ 80, 24 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_step_user
(self, user_input=None)
Handle the initial step.
Handle the initial step.
async def async_step_user(self, user_input=None):
    """Handle the initial step."""
    errors = {}
    if user_input is not None:
        hub = Control4Validator(
            user_input["host"],
            user_input["username"],
            user_input["password"],
            self.hass,
        )
        try:
            if not await hub.authenticate():
                raise InvalidAuth
            if not await hub.connect_to_director():
                raise CannotConnect
        except InvalidAuth:
            errors["base"] = "invalid_auth"
        except CannotConnect:
            errors["base"] = "cannot_connect"
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"

        if not errors:
            controller_unique_id = hub.controller_unique_id
            mac = (controller_unique_id.split("_", 3))[2]
            formatted_mac = format_mac(mac)
            await self.async_set_unique_id(formatted_mac)
            self._abort_if_unique_id_configured()
            return self.async_create_entry(
                title=controller_unique_id,
                data={
                    CONF_HOST: user_input["host"],
                    CONF_USERNAME: user_input["username"],
                    CONF_PASSWORD: user_input["password"],
                    CONF_CONTROLLER_UNIQUE_ID: controller_unique_id,
                },
            )

    return self.async_show_form(
        step_id="user", data_schema=DATA_SCHEMA, errors=errors
    )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "hub", "=", "Control4Validator", "(", "user_input", "[", "\"host\"", "]", ",", "user_input", "[", "\"username\"", "]", ",", "user_input", "[", "\"password\"", "]", ",", "self", ".", "hass", ",", ")", "try", ":", "if", "not", "await", "hub", ".", "authenticate", "(", ")", ":", "raise", "InvalidAuth", "if", "not", "await", "hub", ".", "connect_to_director", "(", ")", ":", "raise", "CannotConnect", "except", "InvalidAuth", ":", "errors", "[", "\"base\"", "]", "=", "\"invalid_auth\"", "except", "CannotConnect", ":", "errors", "[", "\"base\"", "]", "=", "\"cannot_connect\"", "except", "Exception", ":", "# pylint: disable=broad-except", "_LOGGER", ".", "exception", "(", "\"Unexpected exception\"", ")", "errors", "[", "\"base\"", "]", "=", "\"unknown\"", "if", "not", "errors", ":", "controller_unique_id", "=", "hub", ".", "controller_unique_id", "mac", "=", "(", "controller_unique_id", ".", "split", "(", "\"_\"", ",", "3", ")", ")", "[", "2", "]", "formatted_mac", "=", "format_mac", "(", "mac", ")", "await", "self", ".", "async_set_unique_id", "(", "formatted_mac", ")", "self", ".", "_abort_if_unique_id_configured", "(", ")", "return", "self", ".", "async_create_entry", "(", "title", "=", "controller_unique_id", ",", "data", "=", "{", "CONF_HOST", ":", "user_input", "[", "\"host\"", "]", ",", "CONF_USERNAME", ":", "user_input", "[", "\"username\"", "]", ",", "CONF_PASSWORD", ":", "user_input", "[", "\"password\"", "]", ",", "CONF_CONTROLLER_UNIQUE_ID", ":", "controller_unique_id", ",", "}", ",", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "DATA_SCHEMA", ",", "errors", "=", "errors", ")" ]
[ 89, 4 ]
[ 131, 9 ]
python
en
['en', 'en', 'en']
True
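The MAC address is pulled out of the controller name purely by position; the name below is hypothetical, chosen only to show which segment split("_", 3)[2] selects:

controller_unique_id = "control4_EA5_000FFF123456"  # hypothetical name format
mac = (controller_unique_id.split("_", 3))[2]
print(mac)  # 000FFF123456, normalized by format_mac before use as the unique id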
ConfigFlow.async_get_options_flow
(config_entry)
Get the options flow for this handler.
Get the options flow for this handler.
def async_get_options_flow(config_entry):
    """Get the options flow for this handler."""
    return OptionsFlowHandler(config_entry)
[ "def", "async_get_options_flow", "(", "config_entry", ")", ":", "return", "OptionsFlowHandler", "(", "config_entry", ")" ]
[ 135, 4 ]
[ 137, 47 ]
python
en
['en', 'en', 'en']
True
OptionsFlowHandler.__init__
(self, config_entry: config_entries.ConfigEntry)
Initialize options flow.
Initialize options flow.
def __init__(self, config_entry: config_entries.ConfigEntry):
    """Initialize options flow."""
    self.config_entry = config_entry
[ "def", "__init__", "(", "self", ",", "config_entry", ":", "config_entries", ".", "ConfigEntry", ")", ":", "self", ".", "config_entry", "=", "config_entry" ]
[ 143, 4 ]
[ 145, 40 ]
python
en
['en', 'en', 'en']
True
OptionsFlowHandler.async_step_init
(self, user_input=None)
Handle options flow.
Handle options flow.
async def async_step_init(self, user_input=None):
    """Handle options flow."""
    if user_input is not None:
        return self.async_create_entry(title="", data=user_input)

    data_schema = vol.Schema(
        {
            vol.Optional(
                CONF_SCAN_INTERVAL,
                default=self.config_entry.options.get(
                    CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
                ),
            ): vol.All(cv.positive_int, vol.Clamp(min=MIN_SCAN_INTERVAL)),
        }
    )
    return self.async_show_form(step_id="init", data_schema=data_schema)
[ "async", "def", "async_step_init", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "user_input", "is", "not", "None", ":", "return", "self", ".", "async_create_entry", "(", "title", "=", "\"\"", ",", "data", "=", "user_input", ")", "data_schema", "=", "vol", ".", "Schema", "(", "{", "vol", ".", "Optional", "(", "CONF_SCAN_INTERVAL", ",", "default", "=", "self", ".", "config_entry", ".", "options", ".", "get", "(", "CONF_SCAN_INTERVAL", ",", "DEFAULT_SCAN_INTERVAL", ")", ",", ")", ":", "vol", ".", "All", "(", "cv", ".", "positive_int", ",", "vol", ".", "Clamp", "(", "min", "=", "MIN_SCAN_INTERVAL", ")", ")", ",", "}", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"init\"", ",", "data_schema", "=", "data_schema", ")" ]
[ 147, 4 ]
[ 162, 76 ]
python
en
['en', 'nl', 'en']
True
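The validator chain in the schema can be exercised on its own. Here cv.positive_int is replaced by a plain integer coercion for a self-contained sketch, and 10 stands in for MIN_SCAN_INTERVAL, whose real value this excerpt does not show:

import voluptuous as vol

validator = vol.All(vol.Coerce(int), vol.Clamp(min=10))  # 10 stands in for MIN_SCAN_INTERVAL
print(validator("5"))  # -> 10, values below the floor are clamped up
print(validator(60))   # -> 60, values at or above the floor pass through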
async_setup_entry
(hass, config_entry, async_add_entities)
Set up 1-Wire platform.
Set up 1-Wire platform.
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up 1-Wire platform."""
    # Only OWServer implementation works with binary sensors
    if config_entry.data[CONF_TYPE] == CONF_TYPE_OWSERVER:
        onewirehub = hass.data[DOMAIN][config_entry.unique_id]
        entities = await hass.async_add_executor_job(get_entities, onewirehub)
        async_add_entities(entities, True)
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "# Only OWServer implementation works with binary sensors", "if", "config_entry", ".", "data", "[", "CONF_TYPE", "]", "==", "CONF_TYPE_OWSERVER", ":", "onewirehub", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry", ".", "unique_id", "]", "entities", "=", "await", "hass", ".", "async_add_executor_job", "(", "get_entities", ",", "onewirehub", ")", "async_add_entities", "(", "entities", ",", "True", ")" ]
[ 79, 0 ]
[ 86, 42 ]
python
en
['en', 'de', 'en']
True
get_entities
(onewirehub: OneWireHub)
Get a list of entities.
Get a list of entities.
def get_entities(onewirehub: OneWireHub):
    """Get a list of entities."""
    entities = []
    for device in onewirehub.devices:
        family = device["family"]
        device_type = device["type"]
        sensor_id = os.path.split(os.path.split(device["path"])[0])[1]

        if family not in DEVICE_BINARY_SENSORS:
            continue
        device_info = {
            "identifiers": {(DOMAIN, sensor_id)},
            "manufacturer": "Maxim Integrated",
            "model": device_type,
            "name": sensor_id,
        }
        for device_sensor in DEVICE_BINARY_SENSORS[family]:
            device_file = os.path.join(
                os.path.split(device["path"])[0], device_sensor["path"]
            )
            entities.append(
                OneWireProxyBinarySensor(
                    sensor_id,
                    device_file,
                    device_sensor["type"],
                    device_sensor["name"],
                    device_info,
                    device_sensor.get("default_disabled", False),
                    onewirehub.owproxy,
                )
            )

    return entities
[ "def", "get_entities", "(", "onewirehub", ":", "OneWireHub", ")", ":", "entities", "=", "[", "]", "for", "device", "in", "onewirehub", ".", "devices", ":", "family", "=", "device", "[", "\"family\"", "]", "device_type", "=", "device", "[", "\"type\"", "]", "sensor_id", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "split", "(", "device", "[", "\"path\"", "]", ")", "[", "0", "]", ")", "[", "1", "]", "if", "family", "not", "in", "DEVICE_BINARY_SENSORS", ":", "continue", "device_info", "=", "{", "\"identifiers\"", ":", "{", "(", "DOMAIN", ",", "sensor_id", ")", "}", ",", "\"manufacturer\"", ":", "\"Maxim Integrated\"", ",", "\"model\"", ":", "device_type", ",", "\"name\"", ":", "sensor_id", ",", "}", "for", "device_sensor", "in", "DEVICE_BINARY_SENSORS", "[", "family", "]", ":", "device_file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "split", "(", "device", "[", "\"path\"", "]", ")", "[", "0", "]", ",", "device_sensor", "[", "\"path\"", "]", ")", "entities", ".", "append", "(", "OneWireProxyBinarySensor", "(", "sensor_id", ",", "device_file", ",", "device_sensor", "[", "\"type\"", "]", ",", "device_sensor", "[", "\"name\"", "]", ",", "device_info", ",", "device_sensor", ".", "get", "(", "\"default_disabled\"", ",", "False", ")", ",", "onewirehub", ".", "owproxy", ",", ")", ")", "return", "entities" ]
[ 89, 0 ]
[ 122, 19 ]
python
en
['en', 'en', 'en']
True
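The nested os.path.split pulls the device directory name out of an owserver path; the path below is hypothetical:

import os

device_path = "/28.FF5C68521604/"  # hypothetical owserver device path
sensor_id = os.path.split(os.path.split(device_path)[0])[1]
print(sensor_id)  # 28.FF5C68521604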
OneWireProxyBinarySensor.is_on
(self)
Return true if sensor is on.
Return true if sensor is on.
def is_on(self):
    """Return true if sensor is on."""
    return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 129, 4 ]
[ 131, 26 ]
python
en
['en', 'et', 'en']
True
mock_hass
()
Mock hass fixture.
Mock hass fixture.
def mock_hass():
    """Mock hass fixture."""
    return Mock(data={})
[ "def", "mock_hass", "(", ")", ":", "return", "Mock", "(", "data", "=", "{", "}", ")" ]
[ 9, 0 ]
[ 11, 24 ]
python
en
['en', 'nl', 'en']
True
test_singleton_async
(mock_hass)
Test singleton with async function.
Test singleton with async function.
async def test_singleton_async(mock_hass):
    """Test singleton with async function."""

    @singleton.singleton("test_key")
    async def something(hass):
        return object()

    result1 = await something(mock_hass)
    result2 = await something(mock_hass)
    assert result1 is result2
    assert "test_key" in mock_hass.data
    assert mock_hass.data["test_key"] is result1
[ "async", "def", "test_singleton_async", "(", "mock_hass", ")", ":", "@", "singleton", ".", "singleton", "(", "\"test_key\"", ")", "async", "def", "something", "(", "hass", ")", ":", "return", "object", "(", ")", "result1", "=", "await", "something", "(", "mock_hass", ")", "result2", "=", "await", "something", "(", "mock_hass", ")", "assert", "result1", "is", "result2", "assert", "\"test_key\"", "in", "mock_hass", ".", "data", "assert", "mock_hass", ".", "data", "[", "\"test_key\"", "]", "is", "result1" ]
[ 14, 0 ]
[ 25, 48 ]
python
en
['en', 'el-Latn', 'en']
True
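A minimal sketch of the decorator this test exercises, assuming the usual pattern of caching one instance per key in hass.data; Home Assistant's real helper also guards concurrent first calls and supports sync functions, which this sketch omits:

import functools

def singleton(data_key):
    """Cache the first awaited result in hass.data[data_key] and reuse it."""
    def wrapper(func):
        @functools.wraps(func)
        async def wrapped(hass):
            if data_key not in hass.data:
                hass.data[data_key] = await func(hass)
            return hass.data[data_key]
        return wrapped
    return wrapper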