Columns: identifier (string, 1 to 155 chars) | parameters (string, 2 to 6.09k chars) | docstring (string, 11 to 63.4k chars) | docstring_summary (string, 0 to 63.4k chars) | function (string, 29 to 99.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (string, 1 class value) | docstring_language (string, 2 to 7 chars) | docstring_language_predictions (string, 18 to 23 chars) | is_langid_reliable (string, 2 class values)

IPWebcamSensor.name | (self) | Return the name of the sensor, if any.

```python
def name(self):
    """Return the name of the sensor, if any."""
    return self._name
```
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
46,
4
] | [
48,
25
] | python | en | ['en', 'en', 'en'] | True |
IPWebcamSensor.unit_of_measurement | (self) | Return the unit the value is expressed in.

```python
def unit_of_measurement(self):
    """Return the unit the value is expressed in."""
    return self._unit
```
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unit"
] | [
51,
4
] | [
53,
25
] | python | en | ['en', 'en', 'en'] | True |
IPWebcamSensor.state | (self) | Return the state of the sensor.

```python
def state(self):
    """Return the state of the sensor."""
    return self._state
```
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
56,
4
] | [
58,
26
] | python | en | ['en', 'en', 'en'] | True |
IPWebcamSensor.async_update | (self) | Retrieve latest state.

```python
async def async_update(self):
    """Retrieve latest state."""
    if self._sensor in ("audio_connections", "video_connections"):
        if not self._ipcam.status_data:
            return
        self._state = self._ipcam.status_data.get(self._sensor)
        self._unit = "Connections"
    else:
        self._state, self._unit = self._ipcam.export_sensor(self._sensor)
```
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"if",
"self",
".",
"_sensor",
"in",
"(",
"\"audio_connections\"",
",",
"\"video_connections\"",
")",
":",
"if",
"not",
"self",
".",
"_ipcam",
".",
"status_data",
":",
"return",
"self",
".",
"_state",
"=",
"self",
".",
"_ipcam",
".",
"status_data",
".",
"get",
"(",
"self",
".",
"_sensor",
")",
"self",
".",
"_unit",
"=",
"\"Connections\"",
"else",
":",
"self",
".",
"_state",
",",
"self",
".",
"_unit",
"=",
"self",
".",
"_ipcam",
".",
"export_sensor",
"(",
"self",
".",
"_sensor",
")"
] | [
60,
4
] | [
68,
77
] | python | en | ['es', 'sk', 'en'] | False |
IPWebcamSensor.icon | (self) | Return the icon for the sensor.

```python
def icon(self):
    """Return the icon for the sensor."""
    if self._sensor == "battery_level" and self._state is not None:
        return icon_for_battery_level(int(self._state))
    return ICON_MAP.get(self._sensor, "mdi:eye")
```
"def",
"icon",
"(",
"self",
")",
":",
"if",
"self",
".",
"_sensor",
"==",
"\"battery_level\"",
"and",
"self",
".",
"_state",
"is",
"not",
"None",
":",
"return",
"icon_for_battery_level",
"(",
"int",
"(",
"self",
".",
"_state",
")",
")",
"return",
"ICON_MAP",
".",
"get",
"(",
"self",
".",
"_sensor",
",",
"\"mdi:eye\"",
")"
] | [
71,
4
] | [
75,
52
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up cover controlled by shell commands.

```python
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up cover controlled by shell commands."""
    setup_reload_service(hass, DOMAIN, PLATFORMS)
    devices = config.get(CONF_COVERS, {})
    covers = []
    for device_name, device_config in devices.items():
        value_template = device_config.get(CONF_VALUE_TEMPLATE)
        if value_template is not None:
            value_template.hass = hass
        covers.append(
            CommandCover(
                hass,
                device_config.get(CONF_FRIENDLY_NAME, device_name),
                device_config[CONF_COMMAND_OPEN],
                device_config[CONF_COMMAND_CLOSE],
                device_config[CONF_COMMAND_STOP],
                device_config.get(CONF_COMMAND_STATE),
                value_template,
                device_config[CONF_COMMAND_TIMEOUT],
            )
        )
    if not covers:
        _LOGGER.error("No covers added")
        return False
    add_entities(covers)
```
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"setup_reload_service",
"(",
"hass",
",",
"DOMAIN",
",",
"PLATFORMS",
")",
"devices",
"=",
"config",
".",
"get",
"(",
"CONF_COVERS",
",",
"{",
"}",
")",
"covers",
"=",
"[",
"]",
"for",
"device_name",
",",
"device_config",
"in",
"devices",
".",
"items",
"(",
")",
":",
"value_template",
"=",
"device_config",
".",
"get",
"(",
"CONF_VALUE_TEMPLATE",
")",
"if",
"value_template",
"is",
"not",
"None",
":",
"value_template",
".",
"hass",
"=",
"hass",
"covers",
".",
"append",
"(",
"CommandCover",
"(",
"hass",
",",
"device_config",
".",
"get",
"(",
"CONF_FRIENDLY_NAME",
",",
"device_name",
")",
",",
"device_config",
"[",
"CONF_COMMAND_OPEN",
"]",
",",
"device_config",
"[",
"CONF_COMMAND_CLOSE",
"]",
",",
"device_config",
"[",
"CONF_COMMAND_STOP",
"]",
",",
"device_config",
".",
"get",
"(",
"CONF_COMMAND_STATE",
")",
",",
"value_template",
",",
"device_config",
"[",
"CONF_COMMAND_TIMEOUT",
"]",
",",
")",
")",
"if",
"not",
"covers",
":",
"_LOGGER",
".",
"error",
"(",
"\"No covers added\"",
")",
"return",
"False",
"add_entities",
"(",
"covers",
")"
] | [
40,
0
] | [
70,
24
] | python | en | ['en', 'en', 'en'] | True |
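For reference, a sketch of the CONF_COVERS mapping this setup_platform consumes. The key names mirror the CONF_* constants referenced above, but the authoritative schema lives in the platform's PLATFORM_SCHEMA, which is not part of this record, so treat the literal below as an assumption:

```python
# Hypothetical covers config for one shell-command cover; each key mirrors a
# CONF_* constant used in setup_platform above. The state command should
# print an integer position so CommandCover.update() can int() it.
covers = {
    "garage_door": {
        "friendly_name": "Garage Door",
        "command_open": "echo 100 > /tmp/cover_position",
        "command_close": "echo 0 > /tmp/cover_position",
        "command_stop": "true",
        "command_state": "cat /tmp/cover_position",
        "command_timeout": 15,
    }
}
```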
CommandCover.__init__ | (self, hass, name, command_open, command_close, command_stop, command_state, value_template, timeout) | Initialize the cover.

```python
def __init__(
    self,
    hass,
    name,
    command_open,
    command_close,
    command_stop,
    command_state,
    value_template,
    timeout,
):
    """Initialize the cover."""
    self._hass = hass
    self._name = name
    self._state = None
    self._command_open = command_open
    self._command_close = command_close
    self._command_stop = command_stop
    self._command_state = command_state
    self._value_template = value_template
    self._timeout = timeout
```
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"name",
",",
"command_open",
",",
"command_close",
",",
"command_stop",
",",
"command_state",
",",
"value_template",
",",
"timeout",
",",
")",
":",
"self",
".",
"_hass",
"=",
"hass",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_state",
"=",
"None",
"self",
".",
"_command_open",
"=",
"command_open",
"self",
".",
"_command_close",
"=",
"command_close",
"self",
".",
"_command_stop",
"=",
"command_stop",
"self",
".",
"_command_state",
"=",
"command_state",
"self",
".",
"_value_template",
"=",
"value_template",
"self",
".",
"_timeout",
"=",
"timeout"
] | [
76,
4
] | [
96,
31
] | python | en | ['en', 'en', 'en'] | True |
CommandCover._move_cover | (self, command) | Execute the actual commands.

```python
def _move_cover(self, command):
    """Execute the actual commands."""
    _LOGGER.info("Running command: %s", command)
    success = call_shell_with_timeout(command, self._timeout) == 0
    if not success:
        _LOGGER.error("Command failed: %s", command)
    return success
```
"def",
"_move_cover",
"(",
"self",
",",
"command",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"Running command: %s\"",
",",
"command",
")",
"success",
"=",
"call_shell_with_timeout",
"(",
"command",
",",
"self",
".",
"_timeout",
")",
"==",
"0",
"if",
"not",
"success",
":",
"_LOGGER",
".",
"error",
"(",
"\"Command failed: %s\"",
",",
"command",
")",
"return",
"success"
] | [
98,
4
] | [
107,
22
] | python | en | ['en', 'en', 'en'] | True |
CommandCover._query_state_value | (self, command) | Execute state command for return value.

```python
def _query_state_value(self, command):
    """Execute state command for return value."""
    _LOGGER.info("Running state value command: %s", command)
    return check_output_or_log(command, self._timeout)
```
"def",
"_query_state_value",
"(",
"self",
",",
"command",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"Running state value command: %s\"",
",",
"command",
")",
"return",
"check_output_or_log",
"(",
"command",
",",
"self",
".",
"_timeout",
")"
] | [
109,
4
] | [
112,
58
] | python | en | ['en', 'en', 'en'] | True |
CommandCover.should_poll | (self) | Only poll if we have state command.

```python
def should_poll(self):
    """Only poll if we have state command."""
    return self._command_state is not None
```
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"self",
".",
"_command_state",
"is",
"not",
"None"
] | [
115,
4
] | [
117,
46
] | python | en | ['en', 'en', 'en'] | True |
CommandCover.name | (self) | Return the name of the cover.

```python
def name(self):
    """Return the name of the cover."""
    return self._name
```
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
120,
4
] | [
122,
25
] | python | en | ['en', 'en', 'en'] | True |
CommandCover.is_closed | (self) | Return if the cover is closed.

```python
def is_closed(self):
    """Return if the cover is closed."""
    if self.current_cover_position is not None:
        return self.current_cover_position == 0
```
"def",
"is_closed",
"(",
"self",
")",
":",
"if",
"self",
".",
"current_cover_position",
"is",
"not",
"None",
":",
"return",
"self",
".",
"current_cover_position",
"==",
"0"
] | [
125,
4
] | [
128,
51
] | python | en | ['en', 'en', 'en'] | True |
CommandCover.current_cover_position | (self) | Return current position of cover. None is unknown, 0 is closed, 100 is fully open.

```python
def current_cover_position(self):
    """Return current position of cover.

    None is unknown, 0 is closed, 100 is fully open.
    """
    return self._state
```
"def",
"current_cover_position",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
131,
4
] | [
136,
26
] | python | en | ['en', 'en', 'en'] | True |
CommandCover._query_state | (self) | Query for the state.

```python
def _query_state(self):
    """Query for the state."""
    if not self._command_state:
        _LOGGER.error("No state command specified")
        return
    return self._query_state_value(self._command_state)
```
"def",
"_query_state",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_command_state",
":",
"_LOGGER",
".",
"error",
"(",
"\"No state command specified\"",
")",
"return",
"return",
"self",
".",
"_query_state_value",
"(",
"self",
".",
"_command_state",
")"
] | [
138,
4
] | [
143,
59
] | python | en | ['en', 'en', 'en'] | True |
CommandCover.update | (self) | Update device state.

```python
def update(self):
    """Update device state."""
    if self._command_state:
        payload = str(self._query_state())
        if self._value_template:
            payload = self._value_template.render_with_possible_json_value(payload)
        self._state = int(payload)
```
"def",
"update",
"(",
"self",
")",
":",
"if",
"self",
".",
"_command_state",
":",
"payload",
"=",
"str",
"(",
"self",
".",
"_query_state",
"(",
")",
")",
"if",
"self",
".",
"_value_template",
":",
"payload",
"=",
"self",
".",
"_value_template",
".",
"render_with_possible_json_value",
"(",
"payload",
")",
"self",
".",
"_state",
"=",
"int",
"(",
"payload",
")"
] | [
145,
4
] | [
151,
38
] | python | en | ['fr', 'en', 'en'] | True |
CommandCover.open_cover | (self, **kwargs) | Open the cover.

```python
def open_cover(self, **kwargs):
    """Open the cover."""
    self._move_cover(self._command_open)
```
"def",
"open_cover",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_move_cover",
"(",
"self",
".",
"_command_open",
")"
] | [
153,
4
] | [
155,
44
] | python | en | ['en', 'en', 'en'] | True |
CommandCover.close_cover | (self, **kwargs) | Close the cover.

```python
def close_cover(self, **kwargs):
    """Close the cover."""
    self._move_cover(self._command_close)
```
"def",
"close_cover",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_move_cover",
"(",
"self",
".",
"_command_close",
")"
] | [
157,
4
] | [
159,
45
] | python | en | ['en', 'en', 'en'] | True |
CommandCover.stop_cover | (self, **kwargs) | Stop the cover.

```python
def stop_cover(self, **kwargs):
    """Stop the cover."""
    self._move_cover(self._command_stop)
```
"def",
"stop_cover",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_move_cover",
"(",
"self",
".",
"_command_stop",
")"
] | [
161,
4
] | [
163,
44
] | python | en | ['en', 'en', 'en'] | True |
copyHdfsDirectoryToLocal | (hdfsDirectory, localDirectory, hdfsClient) | Copy directory from HDFS to local

```python
def copyHdfsDirectoryToLocal(hdfsDirectory, localDirectory, hdfsClient):
    '''Copy directory from HDFS to local'''
    if not os.path.exists(localDirectory):
        os.makedirs(localDirectory)
    try:
        listing = hdfsClient.list_status(hdfsDirectory)
    except Exception as exception:
        nni_log(LogType.Error, 'List hdfs directory {0} error: {1}'.format(hdfsDirectory, str(exception)))
        raise exception
    for f in listing:
        if f.type == 'DIRECTORY':
            subHdfsDirectory = posixpath.join(hdfsDirectory, f.pathSuffix)
            subLocalDirectory = os.path.join(localDirectory, f.pathSuffix)
            copyHdfsDirectoryToLocal(subHdfsDirectory, subLocalDirectory, hdfsClient)
        elif f.type == 'FILE':
            hdfsFilePath = posixpath.join(hdfsDirectory, f.pathSuffix)
            localFilePath = os.path.join(localDirectory, f.pathSuffix)
            copyHdfsFileToLocal(hdfsFilePath, localFilePath, hdfsClient)
        else:
            raise AssertionError('unexpected type {}'.format(f.type))
```
"def",
"copyHdfsDirectoryToLocal",
"(",
"hdfsDirectory",
",",
"localDirectory",
",",
"hdfsClient",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"localDirectory",
")",
":",
"os",
".",
"makedirs",
"(",
"localDirectory",
")",
"try",
":",
"listing",
"=",
"hdfsClient",
".",
"list_status",
"(",
"hdfsDirectory",
")",
"except",
"Exception",
"as",
"exception",
":",
"nni_log",
"(",
"LogType",
".",
"Error",
",",
"'List hdfs directory {0} error: {1}'",
".",
"format",
"(",
"hdfsDirectory",
",",
"str",
"(",
"exception",
")",
")",
")",
"raise",
"exception",
"for",
"f",
"in",
"listing",
":",
"if",
"f",
".",
"type",
"==",
"'DIRECTORY'",
":",
"subHdfsDirectory",
"=",
"posixpath",
".",
"join",
"(",
"hdfsDirectory",
",",
"f",
".",
"pathSuffix",
")",
"subLocalDirectory",
"=",
"os",
".",
"path",
".",
"join",
"(",
"localDirectory",
",",
"f",
".",
"pathSuffix",
")",
"copyHdfsDirectoryToLocal",
"(",
"subHdfsDirectory",
",",
"subLocalDirectory",
",",
"hdfsClient",
")",
"elif",
"f",
".",
"type",
"==",
"'FILE'",
":",
"hdfsFilePath",
"=",
"posixpath",
".",
"join",
"(",
"hdfsDirectory",
",",
"f",
".",
"pathSuffix",
")",
"localFilePath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"localDirectory",
",",
"f",
".",
"pathSuffix",
")",
"copyHdfsFileToLocal",
"(",
"hdfsFilePath",
",",
"localFilePath",
",",
"hdfsClient",
")",
"else",
":",
"raise",
"AssertionError",
"(",
"'unexpected type {}'",
".",
"format",
"(",
"f",
".",
"type",
")",
")"
] | [
7,
0
] | [
27,
69
] | python | en | ['en', 'en', 'en'] | True |
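A minimal sketch of the client contract this helper (together with copyHdfsFileToLocal below) relies on. NNI passes a real WebHDFS client here; the flat, file-only fake below is an assumption used purely to show the list_status/exists/get_file_status/copy_to_local interface the recursion depends on:

```python
from types import SimpleNamespace

# Illustrative stand-in for the hdfsClient argument; not the real NNI client.
class FakeHdfsClient:
    def __init__(self, files):
        self._files = files  # maps an HDFS path to bytes

    def exists(self, path):
        return path in self._files

    def get_file_status(self, path):
        return SimpleNamespace(type='FILE', length=len(self._files[path]))

    def list_status(self, directory):
        # Direct children only; every entry in this fake is a plain file.
        prefix = directory.rstrip('/') + '/'
        return [SimpleNamespace(type='FILE', pathSuffix=p[len(prefix):])
                for p in self._files
                if p.startswith(prefix) and '/' not in p[len(prefix):]]

    def copy_to_local(self, src, dst):
        with open(dst, 'wb') as fh:
            fh.write(self._files[src])

# Example call (requires nni_log/LogType and the helpers in this section):
# copyHdfsDirectoryToLocal('/exp', '/tmp/exp',
#                          FakeHdfsClient({'/exp/log.txt': b'ok'}))
```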
copyHdfsFileToLocal | (hdfsFilePath, localFilePath, hdfsClient, override=True) | Copy file from HDFS to local

```python
def copyHdfsFileToLocal(hdfsFilePath, localFilePath, hdfsClient, override=True):
    '''Copy file from HDFS to local'''
    if not hdfsClient.exists(hdfsFilePath):
        raise Exception('HDFS file {} does not exist!'.format(hdfsFilePath))
    try:
        file_status = hdfsClient.get_file_status(hdfsFilePath)
        if file_status.type != 'FILE':
            raise Exception('HDFS file path {} is not a file'.format(hdfsFilePath))
    except Exception as exception:
        nni_log(LogType.Error, 'Get hdfs file {0} status error: {1}'.format(hdfsFilePath, str(exception)))
        raise exception
    if os.path.exists(localFilePath) and override:
        os.remove(localFilePath)
    try:
        hdfsClient.copy_to_local(hdfsFilePath, localFilePath)
    except Exception as exception:
        nni_log(LogType.Error, 'Copy hdfs file {0} to {1} error: {2}'.format(hdfsFilePath, localFilePath, str(exception)))
        raise exception
    nni_log(LogType.Info, 'Successfully copied hdfs file {0} to {1}, {2} bytes'.format(hdfsFilePath, localFilePath, file_status.length))
```
"def",
"copyHdfsFileToLocal",
"(",
"hdfsFilePath",
",",
"localFilePath",
",",
"hdfsClient",
",",
"override",
"=",
"True",
")",
":",
"if",
"not",
"hdfsClient",
".",
"exists",
"(",
"hdfsFilePath",
")",
":",
"raise",
"Exception",
"(",
"'HDFS file {} does not exist!'",
".",
"format",
"(",
"hdfsFilePath",
")",
")",
"try",
":",
"file_status",
"=",
"hdfsClient",
".",
"get_file_status",
"(",
"hdfsFilePath",
")",
"if",
"file_status",
".",
"type",
"!=",
"'FILE'",
":",
"raise",
"Exception",
"(",
"'HDFS file path {} is not a file'",
".",
"format",
"(",
"hdfsFilePath",
")",
")",
"except",
"Exception",
"as",
"exception",
":",
"nni_log",
"(",
"LogType",
".",
"Error",
",",
"'Get hdfs file {0} status error: {1}'",
".",
"format",
"(",
"hdfsFilePath",
",",
"str",
"(",
"exception",
")",
")",
")",
"raise",
"exception",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"localFilePath",
")",
"and",
"override",
":",
"os",
".",
"remove",
"(",
"localFilePath",
")",
"try",
":",
"hdfsClient",
".",
"copy_to_local",
"(",
"hdfsFilePath",
",",
"localFilePath",
")",
"except",
"Exception",
"as",
"exception",
":",
"nni_log",
"(",
"LogType",
".",
"Error",
",",
"'Copy hdfs file {0} to {1} error: {2}'",
".",
"format",
"(",
"hdfsFilePath",
",",
"localFilePath",
",",
"str",
"(",
"exception",
")",
")",
")",
"raise",
"exception",
"nni_log",
"(",
"LogType",
".",
"Info",
",",
"'Successfully copied hdfs file {0} to {1}, {2} bytes'",
".",
"format",
"(",
"hdfsFilePath",
",",
"localFilePath",
",",
"file_status",
".",
"length",
")",
")"
] | [
29,
0
] | [
48,
136
] | python | en | ['en', 'en', 'en'] | True |
copyDirectoryToHdfs | (localDirectory, hdfsDirectory, hdfsClient) | Copy directory from local to HDFS

```python
def copyDirectoryToHdfs(localDirectory, hdfsDirectory, hdfsClient):
    '''Copy directory from local to HDFS'''
    if not os.path.exists(localDirectory):
        raise Exception('Local Directory does not exist!')
    hdfsClient.mkdirs(hdfsDirectory)
    result = True
    for file in os.listdir(localDirectory):
        file_path = os.path.join(localDirectory, file)
        if os.path.isdir(file_path):
            hdfs_directory = os.path.join(hdfsDirectory, file)
            try:
                result = result and copyDirectoryToHdfs(file_path, hdfs_directory, hdfsClient)
            except Exception as exception:
                nni_log(LogType.Error,
                        'Copy local directory {0} to hdfs directory {1} error: {2}'.format(file_path, hdfs_directory, str(exception)))
                result = False
        else:
            hdfs_file_path = os.path.join(hdfsDirectory, file)
            try:
                result = result and copyFileToHdfs(file_path, hdfs_file_path, hdfsClient)
            except Exception as exception:
                nni_log(LogType.Error, 'Copy local file {0} to hdfs {1} error: {2}'.format(file_path, hdfs_file_path, str(exception)))
                result = False
    return result
```
"def",
"copyDirectoryToHdfs",
"(",
"localDirectory",
",",
"hdfsDirectory",
",",
"hdfsClient",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"localDirectory",
")",
":",
"raise",
"Exception",
"(",
"'Local Directory does not exist!'",
")",
"hdfsClient",
".",
"mkdirs",
"(",
"hdfsDirectory",
")",
"result",
"=",
"True",
"for",
"file",
"in",
"os",
".",
"listdir",
"(",
"localDirectory",
")",
":",
"file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"localDirectory",
",",
"file",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"file_path",
")",
":",
"hdfs_directory",
"=",
"os",
".",
"path",
".",
"join",
"(",
"hdfsDirectory",
",",
"file",
")",
"try",
":",
"result",
"=",
"result",
"and",
"copyDirectoryToHdfs",
"(",
"file_path",
",",
"hdfs_directory",
",",
"hdfsClient",
")",
"except",
"Exception",
"as",
"exception",
":",
"nni_log",
"(",
"LogType",
".",
"Error",
",",
"'Copy local directory {0} to hdfs directory {1} error: {2}'",
".",
"format",
"(",
"file_path",
",",
"hdfs_directory",
",",
"str",
"(",
"exception",
")",
")",
")",
"result",
"=",
"False",
"else",
":",
"hdfs_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"hdfsDirectory",
",",
"file",
")",
"try",
":",
"result",
"=",
"result",
"and",
"copyFileToHdfs",
"(",
"file_path",
",",
"hdfs_file_path",
",",
"hdfsClient",
")",
"except",
"Exception",
"as",
"exception",
":",
"nni_log",
"(",
"LogType",
".",
"Error",
",",
"'Copy local file {0} to hdfs {1} error: {2}'",
".",
"format",
"(",
"file_path",
",",
"hdfs_file_path",
",",
"str",
"(",
"exception",
")",
")",
")",
"result",
"=",
"False",
"return",
"result"
] | [
50,
0
] | [
73,
17
] | python | en | ['en', 'en', 'en'] | True |
copyFileToHdfs | (localFilePath, hdfsFilePath, hdfsClient, override=True) | Copy a local file to HDFS directory

```python
def copyFileToHdfs(localFilePath, hdfsFilePath, hdfsClient, override=True):
    '''Copy a local file to HDFS directory'''
    if not os.path.exists(localFilePath):
        raise Exception('Local file Path does not exist!')
    if os.path.isdir(localFilePath):
        raise Exception('localFile should not a directory!')
    if hdfsClient.exists(hdfsFilePath):
        if override:
            hdfsClient.delete(hdfsFilePath)
        else:
            return False
    try:
        hdfsClient.copy_from_local(localFilePath, hdfsFilePath)
        return True
    except Exception as exception:
        nni_log(LogType.Error, 'Copy local file {0} to hdfs file {1} error: {2}'.format(localFilePath, hdfsFilePath, str(exception)))
        return False
```
"def",
"copyFileToHdfs",
"(",
"localFilePath",
",",
"hdfsFilePath",
",",
"hdfsClient",
",",
"override",
"=",
"True",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"localFilePath",
")",
":",
"raise",
"Exception",
"(",
"'Local file Path does not exist!'",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"localFilePath",
")",
":",
"raise",
"Exception",
"(",
"'localFile should not a directory!'",
")",
"if",
"hdfsClient",
".",
"exists",
"(",
"hdfsFilePath",
")",
":",
"if",
"override",
":",
"hdfsClient",
".",
"delete",
"(",
"hdfsFilePath",
")",
"else",
":",
"return",
"False",
"try",
":",
"hdfsClient",
".",
"copy_from_local",
"(",
"localFilePath",
",",
"hdfsFilePath",
")",
"return",
"True",
"except",
"Exception",
"as",
"exception",
":",
"nni_log",
"(",
"LogType",
".",
"Error",
",",
"'Copy local file {0} to hdfs file {1} error: {2}'",
".",
"format",
"(",
"localFilePath",
",",
"hdfsFilePath",
",",
"str",
"(",
"exception",
")",
")",
")",
"return",
"False"
] | [
75,
0
] | [
91,
20
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass: HomeAssistantType, entry: ConfigEntry, async_add_entities) | Set up AdGuard Home sensor based on a config entry.

```python
async def async_setup_entry(
    hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
    """Set up AdGuard Home sensor based on a config entry."""
    adguard = hass.data[DOMAIN][DATA_ADGUARD_CLIENT]
    try:
        version = await adguard.version()
    except AdGuardHomeConnectionError as exception:
        raise PlatformNotReady from exception
    hass.data[DOMAIN][DATA_ADGUARD_VERION] = version
    sensors = [
        AdGuardHomeDNSQueriesSensor(adguard),
        AdGuardHomeBlockedFilteringSensor(adguard),
        AdGuardHomePercentageBlockedSensor(adguard),
        AdGuardHomeReplacedParentalSensor(adguard),
        AdGuardHomeReplacedSafeBrowsingSensor(adguard),
        AdGuardHomeReplacedSafeSearchSensor(adguard),
        AdGuardHomeAverageProcessingTimeSensor(adguard),
        AdGuardHomeRulesCountSensor(adguard),
    ]
    async_add_entities(sensors, True)
```
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistantType",
",",
"entry",
":",
"ConfigEntry",
",",
"async_add_entities",
")",
"->",
"None",
":",
"adguard",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_ADGUARD_CLIENT",
"]",
"try",
":",
"version",
"=",
"await",
"adguard",
".",
"version",
"(",
")",
"except",
"AdGuardHomeConnectionError",
"as",
"exception",
":",
"raise",
"PlatformNotReady",
"from",
"exception",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_ADGUARD_VERION",
"]",
"=",
"version",
"sensors",
"=",
"[",
"AdGuardHomeDNSQueriesSensor",
"(",
"adguard",
")",
",",
"AdGuardHomeBlockedFilteringSensor",
"(",
"adguard",
")",
",",
"AdGuardHomePercentageBlockedSensor",
"(",
"adguard",
")",
",",
"AdGuardHomeReplacedParentalSensor",
"(",
"adguard",
")",
",",
"AdGuardHomeReplacedSafeBrowsingSensor",
"(",
"adguard",
")",
",",
"AdGuardHomeReplacedSafeSearchSensor",
"(",
"adguard",
")",
",",
"AdGuardHomeAverageProcessingTimeSensor",
"(",
"adguard",
")",
",",
"AdGuardHomeRulesCountSensor",
"(",
"adguard",
")",
",",
"]",
"async_add_entities",
"(",
"sensors",
",",
"True",
")"
] | [
20,
0
] | [
44,
37
] | python | en | ['en', 'da', 'en'] | True |
AdGuardHomeSensor.__init__ | (self, adguard, name: str, icon: str, measurement: str, unit_of_measurement: str, enabled_default: bool = True) | Initialize AdGuard Home sensor.

```python
def __init__(
    self,
    adguard,
    name: str,
    icon: str,
    measurement: str,
    unit_of_measurement: str,
    enabled_default: bool = True,
) -> None:
    """Initialize AdGuard Home sensor."""
    self._state = None
    self._unit_of_measurement = unit_of_measurement
    self.measurement = measurement
    super().__init__(adguard, name, icon, enabled_default)
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
",",
"name",
":",
"str",
",",
"icon",
":",
"str",
",",
"measurement",
":",
"str",
",",
"unit_of_measurement",
":",
"str",
",",
"enabled_default",
":",
"bool",
"=",
"True",
",",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"None",
"self",
".",
"_unit_of_measurement",
"=",
"unit_of_measurement",
"self",
".",
"measurement",
"=",
"measurement",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"name",
",",
"icon",
",",
"enabled_default",
")"
] | [
50,
4
] | [
64,
62
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomeSensor.unique_id | (self) | Return the unique ID for this sensor.

```python
def unique_id(self) -> str:
    """Return the unique ID for this sensor."""
    return "_".join(
        [
            DOMAIN,
            self.adguard.host,
            str(self.adguard.port),
            "sensor",
            self.measurement,
        ]
    )
```
"def",
"unique_id",
"(",
"self",
")",
"->",
"str",
":",
"return",
"\"_\"",
".",
"join",
"(",
"[",
"DOMAIN",
",",
"self",
".",
"adguard",
".",
"host",
",",
"str",
"(",
"self",
".",
"adguard",
".",
"port",
")",
",",
"\"sensor\"",
",",
"self",
".",
"measurement",
",",
"]",
")"
] | [
67,
4
] | [
77,
9
] | python | en | ['en', 'la', 'en'] | True |
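A worked example of the identifier this property yields, assuming DOMAIN is the integration's "adguard" domain; the host, port, and measurement are made-up values:

```python
# Same "_".join as unique_id above, with hypothetical connection details.
print("_".join(["adguard", "192.168.1.2", str(3000), "sensor", "dns_queries"]))
# -> adguard_192.168.1.2_3000_sensor_dns_queries
```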
AdGuardHomeSensor.state | (self) | Return the state of the sensor.

```python
def state(self):
    """Return the state of the sensor."""
    return self._state
```
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
80,
4
] | [
82,
26
] | python | en | ['en', 'en', 'en'] | True |
AdGuardHomeSensor.unit_of_measurement | (self) | Return the unit this state is expressed in.

```python
def unit_of_measurement(self) -> str:
    """Return the unit this state is expressed in."""
    return self._unit_of_measurement
```
"def",
"unit_of_measurement",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_unit_of_measurement"
] | [
85,
4
] | [
87,
40
] | python | en | ['en', 'en', 'en'] | True |
AdGuardHomeDNSQueriesSensor.__init__ | (self, adguard) | Initialize AdGuard Home sensor.

```python
def __init__(self, adguard):
    """Initialize AdGuard Home sensor."""
    super().__init__(
        adguard, "AdGuard DNS Queries", "mdi:magnify", "dns_queries", "queries"
    )
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"\"AdGuard DNS Queries\"",
",",
"\"mdi:magnify\"",
",",
"\"dns_queries\"",
",",
"\"queries\"",
")"
] | [
93,
4
] | [
97,
9
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomeDNSQueriesSensor._adguard_update | (self) | Update AdGuard Home entity.

```python
async def _adguard_update(self) -> None:
    """Update AdGuard Home entity."""
    self._state = await self.adguard.stats.dns_queries()
```
"async",
"def",
"_adguard_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"await",
"self",
".",
"adguard",
".",
"stats",
".",
"dns_queries",
"(",
")"
] | [
99,
4
] | [
101,
60
] | python | en | ['es', 'en', 'en'] | True |
AdGuardHomeBlockedFilteringSensor.__init__ | (self, adguard) | Initialize AdGuard Home sensor.

```python
def __init__(self, adguard):
    """Initialize AdGuard Home sensor."""
    super().__init__(
        adguard,
        "AdGuard DNS Queries Blocked",
        "mdi:magnify-close",
        "blocked_filtering",
        "queries",
        enabled_default=False,
    )
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"\"AdGuard DNS Queries Blocked\"",
",",
"\"mdi:magnify-close\"",
",",
"\"blocked_filtering\"",
",",
"\"queries\"",
",",
"enabled_default",
"=",
"False",
",",
")"
] | [
107,
4
] | [
116,
9
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomeBlockedFilteringSensor._adguard_update | (self) | Update AdGuard Home entity.

```python
async def _adguard_update(self) -> None:
    """Update AdGuard Home entity."""
    self._state = await self.adguard.stats.blocked_filtering()
```
"async",
"def",
"_adguard_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"await",
"self",
".",
"adguard",
".",
"stats",
".",
"blocked_filtering",
"(",
")"
] | [
118,
4
] | [
120,
66
] | python | en | ['es', 'en', 'en'] | True |
AdGuardHomePercentageBlockedSensor.__init__ | (self, adguard) | Initialize AdGuard Home sensor.

```python
def __init__(self, adguard):
    """Initialize AdGuard Home sensor."""
    super().__init__(
        adguard,
        "AdGuard DNS Queries Blocked Ratio",
        "mdi:magnify-close",
        "blocked_percentage",
        PERCENTAGE,
    )
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"\"AdGuard DNS Queries Blocked Ratio\"",
",",
"\"mdi:magnify-close\"",
",",
"\"blocked_percentage\"",
",",
"PERCENTAGE",
",",
")"
] | [
126,
4
] | [
134,
9
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomePercentageBlockedSensor._adguard_update | (self) | Update AdGuard Home entity.

```python
async def _adguard_update(self) -> None:
    """Update AdGuard Home entity."""
    percentage = await self.adguard.stats.blocked_percentage()
    self._state = f"{percentage:.2f}"
```
"async",
"def",
"_adguard_update",
"(",
"self",
")",
"->",
"None",
":",
"percentage",
"=",
"await",
"self",
".",
"adguard",
".",
"stats",
".",
"blocked_percentage",
"(",
")",
"self",
".",
"_state",
"=",
"f\"{percentage:.2f}\""
] | [
136,
4
] | [
139,
41
] | python | en | ['es', 'en', 'en'] | True |
AdGuardHomeReplacedParentalSensor.__init__ | (self, adguard) | Initialize AdGuard Home sensor.

```python
def __init__(self, adguard):
    """Initialize AdGuard Home sensor."""
    super().__init__(
        adguard,
        "AdGuard Parental Control Blocked",
        "mdi:human-male-girl",
        "blocked_parental",
        "requests",
    )
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"\"AdGuard Parental Control Blocked\"",
",",
"\"mdi:human-male-girl\"",
",",
"\"blocked_parental\"",
",",
"\"requests\"",
",",
")"
] | [
145,
4
] | [
153,
9
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomeReplacedParentalSensor._adguard_update | (self) | Update AdGuard Home entity.

```python
async def _adguard_update(self) -> None:
    """Update AdGuard Home entity."""
    self._state = await self.adguard.stats.replaced_parental()
```
"async",
"def",
"_adguard_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"await",
"self",
".",
"adguard",
".",
"stats",
".",
"replaced_parental",
"(",
")"
] | [
155,
4
] | [
157,
66
] | python | en | ['es', 'en', 'en'] | True |
AdGuardHomeReplacedSafeBrowsingSensor.__init__ | (self, adguard) | Initialize AdGuard Home sensor.

```python
def __init__(self, adguard):
    """Initialize AdGuard Home sensor."""
    super().__init__(
        adguard,
        "AdGuard Safe Browsing Blocked",
        "mdi:shield-half-full",
        "blocked_safebrowsing",
        "requests",
    )
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"\"AdGuard Safe Browsing Blocked\"",
",",
"\"mdi:shield-half-full\"",
",",
"\"blocked_safebrowsing\"",
",",
"\"requests\"",
",",
")"
] | [
163,
4
] | [
171,
9
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomeReplacedSafeBrowsingSensor._adguard_update | (self) | Update AdGuard Home entity.

```python
async def _adguard_update(self) -> None:
    """Update AdGuard Home entity."""
    self._state = await self.adguard.stats.replaced_safebrowsing()
```
"async",
"def",
"_adguard_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"await",
"self",
".",
"adguard",
".",
"stats",
".",
"replaced_safebrowsing",
"(",
")"
] | [
173,
4
] | [
175,
70
] | python | en | ['es', 'en', 'en'] | True |
AdGuardHomeReplacedSafeSearchSensor.__init__ | (self, adguard) | Initialize AdGuard Home sensor.

```python
def __init__(self, adguard):
    """Initialize AdGuard Home sensor."""
    super().__init__(
        adguard,
        "AdGuard Safe Searches Enforced",
        "mdi:shield-search",
        "enforced_safesearch",
        "requests",
    )
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"\"AdGuard Safe Searches Enforced\"",
",",
"\"mdi:shield-search\"",
",",
"\"enforced_safesearch\"",
",",
"\"requests\"",
",",
")"
] | [
181,
4
] | [
189,
9
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomeReplacedSafeSearchSensor._adguard_update | (self) | Update AdGuard Home entity.

```python
async def _adguard_update(self) -> None:
    """Update AdGuard Home entity."""
    self._state = await self.adguard.stats.replaced_safesearch()
```
"async",
"def",
"_adguard_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"await",
"self",
".",
"adguard",
".",
"stats",
".",
"replaced_safesearch",
"(",
")"
] | [
191,
4
] | [
193,
68
] | python | en | ['es', 'en', 'en'] | True |
AdGuardHomeAverageProcessingTimeSensor.__init__ | (self, adguard) | Initialize AdGuard Home sensor.

```python
def __init__(self, adguard):
    """Initialize AdGuard Home sensor."""
    super().__init__(
        adguard,
        "AdGuard Average Processing Speed",
        "mdi:speedometer",
        "average_speed",
        TIME_MILLISECONDS,
    )
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"\"AdGuard Average Processing Speed\"",
",",
"\"mdi:speedometer\"",
",",
"\"average_speed\"",
",",
"TIME_MILLISECONDS",
",",
")"
] | [
199,
4
] | [
207,
9
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomeAverageProcessingTimeSensor._adguard_update | (self) | Update AdGuard Home entity.

```python
async def _adguard_update(self) -> None:
    """Update AdGuard Home entity."""
    average = await self.adguard.stats.avg_processing_time()
    self._state = f"{average:.2f}"
```
"async",
"def",
"_adguard_update",
"(",
"self",
")",
"->",
"None",
":",
"average",
"=",
"await",
"self",
".",
"adguard",
".",
"stats",
".",
"avg_processing_time",
"(",
")",
"self",
".",
"_state",
"=",
"f\"{average:.2f}\""
] | [
209,
4
] | [
212,
38
] | python | en | ['es', 'en', 'en'] | True |
AdGuardHomeRulesCountSensor.__init__ | (self, adguard) | Initialize AdGuard Home sensor.

```python
def __init__(self, adguard):
    """Initialize AdGuard Home sensor."""
    super().__init__(
        adguard,
        "AdGuard Rules Count",
        "mdi:counter",
        "rules_count",
        "rules",
        enabled_default=False,
    )
```
"def",
"__init__",
"(",
"self",
",",
"adguard",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"adguard",
",",
"\"AdGuard Rules Count\"",
",",
"\"mdi:counter\"",
",",
"\"rules_count\"",
",",
"\"rules\"",
",",
"enabled_default",
"=",
"False",
",",
")"
] | [
218,
4
] | [
227,
9
] | python | en | ['en', 'pt', 'en'] | True |
AdGuardHomeRulesCountSensor._adguard_update | (self) | Update AdGuard Home entity.

```python
async def _adguard_update(self) -> None:
    """Update AdGuard Home entity."""
    self._state = await self.adguard.filtering.rules_count()
```
"async",
"def",
"_adguard_update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"await",
"self",
".",
"adguard",
".",
"filtering",
".",
"rules_count",
"(",
")"
] | [
229,
4
] | [
231,
64
] | python | en | ['es', 'en', 'en'] | True |
get_scanner | (hass, config) | Validate the configuration and return a THOMSON scanner.

```python
def get_scanner(hass, config):
    """Validate the configuration and return a THOMSON scanner."""
    scanner = ThomsonDeviceScanner(config[DOMAIN])
    return scanner if scanner.success_init else None
```
"def",
"get_scanner",
"(",
"hass",
",",
"config",
")",
":",
"scanner",
"=",
"ThomsonDeviceScanner",
"(",
"config",
"[",
"DOMAIN",
"]",
")",
"return",
"scanner",
"if",
"scanner",
".",
"success_init",
"else",
"None"
] | [
36,
0
] | [
40,
52
] | python | en | ['en', 'en', 'en'] | True |
ThomsonDeviceScanner.__init__ | (self, config) | Initialize the scanner.

```python
def __init__(self, config):
    """Initialize the scanner."""
    self.host = config[CONF_HOST]
    self.username = config[CONF_USERNAME]
    self.password = config[CONF_PASSWORD]
    self.last_results = {}
    # Test the router is accessible.
    data = self.get_thomson_data()
    self.success_init = data is not None
```
"def",
"__init__",
"(",
"self",
",",
"config",
")",
":",
"self",
".",
"host",
"=",
"config",
"[",
"CONF_HOST",
"]",
"self",
".",
"username",
"=",
"config",
"[",
"CONF_USERNAME",
"]",
"self",
".",
"password",
"=",
"config",
"[",
"CONF_PASSWORD",
"]",
"self",
".",
"last_results",
"=",
"{",
"}",
"# Test the router is accessible.",
"data",
"=",
"self",
".",
"get_thomson_data",
"(",
")",
"self",
".",
"success_init",
"=",
"data",
"is",
"not",
"None"
] | [
46,
4
] | [
55,
44
] | python | en | ['en', 'en', 'en'] | True |
ThomsonDeviceScanner.scan_devices | (self) | Scan for new devices and return a list with found device IDs.

```python
def scan_devices(self):
    """Scan for new devices and return a list with found device IDs."""
    self._update_info()
    return [client["mac"] for client in self.last_results]
```
"def",
"scan_devices",
"(",
"self",
")",
":",
"self",
".",
"_update_info",
"(",
")",
"return",
"[",
"client",
"[",
"\"mac\"",
"]",
"for",
"client",
"in",
"self",
".",
"last_results",
"]"
] | [
57,
4
] | [
60,
62
] | python | en | ['en', 'en', 'en'] | True |
ThomsonDeviceScanner.get_device_name | (self, device) | Return the name of the given device or None if we don't know.

```python
def get_device_name(self, device):
    """Return the name of the given device or None if we don't know."""
    if not self.last_results:
        return None
    for client in self.last_results:
        if client["mac"] == device:
            return client["host"]
    return None
```
"def",
"get_device_name",
"(",
"self",
",",
"device",
")",
":",
"if",
"not",
"self",
".",
"last_results",
":",
"return",
"None",
"for",
"client",
"in",
"self",
".",
"last_results",
":",
"if",
"client",
"[",
"\"mac\"",
"]",
"==",
"device",
":",
"return",
"client",
"[",
"\"host\"",
"]",
"return",
"None"
] | [
62,
4
] | [
69,
19
] | python | en | ['en', 'en', 'en'] | True |
ThomsonDeviceScanner._update_info | (self) | Ensure the information from the THOMSON router is up to date. Return boolean if scanning successful.

```python
def _update_info(self):
    """Ensure the information from the THOMSON router is up to date.

    Return boolean if scanning successful.
    """
    if not self.success_init:
        return False
    _LOGGER.info("Checking ARP")
    data = self.get_thomson_data()
    if not data:
        return False
    # Flag C stands for CONNECTED
    active_clients = [
        client for client in data.values() if client["status"].find("C") != -1
    ]
    self.last_results = active_clients
    return True
```
"def",
"_update_info",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"success_init",
":",
"return",
"False",
"_LOGGER",
".",
"info",
"(",
"\"Checking ARP\"",
")",
"data",
"=",
"self",
".",
"get_thomson_data",
"(",
")",
"if",
"not",
"data",
":",
"return",
"False",
"# Flag C stands for CONNECTED",
"active_clients",
"=",
"[",
"client",
"for",
"client",
"in",
"data",
".",
"values",
"(",
")",
"if",
"client",
"[",
"\"status\"",
"]",
".",
"find",
"(",
"\"C\"",
")",
"!=",
"-",
"1",
"]",
"self",
".",
"last_results",
"=",
"active_clients",
"return",
"True"
] | [
71,
4
] | [
89,
19
] | python | en | ['en', 'en', 'en'] | True |
ThomsonDeviceScanner.get_thomson_data | (self) | Retrieve data from THOMSON and return parsed result.

```python
def get_thomson_data(self):
    """Retrieve data from THOMSON and return parsed result."""
    try:
        telnet = telnetlib.Telnet(self.host)
        telnet.read_until(b"Username : ")
        telnet.write((self.username + "\r\n").encode("ascii"))
        telnet.read_until(b"Password : ")
        telnet.write((self.password + "\r\n").encode("ascii"))
        telnet.read_until(b"=>")
        telnet.write(b"hostmgr list\r\n")
        devices_result = telnet.read_until(b"=>").split(b"\r\n")
        telnet.write(b"exit\r\n")
    except EOFError:
        _LOGGER.exception("Unexpected response from router")
        return
    except ConnectionRefusedError:
        _LOGGER.exception("Connection refused by router. Telnet enabled?")
        return
    devices = {}
    for device in devices_result:
        match = _DEVICES_REGEX.search(device.decode("utf-8"))
        if match:
            devices[match.group("ip")] = {
                "ip": match.group("ip"),
                "mac": match.group("mac").upper(),
                "host": match.group("host"),
                "status": match.group("status"),
            }
    return devices
```
"def",
"get_thomson_data",
"(",
"self",
")",
":",
"try",
":",
"telnet",
"=",
"telnetlib",
".",
"Telnet",
"(",
"self",
".",
"host",
")",
"telnet",
".",
"read_until",
"(",
"b\"Username : \"",
")",
"telnet",
".",
"write",
"(",
"(",
"self",
".",
"username",
"+",
"\"\\r\\n\"",
")",
".",
"encode",
"(",
"\"ascii\"",
")",
")",
"telnet",
".",
"read_until",
"(",
"b\"Password : \"",
")",
"telnet",
".",
"write",
"(",
"(",
"self",
".",
"password",
"+",
"\"\\r\\n\"",
")",
".",
"encode",
"(",
"\"ascii\"",
")",
")",
"telnet",
".",
"read_until",
"(",
"b\"=>\"",
")",
"telnet",
".",
"write",
"(",
"b\"hostmgr list\\r\\n\"",
")",
"devices_result",
"=",
"telnet",
".",
"read_until",
"(",
"b\"=>\"",
")",
".",
"split",
"(",
"b\"\\r\\n\"",
")",
"telnet",
".",
"write",
"(",
"b\"exit\\r\\n\"",
")",
"except",
"EOFError",
":",
"_LOGGER",
".",
"exception",
"(",
"\"Unexpected response from router\"",
")",
"return",
"except",
"ConnectionRefusedError",
":",
"_LOGGER",
".",
"exception",
"(",
"\"Connection refused by router. Telnet enabled?\"",
")",
"return",
"devices",
"=",
"{",
"}",
"for",
"device",
"in",
"devices_result",
":",
"match",
"=",
"_DEVICES_REGEX",
".",
"search",
"(",
"device",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
"if",
"match",
":",
"devices",
"[",
"match",
".",
"group",
"(",
"\"ip\"",
")",
"]",
"=",
"{",
"\"ip\"",
":",
"match",
".",
"group",
"(",
"\"ip\"",
")",
",",
"\"mac\"",
":",
"match",
".",
"group",
"(",
"\"mac\"",
")",
".",
"upper",
"(",
")",
",",
"\"host\"",
":",
"match",
".",
"group",
"(",
"\"host\"",
")",
",",
"\"status\"",
":",
"match",
".",
"group",
"(",
"\"status\"",
")",
",",
"}",
"return",
"devices"
] | [
91,
4
] | [
120,
22
] | python | en | ['en', 'en', 'en'] | True |
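The parsing loop above references a module-level _DEVICES_REGEX that is not included in this record. The pattern below is a guess at its shape, limited to the four named groups (mac, ip, status, host) the method actually reads from each hostmgr list row:

```python
import re

# Hypothetical reconstruction of _DEVICES_REGEX; the real pattern in the
# platform module may capture additional columns of the hostmgr output.
_DEVICES_REGEX = re.compile(
    r"(?P<mac>([0-9a-f]{2}[:-]){5}[0-9a-f]{2})\s+"
    r"(?P<ip>([0-9]{1,3}\.){3}[0-9]{1,3})\s+"
    r"(?P<status>\S+)\s+"
    r"(?P<host>\S+)"
)
```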
KeyphraseExtractor.normalize | (self, sent) | Removes punctuation from a tokenized/tagged sentence and lowercases words.

```python
def normalize(self, sent):
    """
    Removes punctuation from a tokenized/tagged sentence and
    lowercases words.
    """
    is_punct = lambda word: all(unicat(char).startswith('P') for char in word)
    sent = filter(lambda t: not is_punct(t[0]), sent)
    sent = map(lambda t: (t[0].lower(), t[1]), sent)
    return list(sent)
```
"def",
"normalize",
"(",
"self",
",",
"sent",
")",
":",
"is_punct",
"=",
"lambda",
"word",
":",
"all",
"(",
"unicat",
"(",
"char",
")",
".",
"startswith",
"(",
"'P'",
")",
"for",
"char",
"in",
"word",
")",
"sent",
"=",
"filter",
"(",
"lambda",
"t",
":",
"not",
"is_punct",
"(",
"t",
"[",
"0",
"]",
")",
",",
"sent",
")",
"sent",
"=",
"map",
"(",
"lambda",
"t",
":",
"(",
"t",
"[",
"0",
"]",
".",
"lower",
"(",
")",
",",
"t",
"[",
"1",
"]",
")",
",",
"sent",
")",
"return",
"list",
"(",
"sent",
")"
] | [
24,
4
] | [
32,
25
] | python | en | ['en', 'error', 'th'] | False |
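A standalone rerun of the same filtering and lowercasing on one tagged sentence; the unicat name is assumed to be an alias for unicodedata.category, matching how the method uses it:

```python
from unicodedata import category as unicat

# Punctuation-only tokens are dropped, remaining words are lowercased.
sent = [("Good", "JJ"), ("muffins", "NNS"), ("!", ".")]
is_punct = lambda word: all(unicat(ch).startswith("P") for ch in word)
print([(w.lower(), t) for w, t in sent if not is_punct(w)])
# -> [('good', 'JJ'), ('muffins', 'NNS')]
```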
KeyphraseExtractor.extract_keyphrases | (self, document) | For a document, parse sentences using our chunker created by our grammar, converting the parse tree into a tagged sequence. Yields extracted phrases.

```python
def extract_keyphrases(self, document):
    """
    For a document, parse sentences using our chunker created by
    our grammar, converting the parse tree into a tagged sequence.
    Yields extracted phrases.
    """
    for sents in document:
        for sent in sents:
            sent = self.normalize(sent)
            if not sent: continue
            chunks = tree2conlltags(self.chunker.parse(sent))
            phrases = [
                " ".join(word for word, pos, chunk in group).lower()
                for key, group in groupby(
                    chunks, lambda term: term[-1] != 'O'
                ) if key
            ]
            for phrase in phrases:
                yield phrase
```
"def",
"extract_keyphrases",
"(",
"self",
",",
"document",
")",
":",
"for",
"sents",
"in",
"document",
":",
"for",
"sent",
"in",
"sents",
":",
"sent",
"=",
"self",
".",
"normalize",
"(",
"sent",
")",
"if",
"not",
"sent",
":",
"continue",
"chunks",
"=",
"tree2conlltags",
"(",
"self",
".",
"chunker",
".",
"parse",
"(",
"sent",
")",
")",
"phrases",
"=",
"[",
"\" \"",
".",
"join",
"(",
"word",
"for",
"word",
",",
"pos",
",",
"chunk",
"in",
"group",
")",
".",
"lower",
"(",
")",
"for",
"key",
",",
"group",
"in",
"groupby",
"(",
"chunks",
",",
"lambda",
"term",
":",
"term",
"[",
"-",
"1",
"]",
"!=",
"'O'",
")",
"if",
"key",
"]",
"for",
"phrase",
"in",
"phrases",
":",
"yield",
"phrase"
] | [
34,
4
] | [
52,
32
] | python | en | ['en', 'error', 'th'] | False |
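The grammar and chunker these methods call are built elsewhere in the class. The snippet below is one plausible construction, using the noun-phrase keyphrase grammar commonly paired with this exact normalize/extract pattern; treat GRAMMAR as an assumption rather than the record's actual value:

```python
from nltk.chunk.regexp import RegexpParser

# Keyphrase grammar: optional adjectives plus one or more nouns, optionally
# joined across a preposition (e.g. "cost of living"); self.chunker is
# assumed to be built this way in the class __init__.
GRAMMAR = r"KT: {(<JJ>* <NN.*>+ <IN>)? <JJ>* <NN.*>+}"
chunker = RegexpParser(GRAMMAR)
```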
deps_dir | () | Return path to deps directory.

```python
def deps_dir():
    """Return path to deps directory."""
    return os.path.abspath("/deps_dir")
```
"def",
"deps_dir",
"(",
")",
":",
"return",
"os",
".",
"path",
".",
"abspath",
"(",
"\"/deps_dir\"",
")"
] | [
34,
0
] | [
36,
39
] | python | en | ['en', 'en', 'en'] | True |
lib_dir | (deps_dir) | Return path to lib directory.

```python
def lib_dir(deps_dir):
    """Return path to lib directory."""
    return os.path.join(deps_dir, "lib_dir")
```
"def",
"lib_dir",
"(",
"deps_dir",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"deps_dir",
",",
"\"lib_dir\"",
")"
] | [
40,
0
] | [
42,
44
] | python | en | ['en', 'cy', 'en'] | True |
mock_popen | (lib_dir) | Return a Popen mock.

```python
def mock_popen(lib_dir):
    """Return a Popen mock."""
    with patch("homeassistant.util.package.Popen") as popen_mock:
        popen_mock.return_value.communicate.return_value = (
            bytes(lib_dir, "utf-8"),
            b"error",
        )
        popen_mock.return_value.returncode = 0
        yield popen_mock
```
"def",
"mock_popen",
"(",
"lib_dir",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.util.package.Popen\"",
")",
"as",
"popen_mock",
":",
"popen_mock",
".",
"return_value",
".",
"communicate",
".",
"return_value",
"=",
"(",
"bytes",
"(",
"lib_dir",
",",
"\"utf-8\"",
")",
",",
"b\"error\"",
",",
")",
"popen_mock",
".",
"return_value",
".",
"returncode",
"=",
"0",
"yield",
"popen_mock"
] | [
46,
0
] | [
54,
24
] | python | en | ['en', 'fy', 'en'] | True |
mock_env_copy | () | Mock os.environ.copy.

```python
def mock_env_copy():
    """Mock os.environ.copy."""
    with patch("homeassistant.util.package.os.environ.copy") as env_copy:
        env_copy.return_value = {}
        yield env_copy
```
"def",
"mock_env_copy",
"(",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.util.package.os.environ.copy\"",
")",
"as",
"env_copy",
":",
"env_copy",
".",
"return_value",
"=",
"{",
"}",
"yield",
"env_copy"
] | [
58,
0
] | [
62,
22
] | python | gl | ['gl', 'gl', 'pt'] | False |
mock_venv | () | Mock homeassistant.util.package.is_virtual_env.

```python
def mock_venv():
    """Mock homeassistant.util.package.is_virtual_env."""
    with patch("homeassistant.util.package.is_virtual_env") as mock:
        mock.return_value = True
        yield mock
```
"def",
"mock_venv",
"(",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.util.package.is_virtual_env\"",
")",
"as",
"mock",
":",
"mock",
".",
"return_value",
"=",
"True",
"yield",
"mock"
] | [
66,
0
] | [
70,
18
] | python | af | ['en', 'af', 'it'] | False |
mock_async_subprocess | () | Return an async Popen mock.

```python
def mock_async_subprocess():
    """Return an async Popen mock."""
    async_popen = MagicMock()

    async def communicate(input=None):
        """Communicate mock."""
        stdout = bytes("/deps_dir/lib_dir", "utf-8")
        return (stdout, None)

    async_popen.communicate = communicate
    return async_popen
```
"def",
"mock_async_subprocess",
"(",
")",
":",
"async_popen",
"=",
"MagicMock",
"(",
")",
"async",
"def",
"communicate",
"(",
"input",
"=",
"None",
")",
":",
"\"\"\"Communicate mock.\"\"\"",
"stdout",
"=",
"bytes",
"(",
"\"/deps_dir/lib_dir\"",
",",
"\"utf-8\"",
")",
"return",
"(",
"stdout",
",",
"None",
")",
"async_popen",
".",
"communicate",
"=",
"communicate",
"return",
"async_popen"
] | [
73,
0
] | [
83,
22
] | python | en | ['en', 'fy', 'en'] | True |
test_install | (mock_sys, mock_popen, mock_env_copy, mock_venv) | Test an install attempt on a package that doesn't exist.

```python
def test_install(mock_sys, mock_popen, mock_env_copy, mock_venv):
    """Test an install attempt on a package that doesn't exist."""
    env = mock_env_copy()
    assert package.install_package(TEST_NEW_REQ, False)
    assert mock_popen.call_count == 1
    assert mock_popen.call_args == call(
        [mock_sys.executable, "-m", "pip", "install", "--quiet", TEST_NEW_REQ],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        env=env,
    )
    assert mock_popen.return_value.communicate.call_count == 1
```
"def",
"test_install",
"(",
"mock_sys",
",",
"mock_popen",
",",
"mock_env_copy",
",",
"mock_venv",
")",
":",
"env",
"=",
"mock_env_copy",
"(",
")",
"assert",
"package",
".",
"install_package",
"(",
"TEST_NEW_REQ",
",",
"False",
")",
"assert",
"mock_popen",
".",
"call_count",
"==",
"1",
"assert",
"mock_popen",
".",
"call_args",
"==",
"call",
"(",
"[",
"mock_sys",
".",
"executable",
",",
"\"-m\"",
",",
"\"pip\"",
",",
"\"install\"",
",",
"\"--quiet\"",
",",
"TEST_NEW_REQ",
"]",
",",
"stdin",
"=",
"PIPE",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
",",
"env",
"=",
"env",
",",
")",
"assert",
"mock_popen",
".",
"return_value",
".",
"communicate",
".",
"call_count",
"==",
"1"
] | [
86,
0
] | [
98,
62
] | python | en | ['en', 'en', 'en'] | True |
test_install_upgrade | (mock_sys, mock_popen, mock_env_copy, mock_venv) | Test an upgrade attempt on a package.

```python
def test_install_upgrade(mock_sys, mock_popen, mock_env_copy, mock_venv):
    """Test an upgrade attempt on a package."""
    env = mock_env_copy()
    assert package.install_package(TEST_NEW_REQ)
    assert mock_popen.call_count == 1
    assert mock_popen.call_args == call(
        [
            mock_sys.executable,
            "-m",
            "pip",
            "install",
            "--quiet",
            TEST_NEW_REQ,
            "--upgrade",
        ],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        env=env,
    )
    assert mock_popen.return_value.communicate.call_count == 1
```
"def",
"test_install_upgrade",
"(",
"mock_sys",
",",
"mock_popen",
",",
"mock_env_copy",
",",
"mock_venv",
")",
":",
"env",
"=",
"mock_env_copy",
"(",
")",
"assert",
"package",
".",
"install_package",
"(",
"TEST_NEW_REQ",
")",
"assert",
"mock_popen",
".",
"call_count",
"==",
"1",
"assert",
"mock_popen",
".",
"call_args",
"==",
"call",
"(",
"[",
"mock_sys",
".",
"executable",
",",
"\"-m\"",
",",
"\"pip\"",
",",
"\"install\"",
",",
"\"--quiet\"",
",",
"TEST_NEW_REQ",
",",
"\"--upgrade\"",
",",
"]",
",",
"stdin",
"=",
"PIPE",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
",",
"env",
"=",
"env",
",",
")",
"assert",
"mock_popen",
".",
"return_value",
".",
"communicate",
".",
"call_count",
"==",
"1"
] | [
101,
0
] | [
121,
62
] | python | en | ['en', 'en', 'en'] | True |
test_install_target | (mock_sys, mock_popen, mock_env_copy, mock_venv) | Test an install with a target.

```python
def test_install_target(mock_sys, mock_popen, mock_env_copy, mock_venv):
    """Test an install with a target."""
    target = "target_folder"
    env = mock_env_copy()
    env["PYTHONUSERBASE"] = os.path.abspath(target)
    mock_venv.return_value = False
    mock_sys.platform = "linux"
    args = [
        mock_sys.executable,
        "-m",
        "pip",
        "install",
        "--quiet",
        TEST_NEW_REQ,
        "--user",
        "--prefix=",
    ]
    assert package.install_package(TEST_NEW_REQ, False, target=target)
    assert mock_popen.call_count == 1
    assert mock_popen.call_args == call(
        args, stdin=PIPE, stdout=PIPE, stderr=PIPE, env=env
    )
    assert mock_popen.return_value.communicate.call_count == 1
```
"def",
"test_install_target",
"(",
"mock_sys",
",",
"mock_popen",
",",
"mock_env_copy",
",",
"mock_venv",
")",
":",
"target",
"=",
"\"target_folder\"",
"env",
"=",
"mock_env_copy",
"(",
")",
"env",
"[",
"\"PYTHONUSERBASE\"",
"]",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"target",
")",
"mock_venv",
".",
"return_value",
"=",
"False",
"mock_sys",
".",
"platform",
"=",
"\"linux\"",
"args",
"=",
"[",
"mock_sys",
".",
"executable",
",",
"\"-m\"",
",",
"\"pip\"",
",",
"\"install\"",
",",
"\"--quiet\"",
",",
"TEST_NEW_REQ",
",",
"\"--user\"",
",",
"\"--prefix=\"",
",",
"]",
"assert",
"package",
".",
"install_package",
"(",
"TEST_NEW_REQ",
",",
"False",
",",
"target",
"=",
"target",
")",
"assert",
"mock_popen",
".",
"call_count",
"==",
"1",
"assert",
"mock_popen",
".",
"call_args",
"==",
"call",
"(",
"args",
",",
"stdin",
"=",
"PIPE",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
",",
"env",
"=",
"env",
")",
"assert",
"mock_popen",
".",
"return_value",
".",
"communicate",
".",
"call_count",
"==",
"1"
] | [
124,
0
] | [
147,
62
] | python | en | ['en', 'en', 'en'] | True |
test_install_target_venv | (mock_sys, mock_popen, mock_env_copy, mock_venv) | Test an install with a target in a virtual environment.

```python
def test_install_target_venv(mock_sys, mock_popen, mock_env_copy, mock_venv):
    """Test an install with a target in a virtual environment."""
    target = "target_folder"
    with pytest.raises(AssertionError):
        package.install_package(TEST_NEW_REQ, False, target=target)
```
"def",
"test_install_target_venv",
"(",
"mock_sys",
",",
"mock_popen",
",",
"mock_env_copy",
",",
"mock_venv",
")",
":",
"target",
"=",
"\"target_folder\"",
"with",
"pytest",
".",
"raises",
"(",
"AssertionError",
")",
":",
"package",
".",
"install_package",
"(",
"TEST_NEW_REQ",
",",
"False",
",",
"target",
"=",
"target",
")"
] | [
150,
0
] | [
154,
67
] | python | en | ['en', 'en', 'en'] | True |
test_install_error | (caplog, mock_sys, mock_popen, mock_venv) | Test an install with a target.

```python
def test_install_error(caplog, mock_sys, mock_popen, mock_venv):
    """Test an install with a target."""
    caplog.set_level(logging.WARNING)
    mock_popen.return_value.returncode = 1
    assert not package.install_package(TEST_NEW_REQ)
    assert len(caplog.records) == 1
    for record in caplog.records:
        assert record.levelname == "ERROR"
```
"def",
"test_install_error",
"(",
"caplog",
",",
"mock_sys",
",",
"mock_popen",
",",
"mock_venv",
")",
":",
"caplog",
".",
"set_level",
"(",
"logging",
".",
"WARNING",
")",
"mock_popen",
".",
"return_value",
".",
"returncode",
"=",
"1",
"assert",
"not",
"package",
".",
"install_package",
"(",
"TEST_NEW_REQ",
")",
"assert",
"len",
"(",
"caplog",
".",
"records",
")",
"==",
"1",
"for",
"record",
"in",
"caplog",
".",
"records",
":",
"assert",
"record",
".",
"levelname",
"==",
"\"ERROR\""
] | [
157,
0
] | [
164,
42
] | python | en | ['en', 'en', 'en'] | True |
test_install_constraint | (mock_sys, mock_popen, mock_env_copy, mock_venv) | Test install with constraint file on not installed package.

```python
def test_install_constraint(mock_sys, mock_popen, mock_env_copy, mock_venv):
    """Test install with constraint file on not installed package."""
    env = mock_env_copy()
    constraints = "constraints_file.txt"
    assert package.install_package(TEST_NEW_REQ, False, constraints=constraints)
    assert mock_popen.call_count == 1
    assert mock_popen.call_args == call(
        [
            mock_sys.executable,
            "-m",
            "pip",
            "install",
            "--quiet",
            TEST_NEW_REQ,
            "--constraint",
            constraints,
        ],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        env=env,
    )
    assert mock_popen.return_value.communicate.call_count == 1
```
"def",
"test_install_constraint",
"(",
"mock_sys",
",",
"mock_popen",
",",
"mock_env_copy",
",",
"mock_venv",
")",
":",
"env",
"=",
"mock_env_copy",
"(",
")",
"constraints",
"=",
"\"constraints_file.txt\"",
"assert",
"package",
".",
"install_package",
"(",
"TEST_NEW_REQ",
",",
"False",
",",
"constraints",
"=",
"constraints",
")",
"assert",
"mock_popen",
".",
"call_count",
"==",
"1",
"assert",
"mock_popen",
".",
"call_args",
"==",
"call",
"(",
"[",
"mock_sys",
".",
"executable",
",",
"\"-m\"",
",",
"\"pip\"",
",",
"\"install\"",
",",
"\"--quiet\"",
",",
"TEST_NEW_REQ",
",",
"\"--constraint\"",
",",
"constraints",
",",
"]",
",",
"stdin",
"=",
"PIPE",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
",",
"env",
"=",
"env",
",",
")",
"assert",
"mock_popen",
".",
"return_value",
".",
"communicate",
".",
"call_count",
"==",
"1"
] | [
167,
0
] | [
189,
62
] | python | en | ['en', 'en', 'en'] | True |
test_install_find_links | (mock_sys, mock_popen, mock_env_copy, mock_venv) | Test install with find-links on a package that is not installed. | Test install with find-links on a package that is not installed. | def test_install_find_links(mock_sys, mock_popen, mock_env_copy, mock_venv):
    """Test install with find-links on a package that is not installed."""
    env = mock_env_copy()
    link = "https://wheels-repository"

    assert package.install_package(TEST_NEW_REQ, False, find_links=link)
    assert mock_popen.call_count == 1
    assert mock_popen.call_args == call(
        [
            mock_sys.executable,
            "-m",
            "pip",
            "install",
            "--quiet",
            TEST_NEW_REQ,
            "--find-links",
            link,
            "--prefer-binary",
        ],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        env=env,
    )
    assert mock_popen.return_value.communicate.call_count == 1 | [
"def",
"test_install_find_links",
"(",
"mock_sys",
",",
"mock_popen",
",",
"mock_env_copy",
",",
"mock_venv",
")",
":",
"env",
"=",
"mock_env_copy",
"(",
")",
"link",
"=",
"\"https://wheels-repository\"",
"assert",
"package",
".",
"install_package",
"(",
"TEST_NEW_REQ",
",",
"False",
",",
"find_links",
"=",
"link",
")",
"assert",
"mock_popen",
".",
"call_count",
"==",
"1",
"assert",
"mock_popen",
".",
"call_args",
"==",
"call",
"(",
"[",
"mock_sys",
".",
"executable",
",",
"\"-m\"",
",",
"\"pip\"",
",",
"\"install\"",
",",
"\"--quiet\"",
",",
"TEST_NEW_REQ",
",",
"\"--find-links\"",
",",
"link",
",",
"\"--prefer-binary\"",
",",
"]",
",",
"stdin",
"=",
"PIPE",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
",",
"env",
"=",
"env",
",",
")",
"assert",
"mock_popen",
".",
"return_value",
".",
"communicate",
".",
"call_count",
"==",
"1"
] | [
192,
0
] | [
215,
62
] | python | en | ['en', 'en', 'en'] | True |
test_async_get_user_site | (mock_env_copy) | Test async get user site directory. | Test async get user site directory. | async def test_async_get_user_site(mock_env_copy):
"""Test async get user site directory."""
deps_dir = "/deps_dir"
env = mock_env_copy()
env["PYTHONUSERBASE"] = os.path.abspath(deps_dir)
args = [sys.executable, "-m", "site", "--user-site"]
with patch(
"homeassistant.util.package.asyncio.create_subprocess_exec",
return_value=mock_async_subprocess(),
) as popen_mock:
ret = await package.async_get_user_site(deps_dir)
assert popen_mock.call_count == 1
assert popen_mock.call_args == call(
*args,
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.DEVNULL,
env=env,
)
assert ret == os.path.join(deps_dir, "lib_dir") | [
"async",
"def",
"test_async_get_user_site",
"(",
"mock_env_copy",
")",
":",
"deps_dir",
"=",
"\"/deps_dir\"",
"env",
"=",
"mock_env_copy",
"(",
")",
"env",
"[",
"\"PYTHONUSERBASE\"",
"]",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"deps_dir",
")",
"args",
"=",
"[",
"sys",
".",
"executable",
",",
"\"-m\"",
",",
"\"site\"",
",",
"\"--user-site\"",
"]",
"with",
"patch",
"(",
"\"homeassistant.util.package.asyncio.create_subprocess_exec\"",
",",
"return_value",
"=",
"mock_async_subprocess",
"(",
")",
",",
")",
"as",
"popen_mock",
":",
"ret",
"=",
"await",
"package",
".",
"async_get_user_site",
"(",
"deps_dir",
")",
"assert",
"popen_mock",
".",
"call_count",
"==",
"1",
"assert",
"popen_mock",
".",
"call_args",
"==",
"call",
"(",
"*",
"args",
",",
"stdin",
"=",
"asyncio",
".",
"subprocess",
".",
"PIPE",
",",
"stdout",
"=",
"asyncio",
".",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"asyncio",
".",
"subprocess",
".",
"DEVNULL",
",",
"env",
"=",
"env",
",",
")",
"assert",
"ret",
"==",
"os",
".",
"path",
".",
"join",
"(",
"deps_dir",
",",
"\"lib_dir\"",
")"
] | [
218,
0
] | [
237,
51
] | python | en | ['fr', 'en', 'en'] | True |
test_check_package_global | () | Test for an installed package. | Test for an installed package. | def test_check_package_global():
"""Test for an installed package."""
installed_package = list(pkg_resources.working_set)[0].project_name
assert package.is_installed(installed_package) | [
"def",
"test_check_package_global",
"(",
")",
":",
"installed_package",
"=",
"list",
"(",
"pkg_resources",
".",
"working_set",
")",
"[",
"0",
"]",
".",
"project_name",
"assert",
"package",
".",
"is_installed",
"(",
"installed_package",
")"
] | [
240,
0
] | [
243,
50
] | python | en | ['en', 'en', 'en'] | True |
test_check_package_zip | () | Test that a zip package is not reported as installed. | Test that a zip package is not reported as installed. | def test_check_package_zip():
    """Test that a zip package is not reported as installed."""
    assert not package.is_installed(TEST_ZIP_REQ) | [
"def",
"test_check_package_zip",
"(",
")",
":",
"assert",
"not",
"package",
".",
"is_installed",
"(",
"TEST_ZIP_REQ",
")"
] | [
246,
0
] | [
248,
49
] | python | en | ['en', 'en', 'en'] | True |
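The package-installation tests above depend on pytest fixtures (mock_sys, mock_popen, mock_env_copy, mock_venv) that are defined earlier in the test module and not included in this excerpt. A minimal sketch of what they could look like, assuming homeassistant.util.package as the patch target; the targets and return values here are illustrative assumptions, not the module's actual definitions:

import pytest
from unittest.mock import patch

@pytest.fixture
def mock_sys():
    # Assumed patch target: the sys module as imported by homeassistant.util.package.
    with patch("homeassistant.util.package.sys") as sys_mock:
        sys_mock.executable = "python3"
        yield sys_mock

@pytest.fixture
def mock_popen():
    # Fake Popen so no real "pip install" subprocess is ever spawned.
    with patch("homeassistant.util.package.Popen") as popen_mock:
        popen_mock.return_value.communicate.return_value = (b"", b"")
        popen_mock.return_value.returncode = 0
        yield popen_mock

@pytest.fixture
def mock_env_copy():
    # Each test starts from an empty, predictable environment mapping.
    with patch("homeassistant.util.package.os.environ.copy") as env_copy:
        env_copy.return_value = {}
        yield env_copy

@pytest.fixture
def mock_venv():
    # Pretend the interpreter runs inside a virtual environment.
    with patch("homeassistant.util.package.is_virtual_env", return_value=True):
        yield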
ZWaveBaseEntity.__init__ | (self) | Initialize the base Z-Wave class. | Initialize the base Z-Wave class. | def __init__(self):
"""Initialize the base Z-Wave class."""
self._update_scheduled = False | [
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"_update_scheduled",
"=",
"False"
] | [
65,
4
] | [
67,
38
] | python | en | ['en', 'en', 'en'] | True |
ZWaveBaseEntity.maybe_schedule_update | (self) | Maybe schedule state update.
If value changed after device was created but before setup_platform
was called - skip updating state.
| Maybe schedule state update. | def maybe_schedule_update(self):
"""Maybe schedule state update.
If value changed after device was created but before setup_platform
was called - skip updating state.
"""
if self.hass and not self._update_scheduled:
self.hass.add_job(self._schedule_update) | [
"def",
"maybe_schedule_update",
"(",
"self",
")",
":",
"if",
"self",
".",
"hass",
"and",
"not",
"self",
".",
"_update_scheduled",
":",
"self",
".",
"hass",
".",
"add_job",
"(",
"self",
".",
"_schedule_update",
")"
] | [
69,
4
] | [
76,
52
] | python | en | ['de', 'en', 'en'] | True |
ZWaveBaseEntity._schedule_update | (self) | Schedule delayed update. | Schedule delayed update. | def _schedule_update(self):
"""Schedule delayed update."""
if self._update_scheduled:
return
@callback
def do_update():
"""Really update."""
self.async_write_ha_state()
self._update_scheduled = False
self._update_scheduled = True
self.hass.loop.call_later(0.1, do_update) | [
"def",
"_schedule_update",
"(",
"self",
")",
":",
"if",
"self",
".",
"_update_scheduled",
":",
"return",
"@",
"callback",
"def",
"do_update",
"(",
")",
":",
"\"\"\"Really update.\"\"\"",
"self",
".",
"async_write_ha_state",
"(",
")",
"self",
".",
"_update_scheduled",
"=",
"False",
"self",
".",
"_update_scheduled",
"=",
"True",
"self",
".",
"hass",
".",
"loop",
".",
"call_later",
"(",
"0.1",
",",
"do_update",
")"
] | [
79,
4
] | [
91,
49
] | python | de | ['it', 'de', 'en'] | False |
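The maybe_schedule_update/_schedule_update pair above implements a 0.1-second debounce: a burst of value-changed callbacks collapses into a single state write. A self-contained sketch of the same pattern in plain asyncio; the class and attribute names are illustrative, not Home Assistant code:

import asyncio

class DebouncedUpdater:
    """Coalesce many update requests into one deferred refresh."""

    def __init__(self, loop):
        self._loop = loop
        self._update_scheduled = False
        self.writes = 0

    def maybe_schedule_update(self):
        if self._update_scheduled:
            return  # a refresh is already pending
        self._update_scheduled = True
        self._loop.call_later(0.1, self._do_update)

    def _do_update(self):
        self.writes += 1  # stands in for async_write_ha_state()
        self._update_scheduled = False

async def main():
    updater = DebouncedUpdater(asyncio.get_running_loop())
    for _ in range(100):  # burst of "value changed" signals
        updater.maybe_schedule_update()
    await asyncio.sleep(0.2)
    assert updater.writes == 1  # coalesced into a single state write

asyncio.run(main())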
ZWaveBaseEntity.try_remove_and_add | (self) | Remove this entity and add it back. | Remove this entity and add it back. | def try_remove_and_add(self):
"""Remove this entity and add it back."""
async def _async_remove_and_add():
await self.async_remove()
self.entity_id = None
await self.platform.async_add_entities([self])
if self.hass and self.platform:
self.hass.add_job(_async_remove_and_add) | [
"def",
"try_remove_and_add",
"(",
"self",
")",
":",
"async",
"def",
"_async_remove_and_add",
"(",
")",
":",
"await",
"self",
".",
"async_remove",
"(",
")",
"self",
".",
"entity_id",
"=",
"None",
"await",
"self",
".",
"platform",
".",
"async_add_entities",
"(",
"[",
"self",
"]",
")",
"if",
"self",
".",
"hass",
"and",
"self",
".",
"platform",
":",
"self",
".",
"hass",
".",
"add_job",
"(",
"_async_remove_and_add",
")"
] | [
93,
4
] | [
102,
52
] | python | en | ['en', 'en', 'en'] | True |
ZWaveBaseEntity.node_removed | (self) | Call when a node is removed from the Z-Wave network. | Call when a node is removed from the Z-Wave network. | async def node_removed(self):
"""Call when a node is removed from the Z-Wave network."""
await self.async_remove()
registry = await async_get_registry(self.hass)
if self.entity_id not in registry.entities:
return
registry.async_remove(self.entity_id) | [
"async",
"def",
"node_removed",
"(",
"self",
")",
":",
"await",
"self",
".",
"async_remove",
"(",
")",
"registry",
"=",
"await",
"async_get_registry",
"(",
"self",
".",
"hass",
")",
"if",
"self",
".",
"entity_id",
"not",
"in",
"registry",
".",
"entities",
":",
"return",
"registry",
".",
"async_remove",
"(",
"self",
".",
"entity_id",
")"
] | [
104,
4
] | [
112,
45
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.__init__ | (self, node, network) | Initialize node. | Initialize node. | def __init__(self, node, network):
"""Initialize node."""
# pylint: disable=import-error
super().__init__()
from openzwave.network import ZWaveNetwork
from pydispatch import dispatcher
self._network = network
self.node = node
self.node_id = self.node.node_id
self._name = node_name(self.node)
self._product_name = node.product_name
self._manufacturer_name = node.manufacturer_name
self._unique_id = self._compute_unique_id()
self._application_version = None
self._attributes = {}
self.wakeup_interval = None
self.location = None
self.battery_level = None
dispatcher.connect(
self.network_node_value_added, ZWaveNetwork.SIGNAL_VALUE_ADDED
)
dispatcher.connect(self.network_node_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED)
dispatcher.connect(self.network_node_changed, ZWaveNetwork.SIGNAL_NODE)
dispatcher.connect(self.network_node_changed, ZWaveNetwork.SIGNAL_NOTIFICATION)
dispatcher.connect(self.network_node_event, ZWaveNetwork.SIGNAL_NODE_EVENT)
dispatcher.connect(
self.network_scene_activated, ZWaveNetwork.SIGNAL_SCENE_EVENT
) | [
"def",
"__init__",
"(",
"self",
",",
"node",
",",
"network",
")",
":",
"# pylint: disable=import-error",
"super",
"(",
")",
".",
"__init__",
"(",
")",
"from",
"openzwave",
".",
"network",
"import",
"ZWaveNetwork",
"from",
"pydispatch",
"import",
"dispatcher",
"self",
".",
"_network",
"=",
"network",
"self",
".",
"node",
"=",
"node",
"self",
".",
"node_id",
"=",
"self",
".",
"node",
".",
"node_id",
"self",
".",
"_name",
"=",
"node_name",
"(",
"self",
".",
"node",
")",
"self",
".",
"_product_name",
"=",
"node",
".",
"product_name",
"self",
".",
"_manufacturer_name",
"=",
"node",
".",
"manufacturer_name",
"self",
".",
"_unique_id",
"=",
"self",
".",
"_compute_unique_id",
"(",
")",
"self",
".",
"_application_version",
"=",
"None",
"self",
".",
"_attributes",
"=",
"{",
"}",
"self",
".",
"wakeup_interval",
"=",
"None",
"self",
".",
"location",
"=",
"None",
"self",
".",
"battery_level",
"=",
"None",
"dispatcher",
".",
"connect",
"(",
"self",
".",
"network_node_value_added",
",",
"ZWaveNetwork",
".",
"SIGNAL_VALUE_ADDED",
")",
"dispatcher",
".",
"connect",
"(",
"self",
".",
"network_node_changed",
",",
"ZWaveNetwork",
".",
"SIGNAL_VALUE_CHANGED",
")",
"dispatcher",
".",
"connect",
"(",
"self",
".",
"network_node_changed",
",",
"ZWaveNetwork",
".",
"SIGNAL_NODE",
")",
"dispatcher",
".",
"connect",
"(",
"self",
".",
"network_node_changed",
",",
"ZWaveNetwork",
".",
"SIGNAL_NOTIFICATION",
")",
"dispatcher",
".",
"connect",
"(",
"self",
".",
"network_node_event",
",",
"ZWaveNetwork",
".",
"SIGNAL_NODE_EVENT",
")",
"dispatcher",
".",
"connect",
"(",
"self",
".",
"network_scene_activated",
",",
"ZWaveNetwork",
".",
"SIGNAL_SCENE_EVENT",
")"
] | [
118,
4
] | [
146,
9
] | python | en | ['en', 'en', 'it'] | False |
ZWaveNodeEntity.unique_id | (self) | Return unique ID of Z-wave node. | Return unique ID of Z-wave node. | def unique_id(self):
"""Return unique ID of Z-wave node."""
return self._unique_id | [
"def",
"unique_id",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unique_id"
] | [
149,
4
] | [
151,
30
] | python | en | ['en', 'fr', 'en'] | True |
ZWaveNodeEntity.device_info | (self) | Return device information. | Return device information. | def device_info(self):
"""Return device information."""
identifier, name = node_device_id_and_name(self.node)
info = {
"identifiers": {identifier},
"manufacturer": self.node.manufacturer_name,
"model": self.node.product_name,
"name": name,
}
if self.node_id > 1:
info["via_device"] = (DOMAIN, 1)
return info | [
"def",
"device_info",
"(",
"self",
")",
":",
"identifier",
",",
"name",
"=",
"node_device_id_and_name",
"(",
"self",
".",
"node",
")",
"info",
"=",
"{",
"\"identifiers\"",
":",
"{",
"identifier",
"}",
",",
"\"manufacturer\"",
":",
"self",
".",
"node",
".",
"manufacturer_name",
",",
"\"model\"",
":",
"self",
".",
"node",
".",
"product_name",
",",
"\"name\"",
":",
"name",
",",
"}",
"if",
"self",
".",
"node_id",
">",
"1",
":",
"info",
"[",
"\"via_device\"",
"]",
"=",
"(",
"DOMAIN",
",",
"1",
")",
"return",
"info"
] | [
154,
4
] | [
165,
19
] | python | da | ['es', 'da', 'en'] | False |
ZWaveNodeEntity.maybe_update_application_version | (self, value) | Update application version if value is a Command Class Version, Application Value. | Update application version if value is a Command Class Version, Application Value. | def maybe_update_application_version(self, value):
"""Update application version if value is a Command Class Version, Application Value."""
if (
value
and value.command_class == COMMAND_CLASS_VERSION
and value.label == "Application Version"
):
self._application_version = value.data | [
"def",
"maybe_update_application_version",
"(",
"self",
",",
"value",
")",
":",
"if",
"(",
"value",
"and",
"value",
".",
"command_class",
"==",
"COMMAND_CLASS_VERSION",
"and",
"value",
".",
"label",
"==",
"\"Application Version\"",
")",
":",
"self",
".",
"_application_version",
"=",
"value",
".",
"data"
] | [
167,
4
] | [
174,
50
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.network_node_value_added | (self, node=None, value=None, args=None) | Handle an added value to a node on the network. | Handle an added value to a node on the network. | def network_node_value_added(self, node=None, value=None, args=None):
    """Handle an added value to a node on the network."""
    if node and node.node_id != self.node_id:
        return
    if args is not None and "nodeId" in args and args["nodeId"] != self.node_id:
        return

    self.maybe_update_application_version(value) | [
"def",
"network_node_value_added",
"(",
"self",
",",
"node",
"=",
"None",
",",
"value",
"=",
"None",
",",
"args",
"=",
"None",
")",
":",
"if",
"node",
"and",
"node",
".",
"node_id",
"!=",
"self",
".",
"node_id",
":",
"return",
"if",
"args",
"is",
"not",
"None",
"and",
"\"nodeId\"",
"in",
"args",
"and",
"args",
"[",
"\"nodeId\"",
"]",
"!=",
"self",
".",
"node_id",
":",
"return",
"self",
".",
"maybe_update_application_version",
"(",
"value",
")"
] | [
176,
4
] | [
183,
52
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.network_node_changed | (self, node=None, value=None, args=None) | Handle a changed node on the network. | Handle a changed node on the network. | def network_node_changed(self, node=None, value=None, args=None):
"""Handle a changed node on the network."""
if node and node.node_id != self.node_id:
return
if args is not None and "nodeId" in args and args["nodeId"] != self.node_id:
return
# Process central scene activation
if value is not None and value.command_class == COMMAND_CLASS_CENTRAL_SCENE:
self.central_scene_activated(value.index, value.data)
self.maybe_update_application_version(value)
self.node_changed() | [
"def",
"network_node_changed",
"(",
"self",
",",
"node",
"=",
"None",
",",
"value",
"=",
"None",
",",
"args",
"=",
"None",
")",
":",
"if",
"node",
"and",
"node",
".",
"node_id",
"!=",
"self",
".",
"node_id",
":",
"return",
"if",
"args",
"is",
"not",
"None",
"and",
"\"nodeId\"",
"in",
"args",
"and",
"args",
"[",
"\"nodeId\"",
"]",
"!=",
"self",
".",
"node_id",
":",
"return",
"# Process central scene activation",
"if",
"value",
"is",
"not",
"None",
"and",
"value",
".",
"command_class",
"==",
"COMMAND_CLASS_CENTRAL_SCENE",
":",
"self",
".",
"central_scene_activated",
"(",
"value",
".",
"index",
",",
"value",
".",
"data",
")",
"self",
".",
"maybe_update_application_version",
"(",
"value",
")",
"self",
".",
"node_changed",
"(",
")"
] | [
185,
4
] | [
198,
27
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.get_node_statistics | (self) | Retrieve statistics from the node. | Retrieve statistics from the node. | def get_node_statistics(self):
"""Retrieve statistics from the node."""
return self._network.manager.getNodeStatistics(
self._network.home_id, self.node_id
) | [
"def",
"get_node_statistics",
"(",
"self",
")",
":",
"return",
"self",
".",
"_network",
".",
"manager",
".",
"getNodeStatistics",
"(",
"self",
".",
"_network",
".",
"home_id",
",",
"self",
".",
"node_id",
")"
] | [
200,
4
] | [
204,
9
] | python | en | ['en', 'pt', 'en'] | True |
ZWaveNodeEntity.node_changed | (self) | Update node properties. | Update node properties. | def node_changed(self):
"""Update node properties."""
attributes = {}
stats = self.get_node_statistics()
for attr in ATTRIBUTES:
value = getattr(self.node, attr)
if attr in _REQUIRED_ATTRIBUTES or value:
attributes[attr] = value
for attr in _COMM_ATTRIBUTES:
attributes[attr] = stats[attr]
if self.node.can_wake_up():
for value in self.node.get_values(COMMAND_CLASS_WAKE_UP).values():
if value.index != 0:
continue
self.wakeup_interval = value.data
break
else:
self.wakeup_interval = None
self.battery_level = self.node.get_battery_level()
self._product_name = self.node.product_name
self._manufacturer_name = self.node.manufacturer_name
self._name = node_name(self.node)
self._attributes = attributes
if not self._unique_id:
self._unique_id = self._compute_unique_id()
if self._unique_id:
# Node info parsed. Remove and re-add
self.try_remove_and_add()
self.maybe_schedule_update() | [
"def",
"node_changed",
"(",
"self",
")",
":",
"attributes",
"=",
"{",
"}",
"stats",
"=",
"self",
".",
"get_node_statistics",
"(",
")",
"for",
"attr",
"in",
"ATTRIBUTES",
":",
"value",
"=",
"getattr",
"(",
"self",
".",
"node",
",",
"attr",
")",
"if",
"attr",
"in",
"_REQUIRED_ATTRIBUTES",
"or",
"value",
":",
"attributes",
"[",
"attr",
"]",
"=",
"value",
"for",
"attr",
"in",
"_COMM_ATTRIBUTES",
":",
"attributes",
"[",
"attr",
"]",
"=",
"stats",
"[",
"attr",
"]",
"if",
"self",
".",
"node",
".",
"can_wake_up",
"(",
")",
":",
"for",
"value",
"in",
"self",
".",
"node",
".",
"get_values",
"(",
"COMMAND_CLASS_WAKE_UP",
")",
".",
"values",
"(",
")",
":",
"if",
"value",
".",
"index",
"!=",
"0",
":",
"continue",
"self",
".",
"wakeup_interval",
"=",
"value",
".",
"data",
"break",
"else",
":",
"self",
".",
"wakeup_interval",
"=",
"None",
"self",
".",
"battery_level",
"=",
"self",
".",
"node",
".",
"get_battery_level",
"(",
")",
"self",
".",
"_product_name",
"=",
"self",
".",
"node",
".",
"product_name",
"self",
".",
"_manufacturer_name",
"=",
"self",
".",
"node",
".",
"manufacturer_name",
"self",
".",
"_name",
"=",
"node_name",
"(",
"self",
".",
"node",
")",
"self",
".",
"_attributes",
"=",
"attributes",
"if",
"not",
"self",
".",
"_unique_id",
":",
"self",
".",
"_unique_id",
"=",
"self",
".",
"_compute_unique_id",
"(",
")",
"if",
"self",
".",
"_unique_id",
":",
"# Node info parsed. Remove and re-add",
"self",
".",
"try_remove_and_add",
"(",
")",
"self",
".",
"maybe_schedule_update",
"(",
")"
] | [
206,
4
] | [
240,
36
] | python | en | ['en', 'nl', 'en'] | True |
ZWaveNodeEntity.node_renamed | (self, update_ids=False) | Rename the node and update any IDs. | Rename the node and update any IDs. | async def node_renamed(self, update_ids=False):
"""Rename the node and update any IDs."""
identifier, self._name = node_device_id_and_name(self.node)
# Set the name in the devices. If they're customised
# the customisation will not be stored as name and will stick.
dev_reg = await get_dev_reg(self.hass)
device = dev_reg.async_get_device(identifiers={identifier}, connections=set())
dev_reg.async_update_device(device.id, name=self._name)
# update sub-devices too
for i in count(2):
identifier, new_name = node_device_id_and_name(self.node, i)
device = dev_reg.async_get_device(
identifiers={identifier}, connections=set()
)
if not device:
break
dev_reg.async_update_device(device.id, name=new_name)
# Update entity ID.
if update_ids:
ent_reg = await async_get_registry(self.hass)
new_entity_id = ent_reg.async_generate_entity_id(
DOMAIN, self._name, self.platform.entities.keys() - {self.entity_id}
)
if new_entity_id != self.entity_id:
# Don't change the name attribute, it will be None unless
# customised and if it's been customised, keep the
# customisation.
ent_reg.async_update_entity(self.entity_id, new_entity_id=new_entity_id)
return
# else for the above two ifs, update if not using update_entity
self.async_write_ha_state() | [
"async",
"def",
"node_renamed",
"(",
"self",
",",
"update_ids",
"=",
"False",
")",
":",
"identifier",
",",
"self",
".",
"_name",
"=",
"node_device_id_and_name",
"(",
"self",
".",
"node",
")",
"# Set the name in the devices. If they're customised",
"# the customisation will not be stored as name and will stick.",
"dev_reg",
"=",
"await",
"get_dev_reg",
"(",
"self",
".",
"hass",
")",
"device",
"=",
"dev_reg",
".",
"async_get_device",
"(",
"identifiers",
"=",
"{",
"identifier",
"}",
",",
"connections",
"=",
"set",
"(",
")",
")",
"dev_reg",
".",
"async_update_device",
"(",
"device",
".",
"id",
",",
"name",
"=",
"self",
".",
"_name",
")",
"# update sub-devices too",
"for",
"i",
"in",
"count",
"(",
"2",
")",
":",
"identifier",
",",
"new_name",
"=",
"node_device_id_and_name",
"(",
"self",
".",
"node",
",",
"i",
")",
"device",
"=",
"dev_reg",
".",
"async_get_device",
"(",
"identifiers",
"=",
"{",
"identifier",
"}",
",",
"connections",
"=",
"set",
"(",
")",
")",
"if",
"not",
"device",
":",
"break",
"dev_reg",
".",
"async_update_device",
"(",
"device",
".",
"id",
",",
"name",
"=",
"new_name",
")",
"# Update entity ID.",
"if",
"update_ids",
":",
"ent_reg",
"=",
"await",
"async_get_registry",
"(",
"self",
".",
"hass",
")",
"new_entity_id",
"=",
"ent_reg",
".",
"async_generate_entity_id",
"(",
"DOMAIN",
",",
"self",
".",
"_name",
",",
"self",
".",
"platform",
".",
"entities",
".",
"keys",
"(",
")",
"-",
"{",
"self",
".",
"entity_id",
"}",
")",
"if",
"new_entity_id",
"!=",
"self",
".",
"entity_id",
":",
"# Don't change the name attribute, it will be None unless",
"# customised and if it's been customised, keep the",
"# customisation.",
"ent_reg",
".",
"async_update_entity",
"(",
"self",
".",
"entity_id",
",",
"new_entity_id",
"=",
"new_entity_id",
")",
"return",
"# else for the above two ifs, update if not using update_entity",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
242,
4
] | [
273,
35
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.network_node_event | (self, node, value) | Handle a node activated event on the network. | Handle a node activated event on the network. | def network_node_event(self, node, value):
"""Handle a node activated event on the network."""
if node.node_id == self.node.node_id:
self.node_event(value) | [
"def",
"network_node_event",
"(",
"self",
",",
"node",
",",
"value",
")",
":",
"if",
"node",
".",
"node_id",
"==",
"self",
".",
"node",
".",
"node_id",
":",
"self",
".",
"node_event",
"(",
"value",
")"
] | [
275,
4
] | [
278,
34
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.node_event | (self, value) | Handle a node activated event for this node. | Handle a node activated event for this node. | def node_event(self, value):
"""Handle a node activated event for this node."""
if self.hass is None:
return
self.hass.bus.fire(
EVENT_NODE_EVENT,
{
ATTR_ENTITY_ID: self.entity_id,
ATTR_NODE_ID: self.node.node_id,
ATTR_BASIC_LEVEL: value,
},
) | [
"def",
"node_event",
"(",
"self",
",",
"value",
")",
":",
"if",
"self",
".",
"hass",
"is",
"None",
":",
"return",
"self",
".",
"hass",
".",
"bus",
".",
"fire",
"(",
"EVENT_NODE_EVENT",
",",
"{",
"ATTR_ENTITY_ID",
":",
"self",
".",
"entity_id",
",",
"ATTR_NODE_ID",
":",
"self",
".",
"node",
".",
"node_id",
",",
"ATTR_BASIC_LEVEL",
":",
"value",
",",
"}",
",",
")"
] | [
280,
4
] | [
292,
9
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.network_scene_activated | (self, node, scene_id) | Handle a scene activated event on the network. | Handle a scene activated event on the network. | def network_scene_activated(self, node, scene_id):
"""Handle a scene activated event on the network."""
if node.node_id == self.node.node_id:
self.scene_activated(scene_id) | [
"def",
"network_scene_activated",
"(",
"self",
",",
"node",
",",
"scene_id",
")",
":",
"if",
"node",
".",
"node_id",
"==",
"self",
".",
"node",
".",
"node_id",
":",
"self",
".",
"scene_activated",
"(",
"scene_id",
")"
] | [
294,
4
] | [
297,
42
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.scene_activated | (self, scene_id) | Handle an activated scene for this node. | Handle an activated scene for this node. | def scene_activated(self, scene_id):
"""Handle an activated scene for this node."""
if self.hass is None:
return
self.hass.bus.fire(
EVENT_SCENE_ACTIVATED,
{
ATTR_ENTITY_ID: self.entity_id,
ATTR_NODE_ID: self.node.node_id,
ATTR_SCENE_ID: scene_id,
},
) | [
"def",
"scene_activated",
"(",
"self",
",",
"scene_id",
")",
":",
"if",
"self",
".",
"hass",
"is",
"None",
":",
"return",
"self",
".",
"hass",
".",
"bus",
".",
"fire",
"(",
"EVENT_SCENE_ACTIVATED",
",",
"{",
"ATTR_ENTITY_ID",
":",
"self",
".",
"entity_id",
",",
"ATTR_NODE_ID",
":",
"self",
".",
"node",
".",
"node_id",
",",
"ATTR_SCENE_ID",
":",
"scene_id",
",",
"}",
",",
")"
] | [
299,
4
] | [
311,
9
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.central_scene_activated | (self, scene_id, scene_data) | Handle an activated central scene for this node. | Handle an activated central scene for this node. | def central_scene_activated(self, scene_id, scene_data):
"""Handle an activated central scene for this node."""
if self.hass is None:
return
self.hass.bus.fire(
EVENT_SCENE_ACTIVATED,
{
ATTR_ENTITY_ID: self.entity_id,
ATTR_NODE_ID: self.node_id,
ATTR_SCENE_ID: scene_id,
ATTR_SCENE_DATA: scene_data,
},
) | [
"def",
"central_scene_activated",
"(",
"self",
",",
"scene_id",
",",
"scene_data",
")",
":",
"if",
"self",
".",
"hass",
"is",
"None",
":",
"return",
"self",
".",
"hass",
".",
"bus",
".",
"fire",
"(",
"EVENT_SCENE_ACTIVATED",
",",
"{",
"ATTR_ENTITY_ID",
":",
"self",
".",
"entity_id",
",",
"ATTR_NODE_ID",
":",
"self",
".",
"node_id",
",",
"ATTR_SCENE_ID",
":",
"scene_id",
",",
"ATTR_SCENE_DATA",
":",
"scene_data",
",",
"}",
",",
")"
] | [
313,
4
] | [
326,
9
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.state | (self) | Return the state. | Return the state. | def state(self):
"""Return the state."""
if ATTR_READY not in self._attributes:
return None
if self._attributes[ATTR_FAILED]:
return "dead"
if self._attributes[ATTR_QUERY_STAGE] != "Complete":
return "initializing"
if not self._attributes[ATTR_AWAKE]:
return "sleeping"
if self._attributes[ATTR_READY]:
return "ready"
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"ATTR_READY",
"not",
"in",
"self",
".",
"_attributes",
":",
"return",
"None",
"if",
"self",
".",
"_attributes",
"[",
"ATTR_FAILED",
"]",
":",
"return",
"\"dead\"",
"if",
"self",
".",
"_attributes",
"[",
"ATTR_QUERY_STAGE",
"]",
"!=",
"\"Complete\"",
":",
"return",
"\"initializing\"",
"if",
"not",
"self",
".",
"_attributes",
"[",
"ATTR_AWAKE",
"]",
":",
"return",
"\"sleeping\"",
"if",
"self",
".",
"_attributes",
"[",
"ATTR_READY",
"]",
":",
"return",
"\"ready\"",
"return",
"None"
] | [
329,
4
] | [
343,
19
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.should_poll | (self) | No polling needed. | No polling needed. | def should_poll(self):
"""No polling needed."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
346,
4
] | [
348,
20
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.name | (self) | Return the name of the device. | Return the name of the device. | def name(self):
"""Return the name of the device."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
351,
4
] | [
353,
25
] | python | en | ['en', 'en', 'en'] | True |
ZWaveNodeEntity.device_state_attributes | (self) | Return the device specific state attributes. | Return the device specific state attributes. | def device_state_attributes(self):
"""Return the device specific state attributes."""
attrs = {
ATTR_NODE_ID: self.node_id,
ATTR_NODE_NAME: self._name,
ATTR_MANUFACTURER_NAME: self._manufacturer_name,
ATTR_PRODUCT_NAME: self._product_name,
}
attrs.update(self._attributes)
if self.battery_level is not None:
attrs[ATTR_BATTERY_LEVEL] = self.battery_level
if self.wakeup_interval is not None:
attrs[ATTR_WAKEUP] = self.wakeup_interval
if self._application_version is not None:
attrs[ATTR_APPLICATION_VERSION] = self._application_version
return attrs | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"attrs",
"=",
"{",
"ATTR_NODE_ID",
":",
"self",
".",
"node_id",
",",
"ATTR_NODE_NAME",
":",
"self",
".",
"_name",
",",
"ATTR_MANUFACTURER_NAME",
":",
"self",
".",
"_manufacturer_name",
",",
"ATTR_PRODUCT_NAME",
":",
"self",
".",
"_product_name",
",",
"}",
"attrs",
".",
"update",
"(",
"self",
".",
"_attributes",
")",
"if",
"self",
".",
"battery_level",
"is",
"not",
"None",
":",
"attrs",
"[",
"ATTR_BATTERY_LEVEL",
"]",
"=",
"self",
".",
"battery_level",
"if",
"self",
".",
"wakeup_interval",
"is",
"not",
"None",
":",
"attrs",
"[",
"ATTR_WAKEUP",
"]",
"=",
"self",
".",
"wakeup_interval",
"if",
"self",
".",
"_application_version",
"is",
"not",
"None",
":",
"attrs",
"[",
"ATTR_APPLICATION_VERSION",
"]",
"=",
"self",
".",
"_application_version",
"return",
"attrs"
] | [
356,
4
] | [
372,
20
] | python | en | ['en', 'en', 'en'] | True |
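ZWaveNodeEntity.__init__ above subscribes to network events through PyDispatcher signals. A minimal runnable illustration of that connect/send pattern; the signal name and payload are invented for the example, while the real signals come from openzwave.network.ZWaveNetwork:

from pydispatch import dispatcher  # PyDispatcher, the library python-openzwave uses

SIGNAL_VALUE_CHANGED = "ValueChanged"  # stand-in for ZWaveNetwork.SIGNAL_VALUE_CHANGED

def on_value_changed(node=None, value=None):
    # PyDispatcher only passes the keyword arguments a receiver's signature accepts.
    print(f"node {node} changed, value={value}")

dispatcher.connect(on_value_changed, SIGNAL_VALUE_CHANGED)

# The network layer broadcasts; every connected receiver is invoked.
dispatcher.send(signal=SIGNAL_VALUE_CHANGED, sender=dispatcher.Anonymous, node=5, value=42)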
async_setup_entry | (
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) | Set up the Avri Waste platform. | Set up the Avri Waste platform. | async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Avri Waste platform."""
client = hass.data[DOMAIN][entry.entry_id]
integration_id = entry.data[CONF_ID]
try:
each_upcoming = await hass.async_add_executor_job(client.upcoming_of_each)
except AvriException as ex:
raise PlatformNotReady from ex
else:
entities = [
AvriWasteUpcoming(client, upcoming.name, integration_id)
for upcoming in each_upcoming
]
async_add_entities(entities, True) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistantType",
",",
"entry",
":",
"ConfigEntry",
",",
"async_add_entities",
")",
"->",
"None",
":",
"client",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"entry",
".",
"entry_id",
"]",
"integration_id",
"=",
"entry",
".",
"data",
"[",
"CONF_ID",
"]",
"try",
":",
"each_upcoming",
"=",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"client",
".",
"upcoming_of_each",
")",
"except",
"AvriException",
"as",
"ex",
":",
"raise",
"PlatformNotReady",
"from",
"ex",
"else",
":",
"entities",
"=",
"[",
"AvriWasteUpcoming",
"(",
"client",
",",
"upcoming",
".",
"name",
",",
"integration_id",
")",
"for",
"upcoming",
"in",
"each_upcoming",
"]",
"async_add_entities",
"(",
"entities",
",",
"True",
")"
] | [
16,
0
] | [
32,
42
] | python | en | ['en', 'hr', 'en'] | True |
AvriWasteUpcoming.__init__ | (self, client: Avri, waste_type: str, integration_id: str) | Initialize the sensor. | Initialize the sensor. | def __init__(self, client: Avri, waste_type: str, integration_id: str):
"""Initialize the sensor."""
self._waste_type = waste_type
self._name = f"{self._waste_type}".title()
self._state = None
self._client = client
self._state_available = False
self._integration_id = integration_id | [
"def",
"__init__",
"(",
"self",
",",
"client",
":",
"Avri",
",",
"waste_type",
":",
"str",
",",
"integration_id",
":",
"str",
")",
":",
"self",
".",
"_waste_type",
"=",
"waste_type",
"self",
".",
"_name",
"=",
"f\"{self._waste_type}\"",
".",
"title",
"(",
")",
"self",
".",
"_state",
"=",
"None",
"self",
".",
"_client",
"=",
"client",
"self",
".",
"_state_available",
"=",
"False",
"self",
".",
"_integration_id",
"=",
"integration_id"
] | [
38,
4
] | [
45,
45
] | python | en | ['en', 'en', 'en'] | True |
AvriWasteUpcoming.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
48,
4
] | [
50,
25
] | python | en | ['en', 'mi', 'en'] | True |
AvriWasteUpcoming.unique_id | (self) | Return a unique ID. | Return a unique ID. | def unique_id(self) -> str:
"""Return a unique ID."""
return (f"{self._integration_id}" f"-{self._waste_type}").replace(" ", "") | [
"def",
"unique_id",
"(",
"self",
")",
"->",
"str",
":",
"return",
"(",
"f\"{self._integration_id}\"",
"f\"-{self._waste_type}\"",
")",
".",
"replace",
"(",
"\" \"",
",",
"\"\"",
")"
] | [
53,
4
] | [
55,
82
] | python | ca | ['fr', 'ca', 'en'] | False |
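A quick worked example of the unique-ID scheme in AvriWasteUpcoming.unique_id above, using hypothetical values (the replace call suggests integration IDs may contain spaces, for example Dutch postal codes):

integration_id, waste_type = "1234 AB 5", "plastic"
uid = (f"{integration_id}" f"-{waste_type}").replace(" ", "")
assert uid == "1234AB5-plastic"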
AvriWasteUpcoming.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
return self._state | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
58,
4
] | [
60,
26
] | python | en | ['en', 'en', 'en'] | True |
AvriWasteUpcoming.available | (self) | Return True if entity is available. | Return True if entity is available. | def available(self):
"""Return True if entity is available."""
return self._state_available | [
"def",
"available",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state_available"
] | [
63,
4
] | [
65,
36
] | python | en | ['en', 'en', 'en'] | True |
AvriWasteUpcoming.device_class | (self) | Return the device class of the sensor. | Return the device class of the sensor. | def device_class(self):
"""Return the device class of the sensor."""
return DEVICE_CLASS_TIMESTAMP | [
"def",
"device_class",
"(",
"self",
")",
":",
"return",
"DEVICE_CLASS_TIMESTAMP"
] | [
68,
4
] | [
70,
37
] | python | en | ['en', 'en', 'en'] | True |
AvriWasteUpcoming.icon | (self) | Icon to use in the frontend. | Icon to use in the frontend. | def icon(self):
"""Icon to use in the frontend."""
return ICON | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"ICON"
] | [
73,
4
] | [
75,
19
] | python | en | ['en', 'en', 'en'] | True |
AvriWasteUpcoming.async_update | (self) | Update the data. | Update the data. | async def async_update(self):
"""Update the data."""
if not self.enabled:
return
try:
pickup_events = self._client.upcoming_of_each()
except AvriException as ex:
_LOGGER.error(
"There was an error retrieving upcoming garbage pickups: %s", ex
)
self._state_available = False
self._state = None
else:
self._state_available = True
matched_events = list(
filter(lambda event: event.name == self._waste_type, pickup_events)
)
if not matched_events:
self._state = None
else:
self._state = matched_events[0].day.date() | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"enabled",
":",
"return",
"try",
":",
"pickup_events",
"=",
"self",
".",
"_client",
".",
"upcoming_of_each",
"(",
")",
"except",
"AvriException",
"as",
"ex",
":",
"_LOGGER",
".",
"error",
"(",
"\"There was an error retrieving upcoming garbage pickups: %s\"",
",",
"ex",
")",
"self",
".",
"_state_available",
"=",
"False",
"self",
".",
"_state",
"=",
"None",
"else",
":",
"self",
".",
"_state_available",
"=",
"True",
"matched_events",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"event",
":",
"event",
".",
"name",
"==",
"self",
".",
"_waste_type",
",",
"pickup_events",
")",
")",
"if",
"not",
"matched_events",
":",
"self",
".",
"_state",
"=",
"None",
"else",
":",
"self",
".",
"_state",
"=",
"matched_events",
"[",
"0",
"]",
".",
"day",
".",
"date",
"(",
")"
] | [
77,
4
] | [
98,
58
] | python | en | ['en', 'sn', 'en'] | True |
train | (args, train_dataset, model, tokenizer, teacher=None) | Train the model | Train the model | def train(args, train_dataset, model, tokenizer, teacher=None):
""" Train the model """
if args.local_rank in [-1, 0]:
tb_writer = SummaryWriter(log_dir=args.output_dir)
args.train_batch_size = args.per_gpu_train_batch_size * max(1, args.n_gpu)
train_sampler = RandomSampler(train_dataset) if args.local_rank == -1 else DistributedSampler(train_dataset)
train_dataloader = DataLoader(train_dataset, sampler=train_sampler, batch_size=args.train_batch_size)
if args.max_steps > 0:
t_total = args.max_steps
args.num_train_epochs = args.max_steps // (len(train_dataloader) // args.gradient_accumulation_steps) + 1
else:
t_total = len(train_dataloader) // args.gradient_accumulation_steps * args.num_train_epochs
# Prepare optimizer and schedule (linear warmup and decay)
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [p for n, p in model.named_parameters() if "mask_score" in n and p.requires_grad],
"lr": args.mask_scores_learning_rate,
},
{
"params": [
p
for n, p in model.named_parameters()
if "mask_score" not in n and p.requires_grad and not any(nd in n for nd in no_decay)
],
"lr": args.learning_rate,
"weight_decay": args.weight_decay,
},
{
"params": [
p
for n, p in model.named_parameters()
if "mask_score" not in n and p.requires_grad and any(nd in n for nd in no_decay)
],
"lr": args.learning_rate,
"weight_decay": 0.0,
},
]
optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon)
scheduler = get_linear_schedule_with_warmup(
optimizer, num_warmup_steps=args.warmup_steps, num_training_steps=t_total
)
# Check if saved optimizer or scheduler states exist
if os.path.isfile(os.path.join(args.model_name_or_path, "optimizer.pt")) and os.path.isfile(
os.path.join(args.model_name_or_path, "scheduler.pt")
):
# Load in optimizer and scheduler states
optimizer.load_state_dict(torch.load(os.path.join(args.model_name_or_path, "optimizer.pt")))
scheduler.load_state_dict(torch.load(os.path.join(args.model_name_or_path, "scheduler.pt")))
if args.fp16:
try:
from apex import amp
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")
model, optimizer = amp.initialize(model, optimizer, opt_level=args.fp16_opt_level)
# multi-gpu training (should be after apex fp16 initialization)
if args.n_gpu > 1:
model = torch.nn.DataParallel(model)
# Distributed training (should be after apex fp16 initialization)
if args.local_rank != -1:
model = torch.nn.parallel.DistributedDataParallel(
model,
device_ids=[args.local_rank],
output_device=args.local_rank,
find_unused_parameters=True,
)
# Train!
logger.info("***** Running training *****")
logger.info(" Num examples = %d", len(train_dataset))
logger.info(" Num Epochs = %d", args.num_train_epochs)
logger.info(" Instantaneous batch size per GPU = %d", args.per_gpu_train_batch_size)
logger.info(
" Total train batch size (w. parallel, distributed & accumulation) = %d",
args.train_batch_size
* args.gradient_accumulation_steps
* (torch.distributed.get_world_size() if args.local_rank != -1 else 1),
)
logger.info(" Gradient Accumulation steps = %d", args.gradient_accumulation_steps)
logger.info(" Total optimization steps = %d", t_total)
# Distillation
if teacher is not None:
logger.info(" Training with distillation")
global_step = 0
# Global TopK
if args.global_topk:
threshold_mem = None
epochs_trained = 0
steps_trained_in_current_epoch = 0
# Check if continuing training from a checkpoint
if os.path.exists(args.model_name_or_path):
# set global_step to global_step of last saved checkpoint from model path
try:
global_step = int(args.model_name_or_path.split("-")[-1].split("/")[0])
except ValueError:
global_step = 0
epochs_trained = global_step // (len(train_dataloader) // args.gradient_accumulation_steps)
steps_trained_in_current_epoch = global_step % (len(train_dataloader) // args.gradient_accumulation_steps)
logger.info(" Continuing training from checkpoint, will skip to saved global_step")
logger.info(" Continuing training from epoch %d", epochs_trained)
logger.info(" Continuing training from global step %d", global_step)
logger.info(" Will skip the first %d steps in the first epoch", steps_trained_in_current_epoch)
tr_loss, logging_loss = 0.0, 0.0
model.zero_grad()
train_iterator = trange(
epochs_trained,
int(args.num_train_epochs),
desc="Epoch",
disable=args.local_rank not in [-1, 0],
)
set_seed(args) # Added here for reproducibility
for _ in train_iterator:
epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0])
for step, batch in enumerate(epoch_iterator):
# Skip past any already trained steps if resuming training
if steps_trained_in_current_epoch > 0:
steps_trained_in_current_epoch -= 1
continue
model.train()
batch = tuple(t.to(args.device) for t in batch)
threshold, regu_lambda = schedule_threshold(
step=global_step,
total_step=t_total,
warmup_steps=args.warmup_steps,
final_threshold=args.final_threshold,
initial_threshold=args.initial_threshold,
final_warmup=args.final_warmup,
initial_warmup=args.initial_warmup,
final_lambda=args.final_lambda,
)
# Global TopK
if args.global_topk:
if threshold == 1.0:
threshold = -1e2 # Or an indefinitely low quantity
else:
if (threshold_mem is None) or (global_step % args.global_topk_frequency_compute == 0):
# Sort all the values to get the global topK
concat = torch.cat(
[param.view(-1) for name, param in model.named_parameters() if "mask_scores" in name]
)
n = concat.numel()
kth = max(n - (int(n * threshold) + 1), 1)
threshold_mem = concat.kthvalue(kth).values.item()
threshold = threshold_mem
else:
threshold = threshold_mem
inputs = {"input_ids": batch[0], "attention_mask": batch[1], "labels": batch[3]}
if args.model_type != "distilbert":
inputs["token_type_ids"] = (
batch[2] if args.model_type in ["bert", "masked_bert", "xlnet", "albert"] else None
) # XLM, DistilBERT, RoBERTa, and XLM-RoBERTa don't use segment_ids
if "masked" in args.model_type:
inputs["threshold"] = threshold
outputs = model(**inputs)
loss, logits_stu = outputs # model outputs are always tuple in transformers (see doc)
# Distillation loss
if teacher is not None:
if "token_type_ids" not in inputs:
inputs["token_type_ids"] = None if args.teacher_type == "xlm" else batch[2]
with torch.no_grad():
(logits_tea,) = teacher(
input_ids=inputs["input_ids"],
token_type_ids=inputs["token_type_ids"],
attention_mask=inputs["attention_mask"],
)
loss_logits = (
F.kl_div(
input=F.log_softmax(logits_stu / args.temperature, dim=-1),
target=F.softmax(logits_tea / args.temperature, dim=-1),
reduction="batchmean",
)
* (args.temperature ** 2)
)
loss = args.alpha_distil * loss_logits + args.alpha_ce * loss
# Regularization
if args.regularization is not None:
regu_ = regularization(model=model, mode=args.regularization)
loss = loss + regu_lambda * regu_
if args.n_gpu > 1:
loss = loss.mean() # mean() to average on multi-gpu parallel training
if args.gradient_accumulation_steps > 1:
loss = loss / args.gradient_accumulation_steps
if args.fp16:
with amp.scale_loss(loss, optimizer) as scaled_loss:
scaled_loss.backward()
else:
loss.backward()
tr_loss += loss.item()
if (step + 1) % args.gradient_accumulation_steps == 0 or (
# last step in epoch but step is always smaller than gradient_accumulation_steps
len(epoch_iterator) <= args.gradient_accumulation_steps
and (step + 1) == len(epoch_iterator)
):
if args.fp16:
torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), args.max_grad_norm)
else:
torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)
if args.local_rank in [-1, 0] and args.logging_steps > 0 and global_step % args.logging_steps == 0:
tb_writer.add_scalar("threshold", threshold, global_step)
for name, param in model.named_parameters():
if not param.requires_grad:
continue
tb_writer.add_scalar("parameter_mean/" + name, param.data.mean(), global_step)
tb_writer.add_scalar("parameter_std/" + name, param.data.std(), global_step)
tb_writer.add_scalar("parameter_min/" + name, param.data.min(), global_step)
tb_writer.add_scalar("parameter_max/" + name, param.data.max(), global_step)
tb_writer.add_scalar("grad_mean/" + name, param.grad.data.mean(), global_step)
tb_writer.add_scalar("grad_std/" + name, param.grad.data.std(), global_step)
if args.regularization is not None and "mask_scores" in name:
if args.regularization == "l1":
perc = (torch.sigmoid(param) > threshold).sum().item() / param.numel()
elif args.regularization == "l0":
perc = (torch.sigmoid(param - 2 / 3 * np.log(0.1 / 1.1))).sum().item() / param.numel()
tb_writer.add_scalar("retained_weights_perc/" + name, perc, global_step)
optimizer.step()
scheduler.step() # Update learning rate schedule
model.zero_grad()
global_step += 1
if args.local_rank in [-1, 0] and args.logging_steps > 0 and global_step % args.logging_steps == 0:
logs = {}
if (
args.local_rank == -1 and args.evaluate_during_training
): # Only evaluate when single GPU otherwise metrics may not average well
results = evaluate(args, model, tokenizer)
for key, value in results.items():
eval_key = "eval_{}".format(key)
logs[eval_key] = value
loss_scalar = (tr_loss - logging_loss) / args.logging_steps
learning_rate_scalar = scheduler.get_lr()
logs["learning_rate"] = learning_rate_scalar[0]
if len(learning_rate_scalar) > 1:
for idx, lr in enumerate(learning_rate_scalar[1:]):
logs[f"learning_rate/{idx+1}"] = lr
logs["loss"] = loss_scalar
if teacher is not None:
logs["loss/distil"] = loss_logits.item()
if args.regularization is not None:
logs["loss/regularization"] = regu_.item()
if (teacher is not None) or (args.regularization is not None):
if (teacher is not None) and (args.regularization is not None):
logs["loss/instant_ce"] = (
loss.item()
- regu_lambda * logs["loss/regularization"]
- args.alpha_distil * logs["loss/distil"]
) / args.alpha_ce
elif teacher is not None:
logs["loss/instant_ce"] = (
loss.item() - args.alpha_distil * logs["loss/distil"]
) / args.alpha_ce
else:
logs["loss/instant_ce"] = loss.item() - regu_lambda * logs["loss/regularization"]
logging_loss = tr_loss
for key, value in logs.items():
tb_writer.add_scalar(key, value, global_step)
print(json.dumps({**logs, **{"step": global_step}}))
if args.local_rank in [-1, 0] and args.save_steps > 0 and global_step % args.save_steps == 0:
# Save model checkpoint
output_dir = os.path.join(args.output_dir, "checkpoint-{}".format(global_step))
if not os.path.exists(output_dir):
os.makedirs(output_dir)
model_to_save = (
model.module if hasattr(model, "module") else model
) # Take care of distributed/parallel training
model_to_save.save_pretrained(output_dir)
tokenizer.save_pretrained(output_dir)
torch.save(args, os.path.join(output_dir, "training_args.bin"))
logger.info("Saving model checkpoint to %s", output_dir)
torch.save(optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
torch.save(scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
logger.info("Saving optimizer and scheduler states to %s", output_dir)
if args.max_steps > 0 and global_step > args.max_steps:
epoch_iterator.close()
break
if args.max_steps > 0 and global_step > args.max_steps:
train_iterator.close()
break
if args.local_rank in [-1, 0]:
tb_writer.close()
return global_step, tr_loss / global_step | [
"def",
"train",
"(",
"args",
",",
"train_dataset",
",",
"model",
",",
"tokenizer",
",",
"teacher",
"=",
"None",
")",
":",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
":",
"tb_writer",
"=",
"SummaryWriter",
"(",
"log_dir",
"=",
"args",
".",
"output_dir",
")",
"args",
".",
"train_batch_size",
"=",
"args",
".",
"per_gpu_train_batch_size",
"*",
"max",
"(",
"1",
",",
"args",
".",
"n_gpu",
")",
"train_sampler",
"=",
"RandomSampler",
"(",
"train_dataset",
")",
"if",
"args",
".",
"local_rank",
"==",
"-",
"1",
"else",
"DistributedSampler",
"(",
"train_dataset",
")",
"train_dataloader",
"=",
"DataLoader",
"(",
"train_dataset",
",",
"sampler",
"=",
"train_sampler",
",",
"batch_size",
"=",
"args",
".",
"train_batch_size",
")",
"if",
"args",
".",
"max_steps",
">",
"0",
":",
"t_total",
"=",
"args",
".",
"max_steps",
"args",
".",
"num_train_epochs",
"=",
"args",
".",
"max_steps",
"//",
"(",
"len",
"(",
"train_dataloader",
")",
"//",
"args",
".",
"gradient_accumulation_steps",
")",
"+",
"1",
"else",
":",
"t_total",
"=",
"len",
"(",
"train_dataloader",
")",
"//",
"args",
".",
"gradient_accumulation_steps",
"*",
"args",
".",
"num_train_epochs",
"# Prepare optimizer and schedule (linear warmup and decay)",
"no_decay",
"=",
"[",
"\"bias\"",
",",
"\"LayerNorm.weight\"",
"]",
"optimizer_grouped_parameters",
"=",
"[",
"{",
"\"params\"",
":",
"[",
"p",
"for",
"n",
",",
"p",
"in",
"model",
".",
"named_parameters",
"(",
")",
"if",
"\"mask_score\"",
"in",
"n",
"and",
"p",
".",
"requires_grad",
"]",
",",
"\"lr\"",
":",
"args",
".",
"mask_scores_learning_rate",
",",
"}",
",",
"{",
"\"params\"",
":",
"[",
"p",
"for",
"n",
",",
"p",
"in",
"model",
".",
"named_parameters",
"(",
")",
"if",
"\"mask_score\"",
"not",
"in",
"n",
"and",
"p",
".",
"requires_grad",
"and",
"not",
"any",
"(",
"nd",
"in",
"n",
"for",
"nd",
"in",
"no_decay",
")",
"]",
",",
"\"lr\"",
":",
"args",
".",
"learning_rate",
",",
"\"weight_decay\"",
":",
"args",
".",
"weight_decay",
",",
"}",
",",
"{",
"\"params\"",
":",
"[",
"p",
"for",
"n",
",",
"p",
"in",
"model",
".",
"named_parameters",
"(",
")",
"if",
"\"mask_score\"",
"not",
"in",
"n",
"and",
"p",
".",
"requires_grad",
"and",
"any",
"(",
"nd",
"in",
"n",
"for",
"nd",
"in",
"no_decay",
")",
"]",
",",
"\"lr\"",
":",
"args",
".",
"learning_rate",
",",
"\"weight_decay\"",
":",
"0.0",
",",
"}",
",",
"]",
"optimizer",
"=",
"AdamW",
"(",
"optimizer_grouped_parameters",
",",
"lr",
"=",
"args",
".",
"learning_rate",
",",
"eps",
"=",
"args",
".",
"adam_epsilon",
")",
"scheduler",
"=",
"get_linear_schedule_with_warmup",
"(",
"optimizer",
",",
"num_warmup_steps",
"=",
"args",
".",
"warmup_steps",
",",
"num_training_steps",
"=",
"t_total",
")",
"# Check if saved optimizer or scheduler states exist",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"model_name_or_path",
",",
"\"optimizer.pt\"",
")",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"model_name_or_path",
",",
"\"scheduler.pt\"",
")",
")",
":",
"# Load in optimizer and scheduler states",
"optimizer",
".",
"load_state_dict",
"(",
"torch",
".",
"load",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"model_name_or_path",
",",
"\"optimizer.pt\"",
")",
")",
")",
"scheduler",
".",
"load_state_dict",
"(",
"torch",
".",
"load",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"model_name_or_path",
",",
"\"scheduler.pt\"",
")",
")",
")",
"if",
"args",
".",
"fp16",
":",
"try",
":",
"from",
"apex",
"import",
"amp",
"except",
"ImportError",
":",
"raise",
"ImportError",
"(",
"\"Please install apex from https://www.github.com/nvidia/apex to use fp16 training.\"",
")",
"model",
",",
"optimizer",
"=",
"amp",
".",
"initialize",
"(",
"model",
",",
"optimizer",
",",
"opt_level",
"=",
"args",
".",
"fp16_opt_level",
")",
"# multi-gpu training (should be after apex fp16 initialization)",
"if",
"args",
".",
"n_gpu",
">",
"1",
":",
"model",
"=",
"torch",
".",
"nn",
".",
"DataParallel",
"(",
"model",
")",
"# Distributed training (should be after apex fp16 initialization)",
"if",
"args",
".",
"local_rank",
"!=",
"-",
"1",
":",
"model",
"=",
"torch",
".",
"nn",
".",
"parallel",
".",
"DistributedDataParallel",
"(",
"model",
",",
"device_ids",
"=",
"[",
"args",
".",
"local_rank",
"]",
",",
"output_device",
"=",
"args",
".",
"local_rank",
",",
"find_unused_parameters",
"=",
"True",
",",
")",
"# Train!",
"logger",
".",
"info",
"(",
"\"***** Running training *****\"",
")",
"logger",
".",
"info",
"(",
"\" Num examples = %d\"",
",",
"len",
"(",
"train_dataset",
")",
")",
"logger",
".",
"info",
"(",
"\" Num Epochs = %d\"",
",",
"args",
".",
"num_train_epochs",
")",
"logger",
".",
"info",
"(",
"\" Instantaneous batch size per GPU = %d\"",
",",
"args",
".",
"per_gpu_train_batch_size",
")",
"logger",
".",
"info",
"(",
"\" Total train batch size (w. parallel, distributed & accumulation) = %d\"",
",",
"args",
".",
"train_batch_size",
"*",
"args",
".",
"gradient_accumulation_steps",
"*",
"(",
"torch",
".",
"distributed",
".",
"get_world_size",
"(",
")",
"if",
"args",
".",
"local_rank",
"!=",
"-",
"1",
"else",
"1",
")",
",",
")",
"logger",
".",
"info",
"(",
"\" Gradient Accumulation steps = %d\"",
",",
"args",
".",
"gradient_accumulation_steps",
")",
"logger",
".",
"info",
"(",
"\" Total optimization steps = %d\"",
",",
"t_total",
")",
"# Distillation",
"if",
"teacher",
"is",
"not",
"None",
":",
"logger",
".",
"info",
"(",
"\" Training with distillation\"",
")",
"global_step",
"=",
"0",
"# Global TopK",
"if",
"args",
".",
"global_topk",
":",
"threshold_mem",
"=",
"None",
"epochs_trained",
"=",
"0",
"steps_trained_in_current_epoch",
"=",
"0",
"# Check if continuing training from a checkpoint",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"args",
".",
"model_name_or_path",
")",
":",
"# set global_step to global_step of last saved checkpoint from model path",
"try",
":",
"global_step",
"=",
"int",
"(",
"args",
".",
"model_name_or_path",
".",
"split",
"(",
"\"-\"",
")",
"[",
"-",
"1",
"]",
".",
"split",
"(",
"\"/\"",
")",
"[",
"0",
"]",
")",
"except",
"ValueError",
":",
"global_step",
"=",
"0",
"epochs_trained",
"=",
"global_step",
"//",
"(",
"len",
"(",
"train_dataloader",
")",
"//",
"args",
".",
"gradient_accumulation_steps",
")",
"steps_trained_in_current_epoch",
"=",
"global_step",
"%",
"(",
"len",
"(",
"train_dataloader",
")",
"//",
"args",
".",
"gradient_accumulation_steps",
")",
"logger",
".",
"info",
"(",
"\" Continuing training from checkpoint, will skip to saved global_step\"",
")",
"logger",
".",
"info",
"(",
"\" Continuing training from epoch %d\"",
",",
"epochs_trained",
")",
"logger",
".",
"info",
"(",
"\" Continuing training from global step %d\"",
",",
"global_step",
")",
"logger",
".",
"info",
"(",
"\" Will skip the first %d steps in the first epoch\"",
",",
"steps_trained_in_current_epoch",
")",
"tr_loss",
",",
"logging_loss",
"=",
"0.0",
",",
"0.0",
"model",
".",
"zero_grad",
"(",
")",
"train_iterator",
"=",
"trange",
"(",
"epochs_trained",
",",
"int",
"(",
"args",
".",
"num_train_epochs",
")",
",",
"desc",
"=",
"\"Epoch\"",
",",
"disable",
"=",
"args",
".",
"local_rank",
"not",
"in",
"[",
"-",
"1",
",",
"0",
"]",
",",
")",
"set_seed",
"(",
"args",
")",
"# Added here for reproducibility",
"for",
"_",
"in",
"train_iterator",
":",
"epoch_iterator",
"=",
"tqdm",
"(",
"train_dataloader",
",",
"desc",
"=",
"\"Iteration\"",
",",
"disable",
"=",
"args",
".",
"local_rank",
"not",
"in",
"[",
"-",
"1",
",",
"0",
"]",
")",
"for",
"step",
",",
"batch",
"in",
"enumerate",
"(",
"epoch_iterator",
")",
":",
"# Skip past any already trained steps if resuming training",
"if",
"steps_trained_in_current_epoch",
">",
"0",
":",
"steps_trained_in_current_epoch",
"-=",
"1",
"continue",
"model",
".",
"train",
"(",
")",
"batch",
"=",
"tuple",
"(",
"t",
".",
"to",
"(",
"args",
".",
"device",
")",
"for",
"t",
"in",
"batch",
")",
"threshold",
",",
"regu_lambda",
"=",
"schedule_threshold",
"(",
"step",
"=",
"global_step",
",",
"total_step",
"=",
"t_total",
",",
"warmup_steps",
"=",
"args",
".",
"warmup_steps",
",",
"final_threshold",
"=",
"args",
".",
"final_threshold",
",",
"initial_threshold",
"=",
"args",
".",
"initial_threshold",
",",
"final_warmup",
"=",
"args",
".",
"final_warmup",
",",
"initial_warmup",
"=",
"args",
".",
"initial_warmup",
",",
"final_lambda",
"=",
"args",
".",
"final_lambda",
",",
")",
"# Global TopK",
"if",
"args",
".",
"global_topk",
":",
"if",
"threshold",
"==",
"1.0",
":",
"threshold",
"=",
"-",
"1e2",
"# Or an indefinitely low quantity",
"else",
":",
"if",
"(",
"threshold_mem",
"is",
"None",
")",
"or",
"(",
"global_step",
"%",
"args",
".",
"global_topk_frequency_compute",
"==",
"0",
")",
":",
"# Sort all the values to get the global topK",
"concat",
"=",
"torch",
".",
"cat",
"(",
"[",
"param",
".",
"view",
"(",
"-",
"1",
")",
"for",
"name",
",",
"param",
"in",
"model",
".",
"named_parameters",
"(",
")",
"if",
"\"mask_scores\"",
"in",
"name",
"]",
")",
"n",
"=",
"concat",
".",
"numel",
"(",
")",
"kth",
"=",
"max",
"(",
"n",
"-",
"(",
"int",
"(",
"n",
"*",
"threshold",
")",
"+",
"1",
")",
",",
"1",
")",
"threshold_mem",
"=",
"concat",
".",
"kthvalue",
"(",
"kth",
")",
".",
"values",
".",
"item",
"(",
")",
"threshold",
"=",
"threshold_mem",
"else",
":",
"threshold",
"=",
"threshold_mem",
"inputs",
"=",
"{",
"\"input_ids\"",
":",
"batch",
"[",
"0",
"]",
",",
"\"attention_mask\"",
":",
"batch",
"[",
"1",
"]",
",",
"\"labels\"",
":",
"batch",
"[",
"3",
"]",
"}",
"if",
"args",
".",
"model_type",
"!=",
"\"distilbert\"",
":",
"inputs",
"[",
"\"token_type_ids\"",
"]",
"=",
"(",
"batch",
"[",
"2",
"]",
"if",
"args",
".",
"model_type",
"in",
"[",
"\"bert\"",
",",
"\"masked_bert\"",
",",
"\"xlnet\"",
",",
"\"albert\"",
"]",
"else",
"None",
")",
"# XLM, DistilBERT, RoBERTa, and XLM-RoBERTa don't use segment_ids",
"if",
"\"masked\"",
"in",
"args",
".",
"model_type",
":",
"inputs",
"[",
"\"threshold\"",
"]",
"=",
"threshold",
"outputs",
"=",
"model",
"(",
"*",
"*",
"inputs",
")",
"loss",
",",
"logits_stu",
"=",
"outputs",
"# model outputs are always tuple in transformers (see doc)",
"# Distillation loss",
"if",
"teacher",
"is",
"not",
"None",
":",
"if",
"\"token_type_ids\"",
"not",
"in",
"inputs",
":",
"inputs",
"[",
"\"token_type_ids\"",
"]",
"=",
"None",
"if",
"args",
".",
"teacher_type",
"==",
"\"xlm\"",
"else",
"batch",
"[",
"2",
"]",
"with",
"torch",
".",
"no_grad",
"(",
")",
":",
"(",
"logits_tea",
",",
")",
"=",
"teacher",
"(",
"input_ids",
"=",
"inputs",
"[",
"\"input_ids\"",
"]",
",",
"token_type_ids",
"=",
"inputs",
"[",
"\"token_type_ids\"",
"]",
",",
"attention_mask",
"=",
"inputs",
"[",
"\"attention_mask\"",
"]",
",",
")",
"loss_logits",
"=",
"(",
"F",
".",
"kl_div",
"(",
"input",
"=",
"F",
".",
"log_softmax",
"(",
"logits_stu",
"/",
"args",
".",
"temperature",
",",
"dim",
"=",
"-",
"1",
")",
",",
"target",
"=",
"F",
".",
"softmax",
"(",
"logits_tea",
"/",
"args",
".",
"temperature",
",",
"dim",
"=",
"-",
"1",
")",
",",
"reduction",
"=",
"\"batchmean\"",
",",
")",
"*",
"(",
"args",
".",
"temperature",
"**",
"2",
")",
")",
"loss",
"=",
"args",
".",
"alpha_distil",
"*",
"loss_logits",
"+",
"args",
".",
"alpha_ce",
"*",
"loss",
"# Regularization",
"if",
"args",
".",
"regularization",
"is",
"not",
"None",
":",
"regu_",
"=",
"regularization",
"(",
"model",
"=",
"model",
",",
"mode",
"=",
"args",
".",
"regularization",
")",
"loss",
"=",
"loss",
"+",
"regu_lambda",
"*",
"regu_",
"if",
"args",
".",
"n_gpu",
">",
"1",
":",
"loss",
"=",
"loss",
".",
"mean",
"(",
")",
"# mean() to average on multi-gpu parallel training",
"if",
"args",
".",
"gradient_accumulation_steps",
">",
"1",
":",
"loss",
"=",
"loss",
"/",
"args",
".",
"gradient_accumulation_steps",
"if",
"args",
".",
"fp16",
":",
"with",
"amp",
".",
"scale_loss",
"(",
"loss",
",",
"optimizer",
")",
"as",
"scaled_loss",
":",
"scaled_loss",
".",
"backward",
"(",
")",
"else",
":",
"loss",
".",
"backward",
"(",
")",
"tr_loss",
"+=",
"loss",
".",
"item",
"(",
")",
"if",
"(",
"step",
"+",
"1",
")",
"%",
"args",
".",
"gradient_accumulation_steps",
"==",
"0",
"or",
"(",
"# last step in epoch but step is always smaller than gradient_accumulation_steps",
"len",
"(",
"epoch_iterator",
")",
"<=",
"args",
".",
"gradient_accumulation_steps",
"and",
"(",
"step",
"+",
"1",
")",
"==",
"len",
"(",
"epoch_iterator",
")",
")",
":",
"if",
"args",
".",
"fp16",
":",
"torch",
".",
"nn",
".",
"utils",
".",
"clip_grad_norm_",
"(",
"amp",
".",
"master_params",
"(",
"optimizer",
")",
",",
"args",
".",
"max_grad_norm",
")",
"else",
":",
"torch",
".",
"nn",
".",
"utils",
".",
"clip_grad_norm_",
"(",
"model",
".",
"parameters",
"(",
")",
",",
"args",
".",
"max_grad_norm",
")",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
"and",
"args",
".",
"logging_steps",
">",
"0",
"and",
"global_step",
"%",
"args",
".",
"logging_steps",
"==",
"0",
":",
"tb_writer",
".",
"add_scalar",
"(",
"\"threshold\"",
",",
"threshold",
",",
"global_step",
")",
"for",
"name",
",",
"param",
"in",
"model",
".",
"named_parameters",
"(",
")",
":",
"if",
"not",
"param",
".",
"requires_grad",
":",
"continue",
"tb_writer",
".",
"add_scalar",
"(",
"\"parameter_mean/\"",
"+",
"name",
",",
"param",
".",
"data",
".",
"mean",
"(",
")",
",",
"global_step",
")",
"tb_writer",
".",
"add_scalar",
"(",
"\"parameter_std/\"",
"+",
"name",
",",
"param",
".",
"data",
".",
"std",
"(",
")",
",",
"global_step",
")",
"tb_writer",
".",
"add_scalar",
"(",
"\"parameter_min/\"",
"+",
"name",
",",
"param",
".",
"data",
".",
"min",
"(",
")",
",",
"global_step",
")",
"tb_writer",
".",
"add_scalar",
"(",
"\"parameter_max/\"",
"+",
"name",
",",
"param",
".",
"data",
".",
"max",
"(",
")",
",",
"global_step",
")",
"tb_writer",
".",
"add_scalar",
"(",
"\"grad_mean/\"",
"+",
"name",
",",
"param",
".",
"grad",
".",
"data",
".",
"mean",
"(",
")",
",",
"global_step",
")",
"tb_writer",
".",
"add_scalar",
"(",
"\"grad_std/\"",
"+",
"name",
",",
"param",
".",
"grad",
".",
"data",
".",
"std",
"(",
")",
",",
"global_step",
")",
"if",
"args",
".",
"regularization",
"is",
"not",
"None",
"and",
"\"mask_scores\"",
"in",
"name",
":",
"if",
"args",
".",
"regularization",
"==",
"\"l1\"",
":",
"perc",
"=",
"(",
"torch",
".",
"sigmoid",
"(",
"param",
")",
">",
"threshold",
")",
".",
"sum",
"(",
")",
".",
"item",
"(",
")",
"/",
"param",
".",
"numel",
"(",
")",
"elif",
"args",
".",
"regularization",
"==",
"\"l0\"",
":",
"perc",
"=",
"(",
"torch",
".",
"sigmoid",
"(",
"param",
"-",
"2",
"/",
"3",
"*",
"np",
".",
"log",
"(",
"0.1",
"/",
"1.1",
")",
")",
")",
".",
"sum",
"(",
")",
".",
"item",
"(",
")",
"/",
"param",
".",
"numel",
"(",
")",
"tb_writer",
".",
"add_scalar",
"(",
"\"retained_weights_perc/\"",
"+",
"name",
",",
"perc",
",",
"global_step",
")",
"optimizer",
".",
"step",
"(",
")",
"scheduler",
".",
"step",
"(",
")",
"# Update learning rate schedule",
"model",
".",
"zero_grad",
"(",
")",
"global_step",
"+=",
"1",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
"and",
"args",
".",
"logging_steps",
">",
"0",
"and",
"global_step",
"%",
"args",
".",
"logging_steps",
"==",
"0",
":",
"logs",
"=",
"{",
"}",
"if",
"(",
"args",
".",
"local_rank",
"==",
"-",
"1",
"and",
"args",
".",
"evaluate_during_training",
")",
":",
"# Only evaluate when single GPU otherwise metrics may not average well",
"results",
"=",
"evaluate",
"(",
"args",
",",
"model",
",",
"tokenizer",
")",
"for",
"key",
",",
"value",
"in",
"results",
".",
"items",
"(",
")",
":",
"eval_key",
"=",
"\"eval_{}\"",
".",
"format",
"(",
"key",
")",
"logs",
"[",
"eval_key",
"]",
"=",
"value",
"loss_scalar",
"=",
"(",
"tr_loss",
"-",
"logging_loss",
")",
"/",
"args",
".",
"logging_steps",
"learning_rate_scalar",
"=",
"scheduler",
".",
"get_lr",
"(",
")",
"logs",
"[",
"\"learning_rate\"",
"]",
"=",
"learning_rate_scalar",
"[",
"0",
"]",
"if",
"len",
"(",
"learning_rate_scalar",
")",
">",
"1",
":",
"for",
"idx",
",",
"lr",
"in",
"enumerate",
"(",
"learning_rate_scalar",
"[",
"1",
":",
"]",
")",
":",
"logs",
"[",
"f\"learning_rate/{idx+1}\"",
"]",
"=",
"lr",
"logs",
"[",
"\"loss\"",
"]",
"=",
"loss_scalar",
"if",
"teacher",
"is",
"not",
"None",
":",
"logs",
"[",
"\"loss/distil\"",
"]",
"=",
"loss_logits",
".",
"item",
"(",
")",
"if",
"args",
".",
"regularization",
"is",
"not",
"None",
":",
"logs",
"[",
"\"loss/regularization\"",
"]",
"=",
"regu_",
".",
"item",
"(",
")",
"if",
"(",
"teacher",
"is",
"not",
"None",
")",
"or",
"(",
"args",
".",
"regularization",
"is",
"not",
"None",
")",
":",
"if",
"(",
"teacher",
"is",
"not",
"None",
")",
"and",
"(",
"args",
".",
"regularization",
"is",
"not",
"None",
")",
":",
"logs",
"[",
"\"loss/instant_ce\"",
"]",
"=",
"(",
"loss",
".",
"item",
"(",
")",
"-",
"regu_lambda",
"*",
"logs",
"[",
"\"loss/regularization\"",
"]",
"-",
"args",
".",
"alpha_distil",
"*",
"logs",
"[",
"\"loss/distil\"",
"]",
")",
"/",
"args",
".",
"alpha_ce",
"elif",
"teacher",
"is",
"not",
"None",
":",
"logs",
"[",
"\"loss/instant_ce\"",
"]",
"=",
"(",
"loss",
".",
"item",
"(",
")",
"-",
"args",
".",
"alpha_distil",
"*",
"logs",
"[",
"\"loss/distil\"",
"]",
")",
"/",
"args",
".",
"alpha_ce",
"else",
":",
"logs",
"[",
"\"loss/instant_ce\"",
"]",
"=",
"loss",
".",
"item",
"(",
")",
"-",
"regu_lambda",
"*",
"logs",
"[",
"\"loss/regularization\"",
"]",
"logging_loss",
"=",
"tr_loss",
"for",
"key",
",",
"value",
"in",
"logs",
".",
"items",
"(",
")",
":",
"tb_writer",
".",
"add_scalar",
"(",
"key",
",",
"value",
",",
"global_step",
")",
"print",
"(",
"json",
".",
"dumps",
"(",
"{",
"*",
"*",
"logs",
",",
"*",
"*",
"{",
"\"step\"",
":",
"global_step",
"}",
"}",
")",
")",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
"and",
"args",
".",
"save_steps",
">",
"0",
"and",
"global_step",
"%",
"args",
".",
"save_steps",
"==",
"0",
":",
"# Save model checkpoint",
"output_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"output_dir",
",",
"\"checkpoint-{}\"",
".",
"format",
"(",
"global_step",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"output_dir",
")",
":",
"os",
".",
"makedirs",
"(",
"output_dir",
")",
"model_to_save",
"=",
"(",
"model",
".",
"module",
"if",
"hasattr",
"(",
"model",
",",
"\"module\"",
")",
"else",
"model",
")",
"# Take care of distributed/parallel training",
"model_to_save",
".",
"save_pretrained",
"(",
"output_dir",
")",
"tokenizer",
".",
"save_pretrained",
"(",
"output_dir",
")",
"torch",
".",
"save",
"(",
"args",
",",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"\"training_args.bin\"",
")",
")",
"logger",
".",
"info",
"(",
"\"Saving model checkpoint to %s\"",
",",
"output_dir",
")",
"torch",
".",
"save",
"(",
"optimizer",
".",
"state_dict",
"(",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"\"optimizer.pt\"",
")",
")",
"torch",
".",
"save",
"(",
"scheduler",
".",
"state_dict",
"(",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"\"scheduler.pt\"",
")",
")",
"logger",
".",
"info",
"(",
"\"Saving optimizer and scheduler states to %s\"",
",",
"output_dir",
")",
"if",
"args",
".",
"max_steps",
">",
"0",
"and",
"global_step",
">",
"args",
".",
"max_steps",
":",
"epoch_iterator",
".",
"close",
"(",
")",
"break",
"if",
"args",
".",
"max_steps",
">",
"0",
"and",
"global_step",
">",
"args",
".",
"max_steps",
":",
"train_iterator",
".",
"close",
"(",
")",
"break",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
":",
"tb_writer",
".",
"close",
"(",
")",
"return",
"global_step",
",",
"tr_loss",
"/",
"global_step"
] | [ 106, 0 ] | [ 417, 45 ] | python | en | ['en', 'it', 'en'] | True
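Editor's note: the training loop in this row defers its sparsity schedule to a schedule_threshold helper that is not part of the row itself. For readers reconstructing the pipeline, below is a minimal sketch of such a scheduler, assuming the cubic sparsity schedule used in movement-pruning setups. The argument names mirror the call site above; the cubic interpolation and the lambda scaling are assumptions, not the row's actual helper.

# Sketch only: a plausible schedule_threshold under the assumptions above.
def schedule_threshold(
    step,
    total_step,
    warmup_steps,
    initial_threshold,
    final_threshold,
    initial_warmup,
    final_warmup,
    final_lambda,
):
    if step <= initial_warmup * warmup_steps:
        # Dense warm-up phase: keep the permissive initial threshold.
        threshold = initial_threshold
    elif step > (total_step - final_warmup * warmup_steps):
        # Final plateau: hold the target threshold until training ends.
        threshold = final_threshold
    else:
        # Cubic interpolation between the two plateaus (assumed form).
        spars_warmup_steps = initial_warmup * warmup_steps
        spars_schedu_steps = (initial_warmup + final_warmup) * warmup_steps
        mul_coeff = 1 - (step - spars_warmup_steps) / (total_step - spars_schedu_steps)
        threshold = final_threshold + (initial_threshold - final_threshold) * (mul_coeff ** 3)
    # Ramp the regularization strength together with the threshold (assumed).
    regu_lambda = final_lambda * threshold / final_threshold
    return threshold, regu_lambda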